mercurial-3.7.3/0000755000175000017500000000000012676531544013130 5ustar mpmmpm00000000000000mercurial-3.7.3/Makefile0000644000175000017500000001567312676531524014602 0ustar mpmmpm00000000000000# If you want to change PREFIX, do not just edit it below. The changed # value wont get passed on to recursive make calls. You should instead # override the variable on the command like: # # % make PREFIX=/opt/ install PREFIX=/usr/local export PREFIX PYTHON=python $(eval HGROOT := $(shell pwd)) HGPYTHONS ?= $(HGROOT)/build/pythons PURE= PYFILES:=$(shell find mercurial hgext doc -name '*.py') DOCFILES=mercurial/help/*.txt export LANGUAGE=C export LC_ALL=C TESTFLAGS ?= $(shell echo $$HGTESTFLAGS) # Set this to e.g. "mingw32" to use a non-default compiler. COMPILER= help: @echo 'Commonly used make targets:' @echo ' all - build program and documentation' @echo ' install - install program and man pages to $$PREFIX ($(PREFIX))' @echo ' install-home - install with setup.py install --home=$$HOME ($(HOME))' @echo ' local - build for inplace usage' @echo ' tests - run all tests in the automatic test suite' @echo ' test-foo - run only specified tests (e.g. test-merge1.t)' @echo ' dist - run all tests and create a source tarball in dist/' @echo ' clean - remove files created by other targets' @echo ' (except installed files or dist source tarball)' @echo ' update-pot - update i18n/hg.pot' @echo @echo 'Example for a system-wide installation under /usr/local:' @echo ' make all && su -c "make install" && hg version' @echo @echo 'Example for a local installation (usable in this directory):' @echo ' make local && ./hg version' all: build doc local: $(PYTHON) setup.py $(PURE) \ build_py -c -d . 
\ build_ext $(COMPILER:%=-c %) -i \ build_hgexe $(COMPILER:%=-c %) -i \ build_mo env HGRCPATH= $(PYTHON) hg version build: $(PYTHON) setup.py $(PURE) build $(COMPILER:%=-c %) wheel: FORCE_SETUPTOOLS=1 $(PYTHON) setup.py $(PURE) bdist_wheel $(COMPILER:%=-c %) doc: $(MAKE) -C doc clean: -$(PYTHON) setup.py clean --all # ignore errors from this command find contrib doc hgext i18n mercurial tests \ \( -name '*.py[cdo]' -o -name '*.so' \) -exec rm -f '{}' ';' rm -f $(addprefix mercurial/,$(notdir $(wildcard mercurial/pure/[a-z]*.py))) rm -f MANIFEST MANIFEST.in hgext/__index__.py tests/*.err if test -d .hg; then rm -f mercurial/__version__.py; fi rm -rf build mercurial/locale $(MAKE) -C doc clean install: install-bin install-doc install-bin: build $(PYTHON) setup.py $(PURE) install --root="$(DESTDIR)/" --prefix="$(PREFIX)" --force install-doc: doc cd doc && $(MAKE) $(MFLAGS) install install-home: install-home-bin install-home-doc install-home-bin: build $(PYTHON) setup.py $(PURE) install --home="$(HOME)" --prefix="" --force install-home-doc: doc cd doc && $(MAKE) $(MFLAGS) PREFIX="$(HOME)" install MANIFEST-doc: $(MAKE) -C doc MANIFEST MANIFEST.in: MANIFEST-doc hg manifest | sed -e 's/^/include /' > MANIFEST.in echo include mercurial/__version__.py >> MANIFEST.in sed -e 's/^/include /' < doc/MANIFEST >> MANIFEST.in dist: tests dist-notests dist-notests: doc MANIFEST.in TAR_OPTIONS="--owner=root --group=root --mode=u+w,go-w,a+rX-s" $(PYTHON) setup.py -q sdist check: tests tests: cd tests && $(PYTHON) run-tests.py $(TESTFLAGS) test-%: cd tests && $(PYTHON) run-tests.py $(TESTFLAGS) $@ testpy-%: @echo Looking for Python $* in $(HGPYTHONS) [ -e $(HGPYTHONS)/$*/bin/python ] || ( \ cd $$(mktemp --directory --tmpdir) && \ $(MAKE) -f $(HGROOT)/contrib/Makefile.python PYTHONVER=$* PREFIX=$(HGPYTHONS)/$* python ) cd tests && $(HGPYTHONS)/$*/bin/python run-tests.py $(TESTFLAGS) check-code: hg manifest | xargs python contrib/check-code.py update-pot: i18n/hg.pot i18n/hg.pot: 
$(PYFILES) $(DOCFILES) i18n/posplit i18n/hggettext $(PYTHON) i18n/hggettext mercurial/commands.py \ hgext/*.py hgext/*/__init__.py \ mercurial/fileset.py mercurial/revset.py \ mercurial/templatefilters.py mercurial/templatekw.py \ mercurial/templater.py \ mercurial/filemerge.py \ mercurial/hgweb/webcommands.py \ $(DOCFILES) > i18n/hg.pot.tmp # All strings marked for translation in Mercurial contain # ASCII characters only. But some files contain string # literals like this '\037\213'. xgettext thinks it has to # parse them even though they are not marked for translation. # Extracting with an explicit encoding of ISO-8859-1 will make # xgettext "parse" and ignore them. echo $(PYFILES) | xargs \ xgettext --package-name "Mercurial" \ --msgid-bugs-address "" \ --copyright-holder "Matt Mackall and others" \ --from-code ISO-8859-1 --join --sort-by-file --add-comments=i18n: \ -d hg -p i18n -o hg.pot.tmp $(PYTHON) i18n/posplit i18n/hg.pot.tmp # The target file is not created before the last step. So it never is in # an intermediate state. 
mv -f i18n/hg.pot.tmp i18n/hg.pot %.po: i18n/hg.pot # work on a temporary copy for never having a half completed target cp $@ $@.tmp msgmerge --no-location --update $@.tmp $^ mv -f $@.tmp $@ # Packaging targets osx: python -c 'import bdist_mpkg.script_bdist_mpkg' || \ (echo "Missing bdist_mpkg (easy_install bdist_mpkg)"; false) rm -rf dist/mercurial-*.mpkg python -m bdist_mpkg.script_bdist_mpkg setup.py -- python contrib/fixpax.py dist/mercurial-*.mpkg/Contents/Packages/*.pkg/Contents/Archive.pax.gz mkdir -p packages/osx N=`cd dist && echo mercurial-*.mpkg | sed 's,\.mpkg$$,,'` && hdiutil create -srcfolder dist/$$N.mpkg/ -scrub -volname "$$N" -ov packages/osx/$$N.dmg rm -rf dist/mercurial-*.mpkg deb: contrib/builddeb docker-debian-jessie: mkdir -p packages/debian-jessie contrib/dockerdeb debian jessie fedora20: mkdir -p packages/fedora20 contrib/buildrpm cp rpmbuild/RPMS/*/* packages/fedora20 cp rpmbuild/SRPMS/* packages/fedora20 rm -rf rpmbuild docker-fedora20: mkdir -p packages/fedora20 contrib/dockerrpm fedora20 fedora21: mkdir -p packages/fedora21 contrib/buildrpm cp rpmbuild/RPMS/*/* packages/fedora21 cp rpmbuild/SRPMS/* packages/fedora21 rm -rf rpmbuild docker-fedora21: mkdir -p packages/fedora21 contrib/dockerrpm fedora21 centos5: mkdir -p packages/centos5 contrib/buildrpm --withpython cp rpmbuild/RPMS/*/* packages/centos5 cp rpmbuild/SRPMS/* packages/centos5 docker-centos5: mkdir -p packages/centos5 contrib/dockerrpm centos5 --withpython centos6: mkdir -p packages/centos6 contrib/buildrpm cp rpmbuild/RPMS/*/* packages/centos6 cp rpmbuild/SRPMS/* packages/centos6 docker-centos6: mkdir -p packages/centos6 contrib/dockerrpm centos6 centos7: mkdir -p packages/centos7 contrib/buildrpm cp rpmbuild/RPMS/*/* packages/centos7 cp rpmbuild/SRPMS/* packages/centos7 docker-centos7: mkdir -p packages/centos7 contrib/dockerrpm centos7 .PHONY: help all local build doc clean install install-bin install-doc \ install-home install-home-bin install-home-doc \ dist dist-notests 
check tests check-code update-pot \ osx fedora20 docker-fedora20 fedora21 docker-fedora21 \ centos5 docker-centos5 centos6 docker-centos6 centos7 docker-centos7 mercurial-3.7.3/mercurial/0000755000175000017500000000000012676531544015113 5ustar mpmmpm00000000000000mercurial-3.7.3/mercurial/lsprofcalltree.py0000644000175000017500000000524312676531524020510 0ustar mpmmpm00000000000000""" lsprofcalltree.py - lsprof output which is readable by kcachegrind Authors: * David Allouche allouche.net> * Jp Calderone & Itamar Shtull-Trauring * Johan Dahlin This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. """ from __future__ import absolute_import, print_function def label(code): if isinstance(code, str): return '~' + code # built-in functions ('~' sorts at the end) else: return '%s %s:%d' % (code.co_name, code.co_filename, code.co_firstlineno) class KCacheGrind(object): def __init__(self, profiler): self.data = profiler.getstats() self.out_file = None def output(self, out_file): self.out_file = out_file print('events: Ticks', file=out_file) self._print_summary() for entry in self.data: self._entry(entry) def _print_summary(self): max_cost = 0 for entry in self.data: totaltime = int(entry.totaltime * 1000) max_cost = max(max_cost, totaltime) print('summary: %d' % max_cost, file=self.out_file) def _entry(self, entry): out_file = self.out_file code = entry.code if isinstance(code, str): print('fi=~', file=out_file) else: print('fi=%s' % code.co_filename, file=out_file) print('fn=%s' % label(code), file=out_file) inlinetime = int(entry.inlinetime * 1000) if isinstance(code, str): print('0 ', inlinetime, file=out_file) else: print('%d %d' % (code.co_firstlineno, inlinetime), file=out_file) # recursive calls are counted in entry.calls if entry.calls: calls = entry.calls else: calls = [] if isinstance(code, str): lineno = 0 else: lineno = code.co_firstlineno for subentry in calls: 
self._subentry(lineno, subentry) print(file=out_file) def _subentry(self, lineno, subentry): out_file = self.out_file code = subentry.code print('cfn=%s' % label(code), file=out_file) if isinstance(code, str): print('cfi=~', file=out_file) print('calls=%d 0' % subentry.callcount, file=out_file) else: print('cfi=%s' % code.co_filename, file=out_file) print('calls=%d %d' % ( subentry.callcount, code.co_firstlineno), file=out_file) totaltime = int(subentry.totaltime * 1000) print('%d %d' % (lineno, totaltime), file=out_file) mercurial-3.7.3/mercurial/pushkey.py0000644000175000017500000000327712676531525017165 0ustar mpmmpm00000000000000# pushkey.py - dispatching for pushing and pulling keys # # Copyright 2010 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import from . import ( bookmarks, encoding, obsolete, phases, ) def _nslist(repo): n = {} for k in _namespaces: n[k] = "" if not obsolete.isenabled(repo, obsolete.exchangeopt): n.pop('obsolete') return n _namespaces = {"namespaces": (lambda *x: False, _nslist), "bookmarks": (bookmarks.pushbookmark, bookmarks.listbookmarks), "phases": (phases.pushphase, phases.listphases), "obsolete": (obsolete.pushmarker, obsolete.listmarkers), } def register(namespace, pushkey, listkeys): _namespaces[namespace] = (pushkey, listkeys) def _get(namespace): return _namespaces.get(namespace, (lambda *x: False, lambda *x: {})) def push(repo, namespace, key, old, new): '''should succeed iff value was old''' pk = _get(namespace)[0] return pk(repo, key, old, new) def list(repo, namespace): '''return a dict''' lk = _get(namespace)[1] return lk(repo) encode = encoding.fromlocal decode = encoding.tolocal def encodekeys(keys): """encode the content of a pushkey namespace for exchange over the wire""" return '\n'.join(['%s\t%s' % (encode(k), encode(v)) for k, v in keys]) def decodekeys(data): """decode the content 
of a pushkey namespace from exchange over the wire""" result = {} for l in data.splitlines(): k, v = l.split('\t') result[decode(k)] = decode(v) return result mercurial-3.7.3/mercurial/byterange.py0000644000175000017500000004040712676531525017451 0ustar mpmmpm00000000000000# This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, see # . # This file is part of urlgrabber, a high-level cross-protocol url-grabber # Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko # $Id: byterange.py,v 1.9 2005/02/14 21:55:07 mstenner Exp $ from __future__ import absolute_import import email import ftplib import mimetypes import os import re import socket import stat import urllib import urllib2 addclosehook = urllib.addclosehook addinfourl = urllib.addinfourl splitattr = urllib.splitattr splitpasswd = urllib.splitpasswd splitport = urllib.splitport splituser = urllib.splituser unquote = urllib.unquote class RangeError(IOError): """Error raised when an unsatisfiable range is requested.""" pass class HTTPRangeHandler(urllib2.BaseHandler): """Handler that enables HTTP Range headers. This was extremely simple. The Range header is a HTTP feature to begin with so all this class does is tell urllib2 that the "206 Partial Content" response from the HTTP server is what we expected. 
Example: import urllib2 import byterange range_handler = range.HTTPRangeHandler() opener = urllib2.build_opener(range_handler) # install it urllib2.install_opener(opener) # create Request and set Range header req = urllib2.Request('http://www.python.org/') req.header['Range'] = 'bytes=30-50' f = urllib2.urlopen(req) """ def http_error_206(self, req, fp, code, msg, hdrs): # 206 Partial Content Response r = urllib.addinfourl(fp, hdrs, req.get_full_url()) r.code = code r.msg = msg return r def http_error_416(self, req, fp, code, msg, hdrs): # HTTP's Range Not Satisfiable error raise RangeError('Requested Range Not Satisfiable') class RangeableFileObject(object): """File object wrapper to enable raw range handling. This was implemented primarily for handling range specifications for file:// urls. This object effectively makes a file object look like it consists only of a range of bytes in the stream. Examples: # expose 10 bytes, starting at byte position 20, from # /etc/aliases. >>> fo = RangeableFileObject(file('/etc/passwd', 'r'), (20,30)) # seek seeks within the range (to position 23 in this case) >>> fo.seek(3) # tell tells where your at _within the range_ (position 3 in # this case) >>> fo.tell() # read EOFs if an attempt is made to read past the last # byte in the range. the following will return only 7 bytes. >>> fo.read(30) """ def __init__(self, fo, rangetup): """Create a RangeableFileObject. fo -- a file like object. only the read() method need be supported but supporting an optimized seek() is preferable. rangetup -- a (firstbyte,lastbyte) tuple specifying the range to work over. The file object provided is assumed to be at byte offset 0. """ self.fo = fo (self.firstbyte, self.lastbyte) = range_tuple_normalize(rangetup) self.realpos = 0 self._do_seek(self.firstbyte) def __getattr__(self, name): """This effectively allows us to wrap at the instance level. Any attribute not found in _this_ object will be searched for in self.fo. 
This includes methods.""" return getattr(self.fo, name) def tell(self): """Return the position within the range. This is different from fo.seek in that position 0 is the first byte position of the range tuple. For example, if this object was created with a range tuple of (500,899), tell() will return 0 when at byte position 500 of the file. """ return (self.realpos - self.firstbyte) def seek(self, offset, whence=0): """Seek within the byte range. Positioning is identical to that described under tell(). """ assert whence in (0, 1, 2) if whence == 0: # absolute seek realoffset = self.firstbyte + offset elif whence == 1: # relative seek realoffset = self.realpos + offset elif whence == 2: # absolute from end of file # XXX: are we raising the right Error here? raise IOError('seek from end of file not supported.') # do not allow seek past lastbyte in range if self.lastbyte and (realoffset >= self.lastbyte): realoffset = self.lastbyte self._do_seek(realoffset - self.realpos) def read(self, size=-1): """Read within the range. This method will limit the size read based on the range. """ size = self._calc_read_size(size) rslt = self.fo.read(size) self.realpos += len(rslt) return rslt def readline(self, size=-1): """Read lines within the range. This method will limit the size read based on the range. """ size = self._calc_read_size(size) rslt = self.fo.readline(size) self.realpos += len(rslt) return rslt def _calc_read_size(self, size): """Handles calculating the amount of data to read based on the range. """ if self.lastbyte: if size > -1: if ((self.realpos + size) >= self.lastbyte): size = (self.lastbyte - self.realpos) else: size = (self.lastbyte - self.realpos) return size def _do_seek(self, offset): """Seek based on whether wrapped object supports seek(). offset is relative to the current position (self.realpos). 
""" assert offset >= 0 seek = getattr(self.fo, 'seek', self._poor_mans_seek) seek(self.realpos + offset) self.realpos += offset def _poor_mans_seek(self, offset): """Seek by calling the wrapped file objects read() method. This is used for file like objects that do not have native seek support. The wrapped objects read() method is called to manually seek to the desired position. offset -- read this number of bytes from the wrapped file object. raise RangeError if we encounter EOF before reaching the specified offset. """ pos = 0 bufsize = 1024 while pos < offset: if (pos + bufsize) > offset: bufsize = offset - pos buf = self.fo.read(bufsize) if len(buf) != bufsize: raise RangeError('Requested Range Not Satisfiable') pos += bufsize class FileRangeHandler(urllib2.FileHandler): """FileHandler subclass that adds Range support. This class handles Range headers exactly like an HTTP server would. """ def open_local_file(self, req): host = req.get_host() file = req.get_selector() localfile = urllib.url2pathname(file) stats = os.stat(localfile) size = stats[stat.ST_SIZE] modified = email.Utils.formatdate(stats[stat.ST_MTIME]) mtype = mimetypes.guess_type(file)[0] if host: host, port = urllib.splitport(host) if port or socket.gethostbyname(host) not in self.get_names(): raise urllib2.URLError('file not on local host') fo = open(localfile,'rb') brange = req.headers.get('Range', None) brange = range_header_to_tuple(brange) assert brange != () if brange: (fb, lb) = brange if lb == '': lb = size if fb < 0 or fb > size or lb > size: raise RangeError('Requested Range Not Satisfiable') size = (lb - fb) fo = RangeableFileObject(fo, (fb, lb)) headers = email.message_from_string( 'Content-Type: %s\nContent-Length: %d\nLast-Modified: %s\n' % (mtype or 'text/plain', size, modified)) return urllib.addinfourl(fo, headers, 'file:'+file) # FTP Range Support # Unfortunately, a large amount of base FTP code had to be copied # from urllib and urllib2 in order to insert the FTP REST command. 
# Code modifications for range support have been commented as # follows: # -- range support modifications start/end here class FTPRangeHandler(urllib2.FTPHandler): def ftp_open(self, req): host = req.get_host() if not host: raise IOError('ftp error', 'no host given') host, port = splitport(host) if port is None: port = ftplib.FTP_PORT else: port = int(port) # username/password handling user, host = splituser(host) if user: user, passwd = splitpasswd(user) else: passwd = None host = unquote(host) user = unquote(user or '') passwd = unquote(passwd or '') try: host = socket.gethostbyname(host) except socket.error as msg: raise urllib2.URLError(msg) path, attrs = splitattr(req.get_selector()) dirs = path.split('/') dirs = map(unquote, dirs) dirs, file = dirs[:-1], dirs[-1] if dirs and not dirs[0]: dirs = dirs[1:] try: fw = self.connect_ftp(user, passwd, host, port, dirs) if file: type = 'I' else: type = 'D' for attr in attrs: attr, value = splitattr(attr) if attr.lower() == 'type' and \ value in ('a', 'A', 'i', 'I', 'd', 'D'): type = value.upper() # -- range support modifications start here rest = None range_tup = range_header_to_tuple(req.headers.get('Range', None)) assert range_tup != () if range_tup: (fb, lb) = range_tup if fb > 0: rest = fb # -- range support modifications end here fp, retrlen = fw.retrfile(file, type, rest) # -- range support modifications start here if range_tup: (fb, lb) = range_tup if lb == '': if retrlen is None or retrlen == 0: raise RangeError('Requested Range Not Satisfiable due' ' to unobtainable file length.') lb = retrlen retrlen = lb - fb if retrlen < 0: # beginning of range is larger than file raise RangeError('Requested Range Not Satisfiable') else: retrlen = lb - fb fp = RangeableFileObject(fp, (0, retrlen)) # -- range support modifications end here headers = "" mtype = mimetypes.guess_type(req.get_full_url())[0] if mtype: headers += "Content-Type: %s\n" % mtype if retrlen is not None and retrlen >= 0: headers += "Content-Length: 
%d\n" % retrlen headers = email.message_from_string(headers) return addinfourl(fp, headers, req.get_full_url()) except ftplib.all_errors as msg: raise IOError('ftp error', msg) def connect_ftp(self, user, passwd, host, port, dirs): fw = ftpwrapper(user, passwd, host, port, dirs) return fw class ftpwrapper(urllib.ftpwrapper): # range support note: # this ftpwrapper code is copied directly from # urllib. The only enhancement is to add the rest # argument and pass it on to ftp.ntransfercmd def retrfile(self, file, type, rest=None): self.endtransfer() if type in ('d', 'D'): cmd = 'TYPE A' isdir = 1 else: cmd = 'TYPE ' + type isdir = 0 try: self.ftp.voidcmd(cmd) except ftplib.all_errors: self.init() self.ftp.voidcmd(cmd) conn = None if file and not isdir: # Use nlst to see if the file exists at all try: self.ftp.nlst(file) except ftplib.error_perm as reason: raise IOError('ftp error', reason) # Restore the transfer mode! self.ftp.voidcmd(cmd) # Try to retrieve as a file try: cmd = 'RETR ' + file conn = self.ftp.ntransfercmd(cmd, rest) except ftplib.error_perm as reason: if str(reason).startswith('501'): # workaround for REST not supported error fp, retrlen = self.retrfile(file, type) fp = RangeableFileObject(fp, (rest,'')) return (fp, retrlen) elif not str(reason).startswith('550'): raise IOError('ftp error', reason) if not conn: # Set transfer mode to ASCII! self.ftp.voidcmd('TYPE A') # Try a directory listing if file: cmd = 'LIST ' + file else: cmd = 'LIST' conn = self.ftp.ntransfercmd(cmd) self.busy = 1 # Pass back both a suitably decorated object and a retrieval length return (addclosehook(conn[0].makefile('rb'), self.endtransfer), conn[1]) #################################################################### # Range Tuple Functions # XXX: These range tuple functions might go better in a class. _rangere = None def range_header_to_tuple(range_header): """Get a (firstbyte,lastbyte) tuple from a Range header value. Range headers have the form "bytes=-". 
This function pulls the firstbyte and lastbyte values and returns a (firstbyte,lastbyte) tuple. If lastbyte is not specified in the header value, it is returned as an empty string in the tuple. Return None if range_header is None Return () if range_header does not conform to the range spec pattern. """ global _rangere if range_header is None: return None if _rangere is None: _rangere = re.compile(r'^bytes=(\d{1,})-(\d*)') match = _rangere.match(range_header) if match: tup = range_tuple_normalize(match.group(1, 2)) if tup and tup[1]: tup = (tup[0], tup[1]+1) return tup return () def range_tuple_to_header(range_tup): """Convert a range tuple to a Range header value. Return a string of the form "bytes=-" or None if no range is needed. """ if range_tup is None: return None range_tup = range_tuple_normalize(range_tup) if range_tup: if range_tup[1]: range_tup = (range_tup[0], range_tup[1] - 1) return 'bytes=%s-%s' % range_tup def range_tuple_normalize(range_tup): """Normalize a (first_byte,last_byte) range tuple. Return a tuple whose first element is guaranteed to be an int and whose second element will be '' (meaning: the last byte) or an int. Finally, return None if the normalized tuple == (0,'') as that is equivalent to retrieving the entire file. 
""" if range_tup is None: return None # handle first byte fb = range_tup[0] if fb in (None, ''): fb = 0 else: fb = int(fb) # handle last byte try: lb = range_tup[1] except IndexError: lb = '' else: if lb is None: lb = '' elif lb != '': lb = int(lb) # check if range is over the entire file if (fb, lb) == (0, ''): return None # check that the range is valid if lb < fb: raise RangeError('Invalid byte range: %s-%s' % (fb, lb)) return (fb, lb) mercurial-3.7.3/mercurial/context.py0000644000175000017500000021362512676531525017161 0ustar mpmmpm00000000000000# context.py - changeset and file context objects for mercurial # # Copyright 2006, 2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import errno import os import re import stat from .i18n import _ from .node import ( bin, hex, nullid, nullrev, short, wdirid, ) from . import ( encoding, error, fileset, match as matchmod, mdiff, obsolete as obsmod, patch, phases, repoview, revlog, scmutil, subrepo, util, ) propertycache = util.propertycache # Phony node value to stand-in for new files in some uses of # manifests. Manifests support 21-byte hashes for nodes which are # dirty in the working copy. _newnode = '!' 
* 21 nonascii = re.compile(r'[^\x21-\x7f]').search class basectx(object): """A basectx object represents the common logic for its children: changectx: read-only context that is already present in the repo, workingctx: a context that represents the working directory and can be committed, memctx: a context that represents changes in-memory and can also be committed.""" def __new__(cls, repo, changeid='', *args, **kwargs): if isinstance(changeid, basectx): return changeid o = super(basectx, cls).__new__(cls) o._repo = repo o._rev = nullrev o._node = nullid return o def __str__(self): return short(self.node()) def __int__(self): return self.rev() def __repr__(self): return "<%s %s>" % (type(self).__name__, str(self)) def __eq__(self, other): try: return type(self) == type(other) and self._rev == other._rev except AttributeError: return False def __ne__(self, other): return not (self == other) def __contains__(self, key): return key in self._manifest def __getitem__(self, key): return self.filectx(key) def __iter__(self): return iter(self._manifest) def _manifestmatches(self, match, s): """generate a new manifest filtered by the match argument This method is for internal use only and mainly exists to provide an object oriented way for other contexts to customize the manifest generation. """ return self.manifest().matches(match) def _matchstatus(self, other, match): """return match.always if match is none This internal method provides a way for child objects to override the match operator. """ return match or matchmod.always(self._repo.root, self._repo.getcwd()) def _buildstatus(self, other, s, match, listignored, listclean, listunknown): """build a status with respect to another context""" # Load earliest manifest first for caching reasons. More specifically, # if you have revisions 1000 and 1001, 1001 is probably stored as a # delta against 1000. 
Thus, if you read 1000 first, we'll reconstruct # 1000 and cache it so that when you read 1001, we just need to apply a # delta to what's in the cache. So that's one full reconstruction + one # delta application. if self.rev() is not None and self.rev() < other.rev(): self.manifest() mf1 = other._manifestmatches(match, s) mf2 = self._manifestmatches(match, s) modified, added = [], [] removed = [] clean = [] deleted, unknown, ignored = s.deleted, s.unknown, s.ignored deletedset = set(deleted) d = mf1.diff(mf2, clean=listclean) for fn, value in d.iteritems(): if fn in deletedset: continue if value is None: clean.append(fn) continue (node1, flag1), (node2, flag2) = value if node1 is None: added.append(fn) elif node2 is None: removed.append(fn) elif flag1 != flag2: modified.append(fn) elif node2 != _newnode: # When comparing files between two commits, we save time by # not comparing the file contents when the nodeids differ. # Note that this means we incorrectly report a reverted change # to a file as a modification. 
modified.append(fn) elif self[fn].cmp(other[fn]): modified.append(fn) else: clean.append(fn) if removed: # need to filter files if they are already reported as removed unknown = [fn for fn in unknown if fn not in mf1] ignored = [fn for fn in ignored if fn not in mf1] # if they're deleted, don't report them as removed removed = [fn for fn in removed if fn not in deletedset] return scmutil.status(modified, added, removed, deleted, unknown, ignored, clean) @propertycache def substate(self): return subrepo.state(self, self._repo.ui) def subrev(self, subpath): return self.substate[subpath][1] def rev(self): return self._rev def node(self): return self._node def hex(self): return hex(self.node()) def manifest(self): return self._manifest def repo(self): return self._repo def phasestr(self): return phases.phasenames[self.phase()] def mutable(self): return self.phase() > phases.public def getfileset(self, expr): return fileset.getfileset(self, expr) def obsolete(self): """True if the changeset is obsolete""" return self.rev() in obsmod.getrevs(self._repo, 'obsolete') def extinct(self): """True if the changeset is extinct""" return self.rev() in obsmod.getrevs(self._repo, 'extinct') def unstable(self): """True if the changeset is not obsolete but it's ancestor are""" return self.rev() in obsmod.getrevs(self._repo, 'unstable') def bumped(self): """True if the changeset try to be a successor of a public changeset Only non-public and non-obsolete changesets may be bumped. """ return self.rev() in obsmod.getrevs(self._repo, 'bumped') def divergent(self): """Is a successors of a changeset with multiple possible successors set Only non-public and non-obsolete changesets may be divergent. """ return self.rev() in obsmod.getrevs(self._repo, 'divergent') def troubled(self): """True if the changeset is either unstable, bumped or divergent""" return self.unstable() or self.bumped() or self.divergent() def troubles(self): """return the list of troubles affecting this changesets. 
Troubles are returned as strings. possible values are: - unstable, - bumped, - divergent. """ troubles = [] if self.unstable(): troubles.append('unstable') if self.bumped(): troubles.append('bumped') if self.divergent(): troubles.append('divergent') return troubles def parents(self): """return contexts for each parent changeset""" return self._parents def p1(self): return self._parents[0] def p2(self): parents = self._parents if len(parents) == 2: return parents[1] return changectx(self._repo, nullrev) def _fileinfo(self, path): if '_manifest' in self.__dict__: try: return self._manifest[path], self._manifest.flags(path) except KeyError: raise error.ManifestLookupError(self._node, path, _('not found in manifest')) if '_manifestdelta' in self.__dict__ or path in self.files(): if path in self._manifestdelta: return (self._manifestdelta[path], self._manifestdelta.flags(path)) node, flag = self._repo.manifest.find(self._changeset[0], path) if not node: raise error.ManifestLookupError(self._node, path, _('not found in manifest')) return node, flag def filenode(self, path): return self._fileinfo(path)[0] def flags(self, path): try: return self._fileinfo(path)[1] except error.LookupError: return '' def sub(self, path): '''return a subrepo for the stored revision of path, never wdir()''' return subrepo.subrepo(self, path) def nullsub(self, path, pctx): return subrepo.nullsubrepo(self, path, pctx) def workingsub(self, path): '''return a subrepo for the stored revision, or wdir if this is a wdir context. 
''' return subrepo.subrepo(self, path, allowwdir=True) def match(self, pats=[], include=None, exclude=None, default='glob', listsubrepos=False, badfn=None): r = self._repo return matchmod.match(r.root, r.getcwd(), pats, include, exclude, default, auditor=r.nofsauditor, ctx=self, listsubrepos=listsubrepos, badfn=badfn) def diff(self, ctx2=None, match=None, **opts): """Returns a diff generator for the given contexts and matcher""" if ctx2 is None: ctx2 = self.p1() if ctx2 is not None: ctx2 = self._repo[ctx2] diffopts = patch.diffopts(self._repo.ui, opts) return patch.diff(self._repo, ctx2, self, match=match, opts=diffopts) def dirs(self): return self._manifest.dirs() def hasdir(self, dir): return self._manifest.hasdir(dir) def dirty(self, missing=False, merge=True, branch=True): return False def status(self, other=None, match=None, listignored=False, listclean=False, listunknown=False, listsubrepos=False): """return status of files between two nodes or node and working directory. If other is None, compare this node with working directory. returns (modified, added, removed, deleted, unknown, ignored, clean) """ ctx1 = self ctx2 = self._repo[other] # This next code block is, admittedly, fragile logic that tests for # reversing the contexts and wouldn't need to exist if it weren't for # the fast (and common) code path of comparing the working directory # with its first parent. # # What we're aiming for here is the ability to call: # # workingctx.status(parentctx) # # If we always built the manifest for each context and compared those, # then we'd be done. But the special case of the above call means we # just copy the manifest of the parent. reversed = False if (not isinstance(ctx1, changectx) and isinstance(ctx2, changectx)): reversed = True ctx1, ctx2 = ctx2, ctx1 match = ctx2._matchstatus(ctx1, match) r = scmutil.status([], [], [], [], [], [], []) r = ctx2._buildstatus(ctx1, r, match, listignored, listclean, listunknown) if reversed: # Reverse added and removed. 
Clear deleted, unknown and ignored as # these make no sense to reverse. r = scmutil.status(r.modified, r.removed, r.added, [], [], [], r.clean) if listsubrepos: for subpath, sub in scmutil.itersubrepos(ctx1, ctx2): try: rev2 = ctx2.subrev(subpath) except KeyError: # A subrepo that existed in node1 was deleted between # node1 and node2 (inclusive). Thus, ctx2's substate # won't contain that subpath. The best we can do ignore it. rev2 = None submatch = matchmod.narrowmatcher(subpath, match) s = sub.status(rev2, match=submatch, ignored=listignored, clean=listclean, unknown=listunknown, listsubrepos=True) for rfiles, sfiles in zip(r, s): rfiles.extend("%s/%s" % (subpath, f) for f in sfiles) for l in r: l.sort() return r def makememctx(repo, parents, text, user, date, branch, files, store, editor=None, extra=None): def getfilectx(repo, memctx, path): data, mode, copied = store.getfile(path) if data is None: return None islink, isexec = mode return memfilectx(repo, path, data, islink=islink, isexec=isexec, copied=copied, memctx=memctx) if extra is None: extra = {} if branch: extra['branch'] = encoding.fromlocal(branch) ctx = memctx(repo, parents, text, files, getfilectx, user, date, extra, editor) return ctx class changectx(basectx): """A changecontext object makes access to data related to a particular changeset convenient. It represents a read-only context already present in the repo.""" def __init__(self, repo, changeid=''): """changeid is a revision number, node, or tag""" # since basectx.__new__ already took care of copying the object, we # don't need to do anything in __init__, so we just exit here if isinstance(changeid, basectx): return if changeid == '': changeid = '.' 
self._repo = repo try: if isinstance(changeid, int): self._node = repo.changelog.node(changeid) self._rev = changeid return if isinstance(changeid, long): changeid = str(changeid) if changeid == 'null': self._node = nullid self._rev = nullrev return if changeid == 'tip': self._node = repo.changelog.tip() self._rev = repo.changelog.rev(self._node) return if changeid == '.' or changeid == repo.dirstate.p1(): # this is a hack to delay/avoid loading obsmarkers # when we know that '.' won't be hidden self._node = repo.dirstate.p1() self._rev = repo.unfiltered().changelog.rev(self._node) return if len(changeid) == 20: try: self._node = changeid self._rev = repo.changelog.rev(changeid) return except error.FilteredRepoLookupError: raise except LookupError: pass try: r = int(changeid) if str(r) != changeid: raise ValueError l = len(repo.changelog) if r < 0: r += l if r < 0 or r >= l: raise ValueError self._rev = r self._node = repo.changelog.node(r) return except error.FilteredIndexError: raise except (ValueError, OverflowError, IndexError): pass if len(changeid) == 40: try: self._node = bin(changeid) self._rev = repo.changelog.rev(self._node) return except error.FilteredLookupError: raise except (TypeError, LookupError): pass # lookup bookmarks through the name interface try: self._node = repo.names.singlenode(repo, changeid) self._rev = repo.changelog.rev(self._node) return except KeyError: pass except error.FilteredRepoLookupError: raise except error.RepoLookupError: pass self._node = repo.unfiltered().changelog._partialmatch(changeid) if self._node is not None: self._rev = repo.changelog.rev(self._node) return # lookup failed # check if it might have come from damaged dirstate # # XXX we could avoid the unfiltered if we had a recognizable # exception for filtered changeset access if changeid in repo.unfiltered().dirstate.parents(): msg = _("working directory has unknown parent '%s'!") raise error.Abort(msg % short(changeid)) try: if len(changeid) == 20 and 
nonascii(changeid): changeid = hex(changeid) except TypeError: pass except (error.FilteredIndexError, error.FilteredLookupError, error.FilteredRepoLookupError): if repo.filtername.startswith('visible'): msg = _("hidden revision '%s'") % changeid hint = _('use --hidden to access hidden revisions') raise error.FilteredRepoLookupError(msg, hint=hint) msg = _("filtered revision '%s' (not in '%s' subset)") msg %= (changeid, repo.filtername) raise error.FilteredRepoLookupError(msg) except IndexError: pass raise error.RepoLookupError( _("unknown revision '%s'") % changeid) def __hash__(self): try: return hash(self._rev) except AttributeError: return id(self) def __nonzero__(self): return self._rev != nullrev @propertycache def _changeset(self): return self._repo.changelog.read(self.rev()) @propertycache def _manifest(self): return self._repo.manifest.read(self._changeset[0]) @propertycache def _manifestdelta(self): return self._repo.manifest.readdelta(self._changeset[0]) @propertycache def _parents(self): repo = self._repo p1, p2 = repo.changelog.parentrevs(self._rev) if p2 == nullrev: return [changectx(repo, p1)] return [changectx(repo, p1), changectx(repo, p2)] def changeset(self): return self._changeset def manifestnode(self): return self._changeset[0] def user(self): return self._changeset[1] def date(self): return self._changeset[2] def files(self): return self._changeset[3] def description(self): return self._changeset[4] def branch(self): return encoding.tolocal(self._changeset[5].get("branch")) def closesbranch(self): return 'close' in self._changeset[5] def extra(self): return self._changeset[5] def tags(self): return self._repo.nodetags(self._node) def bookmarks(self): return self._repo.nodebookmarks(self._node) def phase(self): return self._repo._phasecache.phase(self._repo, self._rev) def hidden(self): return self._rev in repoview.filterrevs(self._repo, 'visible') def children(self): """return contexts for each child changeset""" c = 
self._repo.changelog.children(self._node) return [changectx(self._repo, x) for x in c] def ancestors(self): for a in self._repo.changelog.ancestors([self._rev]): yield changectx(self._repo, a) def descendants(self): for d in self._repo.changelog.descendants([self._rev]): yield changectx(self._repo, d) def filectx(self, path, fileid=None, filelog=None): """get a file context from this changeset""" if fileid is None: fileid = self.filenode(path) return filectx(self._repo, path, fileid=fileid, changectx=self, filelog=filelog) def ancestor(self, c2, warn=False): """return the "best" ancestor context of self and c2 If there are multiple candidates, it will show a message and check merge.preferancestor configuration before falling back to the revlog ancestor.""" # deal with workingctxs n2 = c2._node if n2 is None: n2 = c2._parents[0]._node cahs = self._repo.changelog.commonancestorsheads(self._node, n2) if not cahs: anc = nullid elif len(cahs) == 1: anc = cahs[0] else: # experimental config: merge.preferancestor for r in self._repo.ui.configlist('merge', 'preferancestor', ['*']): try: ctx = changectx(self._repo, r) except error.RepoLookupError: continue anc = ctx.node() if anc in cahs: break else: anc = self._repo.changelog.ancestor(self._node, n2) if warn: self._repo.ui.status( (_("note: using %s as ancestor of %s and %s\n") % (short(anc), short(self._node), short(n2))) + ''.join(_(" alternatively, use --config " "merge.preferancestor=%s\n") % short(n) for n in sorted(cahs) if n != anc)) return changectx(self._repo, anc) def descendant(self, other): """True if other is descendant of this changeset""" return self._repo.changelog.descendant(self._rev, other._rev) def walk(self, match): '''Generates matching file names.''' # Wrap match.bad method to have message with nodeid def bad(fn, msg): # The manifest doesn't know about subrepos, so don't complain about # paths into valid subrepos. 
if any(fn == s or fn.startswith(s + '/') for s in self.substate): return match.bad(fn, _('no such file in rev %s') % self) m = matchmod.badmatch(match, bad) return self._manifest.walk(m) def matches(self, match): return self.walk(match) class basefilectx(object): """A filecontext object represents the common logic for its children: filectx: read-only access to a filerevision that is already present in the repo, workingfilectx: a filecontext that represents files from the working directory, memfilectx: a filecontext that represents files in-memory.""" def __new__(cls, repo, path, *args, **kwargs): return super(basefilectx, cls).__new__(cls) @propertycache def _filelog(self): return self._repo.file(self._path) @propertycache def _changeid(self): if '_changeid' in self.__dict__: return self._changeid elif '_changectx' in self.__dict__: return self._changectx.rev() elif '_descendantrev' in self.__dict__: # this file context was created from a revision with a known # descendant, we can (lazily) correct for linkrev aliases return self._adjustlinkrev(self._path, self._filelog, self._filenode, self._descendantrev) else: return self._filelog.linkrev(self._filerev) @propertycache def _filenode(self): if '_fileid' in self.__dict__: return self._filelog.lookup(self._fileid) else: return self._changectx.filenode(self._path) @propertycache def _filerev(self): return self._filelog.rev(self._filenode) @propertycache def _repopath(self): return self._path def __nonzero__(self): try: self._filenode return True except error.LookupError: # file is missing return False def __str__(self): return "%s@%s" % (self.path(), self._changectx) def __repr__(self): return "<%s %s>" % (type(self).__name__, str(self)) def __hash__(self): try: return hash((self._path, self._filenode)) except AttributeError: return id(self) def __eq__(self, other): try: return (type(self) == type(other) and self._path == other._path and self._filenode == other._filenode) except AttributeError: return False def 
__ne__(self, other): return not (self == other) def filerev(self): return self._filerev def filenode(self): return self._filenode def flags(self): return self._changectx.flags(self._path) def filelog(self): return self._filelog def rev(self): return self._changeid def linkrev(self): return self._filelog.linkrev(self._filerev) def node(self): return self._changectx.node() def hex(self): return self._changectx.hex() def user(self): return self._changectx.user() def date(self): return self._changectx.date() def files(self): return self._changectx.files() def description(self): return self._changectx.description() def branch(self): return self._changectx.branch() def extra(self): return self._changectx.extra() def phase(self): return self._changectx.phase() def phasestr(self): return self._changectx.phasestr() def manifest(self): return self._changectx.manifest() def changectx(self): return self._changectx def repo(self): return self._repo def path(self): return self._path def isbinary(self): try: return util.binary(self.data()) except IOError: return False def isexec(self): return 'x' in self.flags() def islink(self): return 'l' in self.flags() def isabsent(self): """whether this filectx represents a file not in self._changectx This is mainly for merge code to detect change/delete conflicts. This is expected to be True for all subclasses of basectx.""" return False _customcmp = False def cmp(self, fctx): """compare with other file context returns True if different than fctx. """ if fctx._customcmp: return fctx.cmp(self) if (fctx._filerev is None and (self._repo._encodefilterpats # if file data starts with '\1\n', empty metadata block is # prepended, which adds 4 bytes to filelog.size(). 
or self.size() - 4 == fctx.size()) or self.size() == fctx.size()): return self._filelog.cmp(self._filenode, fctx.data()) return True def _adjustlinkrev(self, path, filelog, fnode, srcrev, inclusive=False): """return the first ancestor of introducing If the linkrev of the file revision does not point to an ancestor of srcrev, we'll walk down the ancestors until we find one introducing this file revision. :repo: a localrepository object (used to access changelog and manifest) :path: the file path :fnode: the nodeid of the file revision :filelog: the filelog of this path :srcrev: the changeset revision we search ancestors from :inclusive: if true, the src revision will also be checked """ repo = self._repo cl = repo.unfiltered().changelog ma = repo.manifest # fetch the linkrev fr = filelog.rev(fnode) lkr = filelog.linkrev(fr) # hack to reuse ancestor computation when searching for renames memberanc = getattr(self, '_ancestrycontext', None) iteranc = None if srcrev is None: # wctx case, used by workingfilectx during mergecopy revs = [p.rev() for p in self._repo[None].parents()] inclusive = True # we skipped the real (revless) source else: revs = [srcrev] if memberanc is None: memberanc = iteranc = cl.ancestors(revs, lkr, inclusive=inclusive) # check if this linkrev is an ancestor of srcrev if lkr not in memberanc: if iteranc is None: iteranc = cl.ancestors(revs, lkr, inclusive=inclusive) for a in iteranc: ac = cl.read(a) # get changeset data (we avoid object creation) if path in ac[3]: # checking the 'files' field. # The file has been touched, check if the content is # similar to the one we search for. if fnode == ma.readfast(ac[0]).get(path): return a # In theory, we should never get out of that loop without a result. # But if manifest uses a buggy file revision (not children of the # one it replaces) we could. Such a buggy situation will likely # result is crash somewhere else at to some point. 
return lkr def introrev(self): """return the rev of the changeset which introduced this file revision This method is different from linkrev because it take into account the changeset the filectx was created from. It ensures the returned revision is one of its ancestors. This prevents bugs from 'linkrev-shadowing' when a file revision is used by multiple changesets. """ lkr = self.linkrev() attrs = vars(self) noctx = not ('_changeid' in attrs or '_changectx' in attrs) if noctx or self.rev() == lkr: return self.linkrev() return self._adjustlinkrev(self._path, self._filelog, self._filenode, self.rev(), inclusive=True) def _parentfilectx(self, path, fileid, filelog): """create parent filectx keeping ancestry info for _adjustlinkrev()""" fctx = filectx(self._repo, path, fileid=fileid, filelog=filelog) if '_changeid' in vars(self) or '_changectx' in vars(self): # If self is associated with a changeset (probably explicitly # fed), ensure the created filectx is associated with a # changeset that is an ancestor of self.changectx. # This lets us later use _adjustlinkrev to get a correct link. fctx._descendantrev = self.rev() fctx._ancestrycontext = getattr(self, '_ancestrycontext', None) elif '_descendantrev' in vars(self): # Otherwise propagate _descendantrev if we have one associated. fctx._descendantrev = self._descendantrev fctx._ancestrycontext = getattr(self, '_ancestrycontext', None) return fctx def parents(self): _path = self._path fl = self._filelog parents = self._filelog.parents(self._filenode) pl = [(_path, node, fl) for node in parents if node != nullid] r = fl.renamed(self._filenode) if r: # - In the simple rename case, both parent are nullid, pl is empty. # - In case of merge, only one of the parent is null id and should # be replaced with the rename information. This parent is -always- # the first one. 
# # As null id have always been filtered out in the previous list # comprehension, inserting to 0 will always result in "replacing # first nullid parent with rename information. pl.insert(0, (r[0], r[1], self._repo.file(r[0]))) return [self._parentfilectx(path, fnode, l) for path, fnode, l in pl] def p1(self): return self.parents()[0] def p2(self): p = self.parents() if len(p) == 2: return p[1] return filectx(self._repo, self._path, fileid=-1, filelog=self._filelog) def annotate(self, follow=False, linenumber=None, diffopts=None): '''returns a list of tuples of (ctx, line) for each line in the file, where ctx is the filectx of the node where that line was last changed. This returns tuples of ((ctx, linenumber), line) for each line, if "linenumber" parameter is NOT "None". In such tuples, linenumber means one at the first appearance in the managed file. To reduce annotation cost, this returns fixed value(False is used) as linenumber, if "linenumber" parameter is "False".''' if linenumber is None: def decorate(text, rev): return ([rev] * len(text.splitlines()), text) elif linenumber: def decorate(text, rev): size = len(text.splitlines()) return ([(rev, i) for i in xrange(1, size + 1)], text) else: def decorate(text, rev): return ([(rev, False)] * len(text.splitlines()), text) def pair(parent, child): blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts, refine=True) for (a1, a2, b1, b2), t in blocks: # Changed blocks ('!') or blocks made only of blank lines ('~') # belong to the child. if t == '=': child[0][b1:b2] = parent[0][a1:a2] return child getlog = util.lrucachefunc(lambda x: self._repo.file(x)) def parents(f): # Cut _descendantrev here to mitigate the penalty of lazy linkrev # adjustment. Otherwise, p._adjustlinkrev() would walk changelog # from the topmost introrev (= srcrev) down to p.linkrev() if it # isn't an ancestor of the srcrev. f._changeid pl = f.parents() # Don't return renamed parents if we aren't following. 
if not follow: pl = [p for p in pl if p.path() == f.path()] # renamed filectx won't have a filelog yet, so set it # from the cache to save time for p in pl: if not '_filelog' in p.__dict__: p._filelog = getlog(p.path()) return pl # use linkrev to find the first changeset where self appeared base = self introrev = self.introrev() if self.rev() != introrev: base = self.filectx(self.filenode(), changeid=introrev) if getattr(base, '_ancestrycontext', None) is None: cl = self._repo.changelog if introrev is None: # wctx is not inclusive, but works because _ancestrycontext # is used to test filelog revisions ac = cl.ancestors([p.rev() for p in base.parents()], inclusive=True) else: ac = cl.ancestors([introrev], inclusive=True) base._ancestrycontext = ac # This algorithm would prefer to be recursive, but Python is a # bit recursion-hostile. Instead we do an iterative # depth-first search. visit = [base] hist = {} pcache = {} needed = {base: 1} while visit: f = visit[-1] pcached = f in pcache if not pcached: pcache[f] = parents(f) ready = True pl = pcache[f] for p in pl: if p not in hist: ready = False visit.append(p) if not pcached: needed[p] = needed.get(p, 0) + 1 if ready: visit.pop() reusable = f in hist if reusable: curr = hist[f] else: curr = decorate(f.data(), f) for p in pl: if not reusable: curr = pair(hist[p], curr) if needed[p] == 1: del hist[p] del needed[p] else: needed[p] -= 1 hist[f] = curr pcache[f] = [] return zip(hist[base][0], hist[base][1].splitlines(True)) def ancestors(self, followfirst=False): visit = {} c = self if followfirst: cut = 1 else: cut = None while True: for parent in c.parents()[:cut]: visit[(parent.linkrev(), parent.filenode())] = parent if not visit: break c = visit.pop(max(visit)) yield c class filectx(basefilectx): """A filecontext object makes access to data related to a particular filerevision convenient.""" def __init__(self, repo, path, changeid=None, fileid=None, filelog=None, changectx=None): """changeid can be a changeset 
revision, node, or tag. fileid can be a file revision or node.""" self._repo = repo self._path = path assert (changeid is not None or fileid is not None or changectx is not None), \ ("bad args: changeid=%r, fileid=%r, changectx=%r" % (changeid, fileid, changectx)) if filelog is not None: self._filelog = filelog if changeid is not None: self._changeid = changeid if changectx is not None: self._changectx = changectx if fileid is not None: self._fileid = fileid @propertycache def _changectx(self): try: return changectx(self._repo, self._changeid) except error.FilteredRepoLookupError: # Linkrev may point to any revision in the repository. When the # repository is filtered this may lead to `filectx` trying to build # `changectx` for filtered revision. In such case we fallback to # creating `changectx` on the unfiltered version of the reposition. # This fallback should not be an issue because `changectx` from # `filectx` are not used in complex operations that care about # filtering. # # This fallback is a cheap and dirty fix that prevent several # crashes. It does not ensure the behavior is correct. However the # behavior was not correct before filtering either and "incorrect # behavior" is seen as better as "crash" # # Linkrevs have several serious troubles with filtering that are # complicated to solve. Proper handling of the issue here should be # considered when solving linkrev issue are on the table. 
return changectx(self._repo.unfiltered(), self._changeid) def filectx(self, fileid, changeid=None): '''opens an arbitrary revision of the file without opening a new filelog''' return filectx(self._repo, self._path, fileid=fileid, filelog=self._filelog, changeid=changeid) def data(self): try: return self._filelog.read(self._filenode) except error.CensoredNodeError: if self._repo.ui.config("censor", "policy", "abort") == "ignore": return "" raise error.Abort(_("censored node: %s") % short(self._filenode), hint=_("set censor.policy to ignore errors")) def size(self): return self._filelog.size(self._filerev) def renamed(self): """check if file was actually renamed in this changeset revision If rename logged in file revision, we report copy for changeset only if file revisions linkrev points back to the changeset in question or both changeset parents contain different file revisions. """ renamed = self._filelog.renamed(self._filenode) if not renamed: return renamed if self.rev() == self.linkrev(): return renamed name = self.path() fnode = self._filenode for p in self._changectx.parents(): try: if fnode == p.filenode(name): return None except error.LookupError: pass return renamed def children(self): # hard for renames c = self._filelog.children(self._filenode) return [filectx(self._repo, self._path, fileid=x, filelog=self._filelog) for x in c] class committablectx(basectx): """A committablectx object provides common functionality for a context that wants the ability to commit, e.g. 
workingctx or memctx.""" def __init__(self, repo, text="", user=None, date=None, extra=None, changes=None): self._repo = repo self._rev = None self._node = None self._text = text if date: self._date = util.parsedate(date) if user: self._user = user if changes: self._status = changes self._extra = {} if extra: self._extra = extra.copy() if 'branch' not in self._extra: try: branch = encoding.fromlocal(self._repo.dirstate.branch()) except UnicodeDecodeError: raise error.Abort(_('branch name not in UTF-8!')) self._extra['branch'] = branch if self._extra['branch'] == '': self._extra['branch'] = 'default' def __str__(self): return str(self._parents[0]) + "+" def __nonzero__(self): return True def _buildflagfunc(self): # Create a fallback function for getting file flags when the # filesystem doesn't support them copiesget = self._repo.dirstate.copies().get parents = self.parents() if len(parents) < 2: # when we have one parent, it's easy: copy from parent man = parents[0].manifest() def func(f): f = copiesget(f, f) return man.flags(f) else: # merges are tricky: we try to reconstruct the unstored # result from the merge (issue1802) p1, p2 = parents pa = p1.ancestor(p2) m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest() def func(f): f = copiesget(f, f) # may be wrong for merges with copies fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f) if fl1 == fl2: return fl1 if fl1 == fla: return fl2 if fl2 == fla: return fl1 return '' # punt for conflicts return func @propertycache def _flagfunc(self): return self._repo.dirstate.flagfunc(self._buildflagfunc) @propertycache def _manifest(self): """generate a manifest corresponding to the values in self._status This reuse the file nodeid from parent, but we append an extra letter when modified. Modified files get an extra 'm' while added files get an extra 'a'. This is used by manifests merge to see that files are different and by update logic to avoid deleting newly added files. 
""" parents = self.parents() man1 = parents[0].manifest() man = man1.copy() if len(parents) > 1: man2 = self.p2().manifest() def getman(f): if f in man1: return man1 return man2 else: getman = lambda f: man1 copied = self._repo.dirstate.copies() ff = self._flagfunc for i, l in (("a", self._status.added), ("m", self._status.modified)): for f in l: orig = copied.get(f, f) man[f] = getman(orig).get(orig, nullid) + i try: man.setflag(f, ff(f)) except OSError: pass for f in self._status.deleted + self._status.removed: if f in man: del man[f] return man @propertycache def _status(self): return self._repo.status() @propertycache def _user(self): return self._repo.ui.username() @propertycache def _date(self): return util.makedate() def subrev(self, subpath): return None def manifestnode(self): return None def user(self): return self._user or self._repo.ui.username() def date(self): return self._date def description(self): return self._text def files(self): return sorted(self._status.modified + self._status.added + self._status.removed) def modified(self): return self._status.modified def added(self): return self._status.added def removed(self): return self._status.removed def deleted(self): return self._status.deleted def branch(self): return encoding.tolocal(self._extra['branch']) def closesbranch(self): return 'close' in self._extra def extra(self): return self._extra def tags(self): return [] def bookmarks(self): b = [] for p in self.parents(): b.extend(p.bookmarks()) return b def phase(self): phase = phases.draft # default phase to draft for p in self.parents(): phase = max(phase, p.phase()) return phase def hidden(self): return False def children(self): return [] def flags(self, path): if '_manifest' in self.__dict__: try: return self._manifest.flags(path) except KeyError: return '' try: return self._flagfunc(path) except OSError: return '' def ancestor(self, c2): """return the "best" ancestor context of self and c2""" return self._parents[0].ancestor(c2) # punt on 
two parents for now def walk(self, match): '''Generates matching file names.''' return sorted(self._repo.dirstate.walk(match, sorted(self.substate), True, False)) def matches(self, match): return sorted(self._repo.dirstate.matches(match)) def ancestors(self): for p in self._parents: yield p for a in self._repo.changelog.ancestors( [p.rev() for p in self._parents]): yield changectx(self._repo, a) def markcommitted(self, node): """Perform post-commit cleanup necessary after committing this ctx Specifically, this updates backing stores this working context wraps to reflect the fact that the changes reflected by this workingctx have been committed. For example, it marks modified and added files as normal in the dirstate. """ self._repo.dirstate.beginparentchange() for f in self.modified() + self.added(): self._repo.dirstate.normal(f) for f in self.removed(): self._repo.dirstate.drop(f) self._repo.dirstate.setparents(node) self._repo.dirstate.endparentchange() # write changes out explicitly, because nesting wlock at # runtime may prevent 'wlock.release()' in 'repo.commit()' # from immediately doing so for subsequent changing files self._repo.dirstate.write(self._repo.currenttransaction()) class workingctx(committablectx): """A workingctx object makes access to data related to the current working directory convenient. date - any valid date string or (unixtime, offset), or None. user - username string, or None. extra - a dictionary of extra values, or None. changes - a list of file lists as returned by localrepo.status() or None to use the repository status. 
""" def __init__(self, repo, text="", user=None, date=None, extra=None, changes=None): super(workingctx, self).__init__(repo, text, user, date, extra, changes) def __iter__(self): d = self._repo.dirstate for f in d: if d[f] != 'r': yield f def __contains__(self, key): return self._repo.dirstate[key] not in "?r" def hex(self): return hex(wdirid) @propertycache def _parents(self): p = self._repo.dirstate.parents() if p[1] == nullid: p = p[:-1] return [changectx(self._repo, x) for x in p] def filectx(self, path, filelog=None): """get a file context from the working directory""" return workingfilectx(self._repo, path, workingctx=self, filelog=filelog) def dirty(self, missing=False, merge=True, branch=True): "check whether a working directory is modified" # check subrepos first for s in sorted(self.substate): if self.sub(s).dirty(): return True # check current working dir return ((merge and self.p2()) or (branch and self.branch() != self.p1().branch()) or self.modified() or self.added() or self.removed() or (missing and self.deleted())) def add(self, list, prefix=""): join = lambda f: os.path.join(prefix, f) with self._repo.wlock(): ui, ds = self._repo.ui, self._repo.dirstate rejected = [] lstat = self._repo.wvfs.lstat for f in list: scmutil.checkportable(ui, join(f)) try: st = lstat(f) except OSError: ui.warn(_("%s does not exist!\n") % join(f)) rejected.append(f) continue if st.st_size > 10000000: ui.warn(_("%s: up to %d MB of RAM may be required " "to manage this file\n" "(use 'hg revert %s' to cancel the " "pending addition)\n") % (f, 3 * st.st_size // 1000000, join(f))) if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)): ui.warn(_("%s not added: only files and symlinks " "supported currently\n") % join(f)) rejected.append(f) elif ds[f] in 'amn': ui.warn(_("%s already tracked!\n") % join(f)) elif ds[f] == 'r': ds.normallookup(f) else: ds.add(f) return rejected def forget(self, files, prefix=""): join = lambda f: os.path.join(prefix, f) with 
self._repo.wlock(): rejected = [] for f in files: if f not in self._repo.dirstate: self._repo.ui.warn(_("%s not tracked!\n") % join(f)) rejected.append(f) elif self._repo.dirstate[f] != 'a': self._repo.dirstate.remove(f) else: self._repo.dirstate.drop(f) return rejected def undelete(self, list): pctxs = self.parents() with self._repo.wlock(): for f in list: if self._repo.dirstate[f] != 'r': self._repo.ui.warn(_("%s not removed!\n") % f) else: fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f] t = fctx.data() self._repo.wwrite(f, t, fctx.flags()) self._repo.dirstate.normal(f) def copy(self, source, dest): try: st = self._repo.wvfs.lstat(dest) except OSError as err: if err.errno != errno.ENOENT: raise self._repo.ui.warn(_("%s does not exist!\n") % dest) return if not (stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode)): self._repo.ui.warn(_("copy failed: %s is not a file or a " "symbolic link\n") % dest) else: with self._repo.wlock(): if self._repo.dirstate[dest] in '?': self._repo.dirstate.add(dest) elif self._repo.dirstate[dest] in 'r': self._repo.dirstate.normallookup(dest) self._repo.dirstate.copy(source, dest) def match(self, pats=[], include=None, exclude=None, default='glob', listsubrepos=False, badfn=None): r = self._repo # Only a case insensitive filesystem needs magic to translate user input # to actual case in the filesystem. 
if not util.checkcase(r.root): return matchmod.icasefsmatcher(r.root, r.getcwd(), pats, include, exclude, default, r.auditor, self, listsubrepos=listsubrepos, badfn=badfn) return matchmod.match(r.root, r.getcwd(), pats, include, exclude, default, auditor=r.auditor, ctx=self, listsubrepos=listsubrepos, badfn=badfn) def _filtersuspectsymlink(self, files): if not files or self._repo.dirstate._checklink: return files # Symlink placeholders may get non-symlink-like contents # via user error or dereferencing by NFS or Samba servers, # so we filter out any placeholders that don't look like a # symlink sane = [] for f in files: if self.flags(f) == 'l': d = self[f].data() if d == '' or len(d) >= 1024 or '\n' in d or util.binary(d): self._repo.ui.debug('ignoring suspect symlink placeholder' ' "%s"\n' % f) continue sane.append(f) return sane def _checklookup(self, files): # check for any possibly clean files if not files: return [], [] modified = [] fixup = [] pctx = self._parents[0] # do a full compare of any files that might have changed for f in sorted(files): if (f not in pctx or self.flags(f) != pctx.flags(f) or pctx[f].cmp(self[f])): modified.append(f) else: fixup.append(f) # update dirstate for files that are actually clean if fixup: try: # updating the dirstate is optional # so we don't wait on the lock # wlock can invalidate the dirstate, so cache normal _after_ # taking the lock with self._repo.wlock(False): normal = self._repo.dirstate.normal for f in fixup: normal(f) # write changes out explicitly, because nesting # wlock at runtime may prevent 'wlock.release()' # after this block from doing so for subsequent # changing files self._repo.dirstate.write(self._repo.currenttransaction()) except error.LockError: pass return modified, fixup def _manifestmatches(self, match, s): """Slow path for workingctx The fast path is when we compare the working directory to its parent which means this function is comparing with a non-parent; therefore we need to build a manifest 
and return what matches. """ mf = self._repo['.']._manifestmatches(match, s) for f in s.modified + s.added: mf[f] = _newnode mf.setflag(f, self.flags(f)) for f in s.removed: if f in mf: del mf[f] return mf def _dirstatestatus(self, match=None, ignored=False, clean=False, unknown=False): '''Gets the status from the dirstate -- internal use only.''' listignored, listclean, listunknown = ignored, clean, unknown match = match or matchmod.always(self._repo.root, self._repo.getcwd()) subrepos = [] if '.hgsub' in self: subrepos = sorted(self.substate) cmp, s = self._repo.dirstate.status(match, subrepos, listignored, listclean, listunknown) # check for any possibly clean files if cmp: modified2, fixup = self._checklookup(cmp) s.modified.extend(modified2) # update dirstate for files that are actually clean if fixup and listclean: s.clean.extend(fixup) if match.always(): # cache for performance if s.unknown or s.ignored or s.clean: # "_status" is cached with list*=False in the normal route self._status = scmutil.status(s.modified, s.added, s.removed, s.deleted, [], [], []) else: self._status = s return s def _buildstatus(self, other, s, match, listignored, listclean, listunknown): """build a status with respect to another context This includes logic for maintaining the fast path of status when comparing the working directory against its parent, which is to skip building a new manifest if self (working directory) is not comparing against its parent (repo['.']). """ s = self._dirstatestatus(match, listignored, listclean, listunknown) # Filter out symlinks that, in the case of FAT32 and NTFS filesystems, # might have accidentally ended up with the entire contents of the file # they are supposed to be linking to. 
s.modified[:] = self._filtersuspectsymlink(s.modified) if other != self._repo['.']: s = super(workingctx, self)._buildstatus(other, s, match, listignored, listclean, listunknown) return s def _matchstatus(self, other, match): """override the match method with a filter for directory patterns We use inheritance to customize the match.bad method only in cases of workingctx since it belongs only to the working directory when comparing against the parent changeset. If we aren't comparing against the working directory's parent, then we just use the default match object sent to us. """ superself = super(workingctx, self) match = superself._matchstatus(other, match) if other != self._repo['.']: def bad(f, msg): # 'f' may be a directory pattern from 'match.files()', # so 'f not in ctx1' is not enough if f not in other and not other.hasdir(f): self._repo.ui.warn('%s: %s\n' % (self._repo.dirstate.pathto(f), msg)) match.bad = bad return match class committablefilectx(basefilectx): """A committablefilectx provides common functionality for a file context that wants the ability to commit, e.g. 
workingfilectx or memfilectx.""" def __init__(self, repo, path, filelog=None, ctx=None): self._repo = repo self._path = path self._changeid = None self._filerev = self._filenode = None if filelog is not None: self._filelog = filelog if ctx: self._changectx = ctx def __nonzero__(self): return True def linkrev(self): # linked to self._changectx no matter if file is modified or not return self.rev() def parents(self): '''return parent filectxs, following copies if necessary''' def filenode(ctx, path): return ctx._manifest.get(path, nullid) path = self._path fl = self._filelog pcl = self._changectx._parents renamed = self.renamed() if renamed: pl = [renamed + (None,)] else: pl = [(path, filenode(pcl[0], path), fl)] for pc in pcl[1:]: pl.append((path, filenode(pc, path), fl)) return [self._parentfilectx(p, fileid=n, filelog=l) for p, n, l in pl if n != nullid] def children(self): return [] class workingfilectx(committablefilectx): """A workingfilectx object makes access to data related to a particular file in the working directory convenient.""" def __init__(self, repo, path, filelog=None, workingctx=None): super(workingfilectx, self).__init__(repo, path, filelog, workingctx) @propertycache def _changectx(self): return workingctx(self._repo) def data(self): return self._repo.wread(self._path) def renamed(self): rp = self._repo.dirstate.copied(self._path) if not rp: return None return rp, self._changectx._parents[0]._manifest.get(rp, nullid) def size(self): return self._repo.wvfs.lstat(self._path).st_size def date(self): t, tz = self._changectx.date() try: return (self._repo.wvfs.lstat(self._path).st_mtime, tz) except OSError as err: if err.errno != errno.ENOENT: raise return (t, tz) def cmp(self, fctx): """compare with other file context returns True if different than fctx. 
""" # fctx should be a filectx (not a workingfilectx) # invert comparison to reuse the same code path return fctx.cmp(self) def remove(self, ignoremissing=False): """wraps unlink for a repo's working directory""" util.unlinkpath(self._repo.wjoin(self._path), ignoremissing) def write(self, data, flags): """wraps repo.wwrite""" self._repo.wwrite(self._path, data, flags) class workingcommitctx(workingctx): """A workingcommitctx object makes access to data related to the revision being committed convenient. This hides changes in the working directory, if they aren't committed in this context. """ def __init__(self, repo, changes, text="", user=None, date=None, extra=None): super(workingctx, self).__init__(repo, text, user, date, extra, changes) def _dirstatestatus(self, match=None, ignored=False, clean=False, unknown=False): """Return matched files only in ``self._status`` Uncommitted files appear "clean" via this context, even if they aren't actually so in the working directory. """ match = match or matchmod.always(self._repo.root, self._repo.getcwd()) if clean: clean = [f for f in self._manifest if f not in self._changedset] else: clean = [] return scmutil.status([f for f in self._status.modified if match(f)], [f for f in self._status.added if match(f)], [f for f in self._status.removed if match(f)], [], [], [], clean) @propertycache def _changedset(self): """Return the set of files changed in this context """ changed = set(self._status.modified) changed.update(self._status.added) changed.update(self._status.removed) return changed def makecachingfilectxfn(func): """Create a filectxfn that caches based on the path. We can't use util.cachefunc because it uses all arguments as the cache key and this creates a cycle since the arguments include the repo and memctx. 
""" cache = {} def getfilectx(repo, memctx, path): if path not in cache: cache[path] = func(repo, memctx, path) return cache[path] return getfilectx class memctx(committablectx): """Use memctx to perform in-memory commits via localrepo.commitctx(). Revision information is supplied at initialization time while related files data and is made available through a callback mechanism. 'repo' is the current localrepo, 'parents' is a sequence of two parent revisions identifiers (pass None for every missing parent), 'text' is the commit message and 'files' lists names of files touched by the revision (normalized and relative to repository root). filectxfn(repo, memctx, path) is a callable receiving the repository, the current memctx object and the normalized path of requested file, relative to repository root. It is fired by the commit function for every file in 'files', but calls order is undefined. If the file is available in the revision being committed (updated or added), filectxfn returns a memfilectx object. If the file was removed, filectxfn raises an IOError. Moved files are represented by marking the source file removed and the new file added with copy information (see memfilectx). user receives the committer name and defaults to current repository username, date is the commit date in any format supported by util.parsedate() and defaults to current date, extra is a dictionary of metadata or is left empty. """ # Mercurial <= 3.1 expects the filectxfn to raise IOError for missing files. # Extensions that need to retain compatibility across Mercurial 3.1 can use # this field to determine what to do in filectxfn. 
_returnnoneformissingfiles = True def __init__(self, repo, parents, text, files, filectxfn, user=None, date=None, extra=None, editor=False): super(memctx, self).__init__(repo, text, user, date, extra) self._rev = None self._node = None parents = [(p or nullid) for p in parents] p1, p2 = parents self._parents = [changectx(self._repo, p) for p in (p1, p2)] files = sorted(set(files)) self._files = files self.substate = {} # if store is not callable, wrap it in a function if not callable(filectxfn): def getfilectx(repo, memctx, path): fctx = filectxfn[path] # this is weird but apparently we only keep track of one parent # (why not only store that instead of a tuple?) copied = fctx.renamed() if copied: copied = copied[0] return memfilectx(repo, path, fctx.data(), islink=fctx.islink(), isexec=fctx.isexec(), copied=copied, memctx=memctx) self._filectxfn = getfilectx else: # memoizing increases performance for e.g. vcs convert scenarios. self._filectxfn = makecachingfilectxfn(filectxfn) if extra: self._extra = extra.copy() else: self._extra = {} if self._extra.get('branch', '') == '': self._extra['branch'] = 'default' if editor: self._text = editor(self._repo, self, []) self._repo.savecommitmessage(self._text) def filectx(self, path, filelog=None): """get a file context from the working directory Returns None if file doesn't exist and should be removed.""" return self._filectxfn(self._repo, self, path) def commit(self): """commit context to the repo""" return self._repo.commitctx(self) @propertycache def _manifest(self): """generate a manifest based on the return values of filectxfn""" # keep this simple for now; just worry about p1 pctx = self._parents[0] man = pctx.manifest().copy() for f in self._status.modified: p1node = nullid p2node = nullid p = pctx[f].parents() # if file isn't in pctx, check p2? 
if len(p) > 0: p1node = p[0].node() if len(p) > 1: p2node = p[1].node() man[f] = revlog.hash(self[f].data(), p1node, p2node) for f in self._status.added: man[f] = revlog.hash(self[f].data(), nullid, nullid) for f in self._status.removed: if f in man: del man[f] return man @propertycache def _status(self): """Calculate exact status from ``files`` specified at construction """ man1 = self.p1().manifest() p2 = self._parents[1] # "1 < len(self._parents)" can't be used for checking # existence of the 2nd parent, because "memctx._parents" is # explicitly initialized by the list, of which length is 2. if p2.node() != nullid: man2 = p2.manifest() managing = lambda f: f in man1 or f in man2 else: managing = lambda f: f in man1 modified, added, removed = [], [], [] for f in self._files: if not managing(f): added.append(f) elif self[f]: modified.append(f) else: removed.append(f) return scmutil.status(modified, added, removed, [], [], [], []) class memfilectx(committablefilectx): """memfilectx represents an in-memory file to commit. See memctx and committablefilectx for more details. """ def __init__(self, repo, path, data, islink=False, isexec=False, copied=None, memctx=None): """ path is the normalized file path relative to repository root. data is the file content as a string. islink is True if the file is a symbolic link. isexec is True if the file is executable. 
copied is the source file path if current file was copied in the revision being committed, or None.""" super(memfilectx, self).__init__(repo, path, None, memctx) self._data = data self._flags = (islink and 'l' or '') + (isexec and 'x' or '') self._copied = None if copied: self._copied = (copied, nullid) def data(self): return self._data def size(self): return len(self.data()) def flags(self): return self._flags def renamed(self): return self._copied def remove(self, ignoremissing=False): """wraps unlink for a repo's working directory""" # need to figure out what to do here del self._changectx[self._path] def write(self, data, flags): """wraps repo.wwrite""" self._data = data mercurial-3.7.3/mercurial/mpatch.c0000644000175000017500000002102512676531524016531 0ustar mpmmpm00000000000000/* mpatch.c - efficient binary patching for Mercurial This implements a patch algorithm that's O(m + nlog n) where m is the size of the output and n is the number of patches. Given a list of binary patches, it unpacks each into a hunk list, then combines the hunk lists with a treewise recursion to form a single hunk list. This hunk list is then applied to the original text. The text (or binary) fragments are copied directly from their source Python objects into a preallocated output string to avoid the allocation of intermediate Python objects. Working memory is about 2x the total number of hunks. Copyright 2005, 2006 Matt Mackall This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. 
*/ #define PY_SSIZE_T_CLEAN #include #include #include #include "util.h" static char mpatch_doc[] = "Efficient binary patching."; static PyObject *mpatch_Error; struct frag { int start, end, len; const char *data; }; struct flist { struct frag *base, *head, *tail; }; static struct flist *lalloc(Py_ssize_t size) { struct flist *a = NULL; if (size < 1) size = 1; a = (struct flist *)malloc(sizeof(struct flist)); if (a) { a->base = (struct frag *)malloc(sizeof(struct frag) * size); if (a->base) { a->head = a->tail = a->base; return a; } free(a); a = NULL; } if (!PyErr_Occurred()) PyErr_NoMemory(); return NULL; } static void lfree(struct flist *a) { if (a) { free(a->base); free(a); } } static Py_ssize_t lsize(struct flist *a) { return a->tail - a->head; } /* move hunks in source that are less cut to dest, compensating for changes in offset. the last hunk may be split if necessary. */ static int gather(struct flist *dest, struct flist *src, int cut, int offset) { struct frag *d = dest->tail, *s = src->head; int postend, c, l; while (s != src->tail) { if (s->start + offset >= cut) break; /* we've gone far enough */ postend = offset + s->start + s->len; if (postend <= cut) { /* save this hunk */ offset += s->start + s->len - s->end; *d++ = *s++; } else { /* break up this hunk */ c = cut - offset; if (s->end < c) c = s->end; l = cut - offset - s->start; if (s->len < l) l = s->len; offset += s->start + l - c; d->start = s->start; d->end = c; d->len = l; d->data = s->data; d++; s->start = c; s->len = s->len - l; s->data = s->data + l; break; } } dest->tail = d; src->head = s; return offset; } /* like gather, but with no output list */ static int discard(struct flist *src, int cut, int offset) { struct frag *s = src->head; int postend, c, l; while (s != src->tail) { if (s->start + offset >= cut) break; postend = offset + s->start + s->len; if (postend <= cut) { offset += s->start + s->len - s->end; s++; } else { c = cut - offset; if (s->end < c) c = s->end; l = cut - offset - 
s->start; if (s->len < l) l = s->len; offset += s->start + l - c; s->start = c; s->len = s->len - l; s->data = s->data + l; break; } } src->head = s; return offset; } /* combine hunk lists a and b, while adjusting b for offset changes in a/ this deletes a and b and returns the resultant list. */ static struct flist *combine(struct flist *a, struct flist *b) { struct flist *c = NULL; struct frag *bh, *ct; int offset = 0, post; if (a && b) c = lalloc((lsize(a) + lsize(b)) * 2); if (c) { for (bh = b->head; bh != b->tail; bh++) { /* save old hunks */ offset = gather(c, a, bh->start, offset); /* discard replaced hunks */ post = discard(a, bh->end, offset); /* insert new hunk */ ct = c->tail; ct->start = bh->start - offset; ct->end = bh->end - post; ct->len = bh->len; ct->data = bh->data; c->tail++; offset = post; } /* hold on to tail from a */ memcpy(c->tail, a->head, sizeof(struct frag) * lsize(a)); c->tail += lsize(a); } lfree(a); lfree(b); return c; } /* decode a binary patch into a hunk list */ static struct flist *decode(const char *bin, Py_ssize_t len) { struct flist *l; struct frag *lt; int pos = 0; /* assume worst case size, we won't have many of these lists */ l = lalloc(len / 12 + 1); if (!l) return NULL; lt = l->tail; while (pos >= 0 && pos < len) { lt->start = getbe32(bin + pos); lt->end = getbe32(bin + pos + 4); lt->len = getbe32(bin + pos + 8); lt->data = bin + pos + 12; pos += 12 + lt->len; if (lt->start > lt->end || lt->len < 0) break; /* sanity check */ lt++; } if (pos != len) { if (!PyErr_Occurred()) PyErr_SetString(mpatch_Error, "patch cannot be decoded"); lfree(l); return NULL; } l->tail = lt; return l; } /* calculate the size of resultant text */ static Py_ssize_t calcsize(Py_ssize_t len, struct flist *l) { Py_ssize_t outlen = 0, last = 0; struct frag *f = l->head; while (f != l->tail) { if (f->start < last || f->end > len) { if (!PyErr_Occurred()) PyErr_SetString(mpatch_Error, "invalid patch"); return -1; } outlen += f->start - last; last = f->end; 
outlen += f->len; f++; } outlen += len - last; return outlen; } static int apply(char *buf, const char *orig, Py_ssize_t len, struct flist *l) { struct frag *f = l->head; int last = 0; char *p = buf; while (f != l->tail) { if (f->start < last || f->end > len) { if (!PyErr_Occurred()) PyErr_SetString(mpatch_Error, "invalid patch"); return 0; } memcpy(p, orig + last, f->start - last); p += f->start - last; memcpy(p, f->data, f->len); last = f->end; p += f->len; f++; } memcpy(p, orig + last, len - last); return 1; } /* recursively generate a patch of all bins between start and end */ static struct flist *fold(PyObject *bins, Py_ssize_t start, Py_ssize_t end) { Py_ssize_t len, blen; const char *buffer; if (start + 1 == end) { /* trivial case, output a decoded list */ PyObject *tmp = PyList_GetItem(bins, start); if (!tmp) return NULL; if (PyObject_AsCharBuffer(tmp, &buffer, &blen)) return NULL; return decode(buffer, blen); } /* divide and conquer, memory management is elsewhere */ len = (end - start) / 2; return combine(fold(bins, start, start + len), fold(bins, start + len, end)); } static PyObject * patches(PyObject *self, PyObject *args) { PyObject *text, *bins, *result; struct flist *patch; const char *in; char *out; Py_ssize_t len, outlen, inlen; if (!PyArg_ParseTuple(args, "OO:mpatch", &text, &bins)) return NULL; len = PyList_Size(bins); if (!len) { /* nothing to do */ Py_INCREF(text); return text; } if (PyObject_AsCharBuffer(text, &in, &inlen)) return NULL; patch = fold(bins, 0, len); if (!patch) return NULL; outlen = calcsize(inlen, patch); if (outlen < 0) { result = NULL; goto cleanup; } result = PyBytes_FromStringAndSize(NULL, outlen); if (!result) { result = NULL; goto cleanup; } out = PyBytes_AsString(result); if (!apply(out, in, inlen, patch)) { Py_DECREF(result); result = NULL; } cleanup: lfree(patch); return result; } /* calculate size of a patched file directly */ static PyObject * patchedsize(PyObject *self, PyObject *args) { long orig, start, end, len, 
outlen = 0, last = 0, pos = 0; Py_ssize_t patchlen; char *bin; if (!PyArg_ParseTuple(args, "ls#", &orig, &bin, &patchlen)) return NULL; while (pos >= 0 && pos < patchlen) { start = getbe32(bin + pos); end = getbe32(bin + pos + 4); len = getbe32(bin + pos + 8); if (start > end) break; /* sanity check */ pos += 12 + len; outlen += start - last; last = end; outlen += len; } if (pos != patchlen) { if (!PyErr_Occurred()) PyErr_SetString(mpatch_Error, "patch cannot be decoded"); return NULL; } outlen += orig - last; return Py_BuildValue("l", outlen); } static PyMethodDef methods[] = { {"patches", patches, METH_VARARGS, "apply a series of patches\n"}, {"patchedsize", patchedsize, METH_VARARGS, "calculed patched size\n"}, {NULL, NULL} }; #ifdef IS_PY3K static struct PyModuleDef mpatch_module = { PyModuleDef_HEAD_INIT, "mpatch", mpatch_doc, -1, methods }; PyMODINIT_FUNC PyInit_mpatch(void) { PyObject *m; m = PyModule_Create(&mpatch_module); if (m == NULL) return NULL; mpatch_Error = PyErr_NewException("mpatch.mpatchError", NULL, NULL); Py_INCREF(mpatch_Error); PyModule_AddObject(m, "mpatchError", mpatch_Error); return m; } #else PyMODINIT_FUNC initmpatch(void) { Py_InitModule3("mpatch", methods, mpatch_doc); mpatch_Error = PyErr_NewException("mpatch.mpatchError", NULL, NULL); } #endif mercurial-3.7.3/mercurial/match.py0000644000175000017500000006012112676531524016557 0ustar mpmmpm00000000000000# match.py - filename matching # # Copyright 2008, 2009 Matt Mackall and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import copy import os import re from .i18n import _ from . 
import ( error, pathutil, util, ) propertycache = util.propertycache def _rematcher(regex): '''compile the regexp with the best available regexp engine and return a matcher function''' m = util.re.compile(regex) try: # slightly faster, provided by facebook's re2 bindings return m.test_match except AttributeError: return m.match def _expandsets(kindpats, ctx, listsubrepos): '''Returns the kindpats list with the 'set' patterns expanded.''' fset = set() other = [] for kind, pat, source in kindpats: if kind == 'set': if not ctx: raise error.Abort("fileset expression with no context") s = ctx.getfileset(pat) fset.update(s) if listsubrepos: for subpath in ctx.substate: s = ctx.sub(subpath).getfileset(pat) fset.update(subpath + '/' + f for f in s) continue other.append((kind, pat, source)) return fset, other def _expandsubinclude(kindpats, root): '''Returns the list of subinclude matchers and the kindpats without the subincludes in it.''' relmatchers = [] other = [] for kind, pat, source in kindpats: if kind == 'subinclude': sourceroot = pathutil.dirname(util.normpath(source)) pat = util.pconvert(pat) path = pathutil.join(sourceroot, pat) newroot = pathutil.dirname(path) relmatcher = match(newroot, '', [], ['include:%s' % path]) prefix = pathutil.canonpath(root, root, newroot) if prefix: prefix += '/' relmatchers.append((prefix, relmatcher)) else: other.append((kind, pat, source)) return relmatchers, other def _kindpatsalwaysmatch(kindpats): """"Checks whether the kindspats match everything, as e.g. 'relpath:.' does. 
""" for kind, pat, source in kindpats: if pat != '' or kind not in ['relpath', 'glob']: return False return True class match(object): def __init__(self, root, cwd, patterns, include=[], exclude=[], default='glob', exact=False, auditor=None, ctx=None, listsubrepos=False, warn=None, badfn=None): """build an object to match a set of file patterns arguments: root - the canonical root of the tree you're matching against cwd - the current working directory, if relevant patterns - patterns to find include - patterns to include (unless they are excluded) exclude - patterns to exclude (even if they are included) default - if a pattern in patterns has no explicit type, assume this one exact - patterns are actually filenames (include/exclude still apply) warn - optional function used for printing warnings badfn - optional bad() callback for this matcher instead of the default a pattern is one of: 'glob:' - a glob relative to cwd 're:' - a regular expression 'path:' - a path relative to repository root 'relglob:' - an unrooted glob (*.c matches C files in all dirs) 'relpath:' - a path relative to cwd 'relre:' - a regexp that needn't match the start of a name 'set:' - a fileset expression 'include:' - a file of patterns to read and include 'subinclude:' - a file of patterns to match against files under the same directory '' - a pattern of the specified default type """ self._root = root self._cwd = cwd self._files = [] # exact files and roots of patterns self._anypats = bool(include or exclude) self._always = False self._pathrestricted = bool(include or exclude or patterns) self._warn = warn self._includeroots = set() self._includedirs = set(['.']) self._excluderoots = set() if badfn is not None: self.bad = badfn matchfns = [] if include: kindpats = self._normalize(include, 'glob', root, cwd, auditor) self.includepat, im = _buildmatch(ctx, kindpats, '(?:/|$)', listsubrepos, root) self._includeroots.update(_roots(kindpats)) self._includedirs.update(util.dirs(self._includeroots)) 
matchfns.append(im) if exclude: kindpats = self._normalize(exclude, 'glob', root, cwd, auditor) self.excludepat, em = _buildmatch(ctx, kindpats, '(?:/|$)', listsubrepos, root) if not _anypats(kindpats): self._excluderoots.update(_roots(kindpats)) matchfns.append(lambda f: not em(f)) if exact: if isinstance(patterns, list): self._files = patterns else: self._files = list(patterns) matchfns.append(self.exact) elif patterns: kindpats = self._normalize(patterns, default, root, cwd, auditor) if not _kindpatsalwaysmatch(kindpats): self._files = _roots(kindpats) self._anypats = self._anypats or _anypats(kindpats) self.patternspat, pm = _buildmatch(ctx, kindpats, '$', listsubrepos, root) matchfns.append(pm) if not matchfns: m = util.always self._always = True elif len(matchfns) == 1: m = matchfns[0] else: def m(f): for matchfn in matchfns: if not matchfn(f): return False return True self.matchfn = m self._fileroots = set(self._files) def __call__(self, fn): return self.matchfn(fn) def __iter__(self): for f in self._files: yield f # Callbacks related to how the matcher is used by dirstate.walk. # Subscribers to these events must monkeypatch the matcher object. def bad(self, f, msg): '''Callback from dirstate.walk for each explicit file that can't be found/accessed, with an error message.''' pass # If an explicitdir is set, it will be called when an explicitly listed # directory is visited. explicitdir = None # If an traversedir is set, it will be called when a directory discovered # by recursive traversal is visited. traversedir = None def abs(self, f): '''Convert a repo path back to path that is relative to the root of the matcher.''' return f def rel(self, f): '''Convert repo path back to path that is relative to cwd of matcher.''' return util.pathto(self._root, self._cwd, f) def uipath(self, f): '''Convert repo path to a display path. If patterns or -I/-X were used to create this matcher, the display path will be relative to cwd. 
Otherwise it is relative to the root of the repo.''' return (self._pathrestricted and self.rel(f)) or self.abs(f) def files(self): '''Explicitly listed files or patterns or roots: if no patterns or .always(): empty list, if exact: list exact files, if not .anypats(): list all files and dirs, else: optimal roots''' return self._files @propertycache def _dirs(self): return set(util.dirs(self._fileroots)) | set(['.']) def visitdir(self, dir): '''Decides whether a directory should be visited based on whether it has potential matches in it or one of its subdirectories. This is based on the match's primary, included, and excluded patterns. Returns the string 'all' if the given directory and all subdirectories should be visited. Otherwise returns True or False indicating whether the given directory should be visited. This function's behavior is undefined if it has returned False for one of the dir's parent directories. ''' if self.prefix() and dir in self._fileroots: return 'all' if dir in self._excluderoots: return False if (self._includeroots and '.' not in self._includeroots and dir not in self._includeroots and dir not in self._includedirs and not any(parent in self._includeroots for parent in util.finddirs(dir))): return False return (not self._fileroots or '.' in self._fileroots or dir in self._fileroots or dir in self._dirs or any(parentdir in self._fileroots for parentdir in util.finddirs(dir))) def exact(self, f): '''Returns True if f is in .files().''' return f in self._fileroots def anypats(self): '''Matcher uses patterns or include/exclude.''' return self._anypats def always(self): '''Matcher will match everything and .files() will be empty - optimization might be possible and necessary.''' return self._always def ispartial(self): '''True if the matcher won't always match. 
Although it's just the inverse of _always in this implementation, an extension such as narrowhg might make it return something slightly different.''' return not self._always def isexact(self): return self.matchfn == self.exact def prefix(self): return not self.always() and not self.isexact() and not self.anypats() def _normalize(self, patterns, default, root, cwd, auditor): '''Convert 'kind:pat' from the patterns list to tuples with kind and normalized and rooted patterns and with listfiles expanded.''' kindpats = [] for kind, pat in [_patsplit(p, default) for p in patterns]: if kind in ('glob', 'relpath'): pat = pathutil.canonpath(root, cwd, pat, auditor) elif kind in ('relglob', 'path'): pat = util.normpath(pat) elif kind in ('listfile', 'listfile0'): try: files = util.readfile(pat) if kind == 'listfile0': files = files.split('\0') else: files = files.splitlines() files = [f for f in files if f] except EnvironmentError: raise error.Abort(_("unable to read file list (%s)") % pat) for k, p, source in self._normalize(files, default, root, cwd, auditor): kindpats.append((k, p, pat)) continue elif kind == 'include': try: fullpath = os.path.join(root, util.localpath(pat)) includepats = readpatternfile(fullpath, self._warn) for k, p, source in self._normalize(includepats, default, root, cwd, auditor): kindpats.append((k, p, source or pat)) except error.Abort as inst: raise error.Abort('%s: %s' % (pat, inst[0])) except IOError as inst: if self._warn: self._warn(_("skipping unreadable pattern file " "'%s': %s\n") % (pat, inst.strerror)) continue # else: re or relre - which cannot be normalized kindpats.append((kind, pat, '')) return kindpats def exact(root, cwd, files, badfn=None): return match(root, cwd, files, exact=True, badfn=badfn) def always(root, cwd): return match(root, cwd, []) def badmatch(match, badfn): """Make a copy of the given matcher, replacing its bad method with the given one. 
""" m = copy.copy(match) m.bad = badfn return m class narrowmatcher(match): """Adapt a matcher to work on a subdirectory only. The paths are remapped to remove/insert the path as needed: >>> m1 = match('root', '', ['a.txt', 'sub/b.txt']) >>> m2 = narrowmatcher('sub', m1) >>> bool(m2('a.txt')) False >>> bool(m2('b.txt')) True >>> bool(m2.matchfn('a.txt')) False >>> bool(m2.matchfn('b.txt')) True >>> m2.files() ['b.txt'] >>> m2.exact('b.txt') True >>> util.pconvert(m2.rel('b.txt')) 'sub/b.txt' >>> def bad(f, msg): ... print "%s: %s" % (f, msg) >>> m1.bad = bad >>> m2.bad('x.txt', 'No such file') sub/x.txt: No such file >>> m2.abs('c.txt') 'sub/c.txt' """ def __init__(self, path, matcher): self._root = matcher._root self._cwd = matcher._cwd self._path = path self._matcher = matcher self._always = matcher._always self._pathrestricted = matcher._pathrestricted self._files = [f[len(path) + 1:] for f in matcher._files if f.startswith(path + "/")] # If the parent repo had a path to this subrepo and no patterns are # specified, this submatcher always matches. if not self._always and not matcher._anypats: self._always = any(f == path for f in matcher._files) self._anypats = matcher._anypats self.matchfn = lambda fn: matcher.matchfn(self._path + "/" + fn) self._fileroots = set(self._files) def abs(self, f): return self._matcher.abs(self._path + "/" + f) def bad(self, f, msg): self._matcher.bad(self._path + "/" + f, msg) def rel(self, f): return self._matcher.rel(self._path + "/" + f) class icasefsmatcher(match): """A matcher for wdir on case insensitive filesystems, which normalizes the given patterns to the case in the filesystem. 
""" def __init__(self, root, cwd, patterns, include, exclude, default, auditor, ctx, listsubrepos=False, badfn=None): init = super(icasefsmatcher, self).__init__ self._dirstate = ctx.repo().dirstate self._dsnormalize = self._dirstate.normalize init(root, cwd, patterns, include, exclude, default, auditor=auditor, ctx=ctx, listsubrepos=listsubrepos, badfn=badfn) # m.exact(file) must be based off of the actual user input, otherwise # inexact case matches are treated as exact, and not noted without -v. if self._files: self._fileroots = set(_roots(self._kp)) def _normalize(self, patterns, default, root, cwd, auditor): self._kp = super(icasefsmatcher, self)._normalize(patterns, default, root, cwd, auditor) kindpats = [] for kind, pats, source in self._kp: if kind not in ('re', 'relre'): # regex can't be normalized p = pats pats = self._dsnormalize(pats) # Preserve the original to handle a case only rename. if p != pats and p in self._dirstate: kindpats.append((kind, p, source)) kindpats.append((kind, pats, source)) return kindpats def patkind(pattern, default=None): '''If pattern is 'kind:pat' with a known kind, return kind.''' return _patsplit(pattern, default)[0] def _patsplit(pattern, default): """Split a string into the optional pattern kind prefix and the actual pattern.""" if ':' in pattern: kind, pat = pattern.split(':', 1) if kind in ('re', 'glob', 'path', 'relglob', 'relpath', 'relre', 'listfile', 'listfile0', 'set', 'include', 'subinclude'): return kind, pat return default, pattern def _globre(pat): r'''Convert an extended glob string to a regexp string. >>> print _globre(r'?') . >>> print _globre(r'*') [^/]* >>> print _globre(r'**') .* >>> print _globre(r'**/a') (?:.*/)?a >>> print _globre(r'a/**/b') a\/(?:.*/)?b >>> print _globre(r'[a*?!^][^b][!c]') [a*?!^][\^b][^c] >>> print _globre(r'{a,b}') (?:a|b) >>> print _globre(r'.\*\?') \.\*\? 
''' i, n = 0, len(pat) res = '' group = 0 escape = util.re.escape def peek(): return i < n and pat[i] while i < n: c = pat[i] i += 1 if c not in '*?[{},\\': res += escape(c) elif c == '*': if peek() == '*': i += 1 if peek() == '/': i += 1 res += '(?:.*/)?' else: res += '.*' else: res += '[^/]*' elif c == '?': res += '.' elif c == '[': j = i if j < n and pat[j] in '!]': j += 1 while j < n and pat[j] != ']': j += 1 if j >= n: res += '\\[' else: stuff = pat[i:j].replace('\\','\\\\') i = j + 1 if stuff[0] == '!': stuff = '^' + stuff[1:] elif stuff[0] == '^': stuff = '\\' + stuff res = '%s[%s]' % (res, stuff) elif c == '{': group += 1 res += '(?:' elif c == '}' and group: res += ')' group -= 1 elif c == ',' and group: res += '|' elif c == '\\': p = peek() if p: i += 1 res += escape(p) else: res += escape(c) else: res += escape(c) return res def _regex(kind, pat, globsuffix): '''Convert a (normalized) pattern of any kind into a regular expression. globsuffix is appended to the regexp of globs.''' if not pat: return '' if kind == 're': return pat if kind == 'path': if pat == '.': return '' return '^' + util.re.escape(pat) + '(?:/|$)' if kind == 'relglob': return '(?:|.*/)' + _globre(pat) + globsuffix if kind == 'relpath': return util.re.escape(pat) + '(?:/|$)' if kind == 'relre': if pat.startswith('^'): return pat return '.*' + pat return _globre(pat) + globsuffix def _buildmatch(ctx, kindpats, globsuffix, listsubrepos, root): '''Return regexp string and a matcher function for kindpats. 
globsuffix is appended to the regexp of globs.''' matchfuncs = [] subincludes, kindpats = _expandsubinclude(kindpats, root) if subincludes: def matchsubinclude(f): for prefix, mf in subincludes: if f.startswith(prefix) and mf(f[len(prefix):]): return True return False matchfuncs.append(matchsubinclude) fset, kindpats = _expandsets(kindpats, ctx, listsubrepos) if fset: matchfuncs.append(fset.__contains__) regex = '' if kindpats: regex, mf = _buildregexmatch(kindpats, globsuffix) matchfuncs.append(mf) if len(matchfuncs) == 1: return regex, matchfuncs[0] else: return regex, lambda f: any(mf(f) for mf in matchfuncs) def _buildregexmatch(kindpats, globsuffix): """Build a match function from a list of kinds and kindpats, return regexp string and a matcher function.""" try: regex = '(?:%s)' % '|'.join([_regex(k, p, globsuffix) for (k, p, s) in kindpats]) if len(regex) > 20000: raise OverflowError return regex, _rematcher(regex) except OverflowError: # We're using a Python with a tiny regex engine and we # made it explode, so we'll divide the pattern list in two # until it works l = len(kindpats) if l < 2: raise regexa, a = _buildregexmatch(kindpats[:l//2], globsuffix) regexb, b = _buildregexmatch(kindpats[l//2:], globsuffix) return regex, lambda s: a(s) or b(s) except re.error: for k, p, s in kindpats: try: _rematcher('(?:%s)' % _regex(k, p, globsuffix)) except re.error: if s: raise error.Abort(_("%s: invalid pattern (%s): %s") % (s, k, p)) else: raise error.Abort(_("invalid pattern (%s): %s") % (k, p)) raise error.Abort(_("invalid pattern")) def _roots(kindpats): '''return roots and exact explicitly listed files from patterns >>> _roots([('glob', 'g/*', ''), ('glob', 'g', ''), ('glob', 'g*', '')]) ['g', 'g', '.'] >>> _roots([('relpath', 'r', ''), ('path', 'p/p', ''), ('path', '', '')]) ['r', 'p/p', '.'] >>> _roots([('relglob', 'rg*', ''), ('re', 're/', ''), ('relre', 'rr', '')]) ['.', '.', '.'] ''' r = [] for kind, pat, source in kindpats: if kind == 'glob': # find the 
non-glob prefix root = [] for p in pat.split('/'): if '[' in p or '{' in p or '*' in p or '?' in p: break root.append(p) r.append('/'.join(root) or '.') elif kind in ('relpath', 'path'): r.append(pat or '.') else: # relglob, re, relre r.append('.') return r def _anypats(kindpats): for kind, pat, source in kindpats: if kind in ('glob', 're', 'relglob', 'relre', 'set'): return True _commentre = None def readpatternfile(filepath, warn, sourceinfo=False): '''parse a pattern file, returning a list of patterns. These patterns should be given to compile() to be validated and converted into a match function. trailing white space is dropped. the escape character is backslash. comments start with #. empty lines are skipped. lines can be of the following formats: syntax: regexp # defaults following lines to non-rooted regexps syntax: glob # defaults following lines to non-rooted globs re:pattern # non-rooted regular expression glob:pattern # non-rooted glob pattern # pattern of the current default type if sourceinfo is set, returns a list of tuples: (pattern, lineno, originalline). This is useful to debug ignore patterns. 
''' syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:', 'include': 'include', 'subinclude': 'subinclude'} syntax = 'relre:' patterns = [] fp = open(filepath) for lineno, line in enumerate(fp, start=1): if "#" in line: global _commentre if not _commentre: _commentre = util.re.compile(r'((?:^|[^\\])(?:\\\\)*)#.*') # remove comments prefixed by an even number of escapes m = _commentre.search(line) if m: line = line[:m.end(1)] # fixup properly escaped comments that survived the above line = line.replace("\\#", "#") line = line.rstrip() if not line: continue if line.startswith('syntax:'): s = line[7:].strip() try: syntax = syntaxes[s] except KeyError: if warn: warn(_("%s: ignoring invalid syntax '%s'\n") % (filepath, s)) continue linesyntax = syntax for s, rels in syntaxes.iteritems(): if line.startswith(rels): linesyntax = rels line = line[len(rels):] break elif line.startswith(s+':'): linesyntax = rels line = line[len(s) + 1:] break if sourceinfo: patterns.append((linesyntax + line, lineno, line)) else: patterns.append(linesyntax + line) fp.close() return patterns mercurial-3.7.3/mercurial/wireproto.py0000644000175000017500000007157712676531525017537 0ustar mpmmpm00000000000000# wireproto.py - generic wire protocol support functions # # Copyright 2005-2010 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import os import sys import tempfile import urllib from .i18n import _ from .node import ( bin, hex, ) from . import ( bundle2, changegroup as changegroupmod, encoding, error, exchange, peer, pushkey as pushkeymod, streamclone, util, ) bundle2required = _( 'incompatible Mercurial client; bundle2 required\n' '(see https://www.mercurial-scm.org/wiki/IncompatibleClient)\n') class abstractserverproto(object): """abstract class that summarizes the protocol API Used as reference and documentation. 
""" def getargs(self, args): """return the value for arguments in returns a list of values (same order as )""" raise NotImplementedError() def getfile(self, fp): """write the whole content of a file into a file like object The file is in the form:: (\n)+0\n chunk size is the ascii version of the int. """ raise NotImplementedError() def redirect(self): """may setup interception for stdout and stderr See also the `restore` method.""" raise NotImplementedError() # If the `redirect` function does install interception, the `restore` # function MUST be defined. If interception is not used, this function # MUST NOT be defined. # # left commented here on purpose # #def restore(self): # """reinstall previous stdout and stderr and return intercepted stdout # """ # raise NotImplementedError() def groupchunks(self, cg): """return 4096 chunks from a changegroup object Some protocols may have compressed the contents.""" raise NotImplementedError() class remotebatch(peer.batcher): '''batches the queued calls; uses as few roundtrips as possible''' def __init__(self, remote): '''remote must support _submitbatch(encbatch) and _submitone(op, encargs)''' peer.batcher.__init__(self) self.remote = remote def submit(self): req, rsp = [], [] for name, args, opts, resref in self.calls: mtd = getattr(self.remote, name) batchablefn = getattr(mtd, 'batchable', None) if batchablefn is not None: batchable = batchablefn(mtd.im_self, *args, **opts) encargsorres, encresref = batchable.next() if encresref: req.append((name, encargsorres,)) rsp.append((batchable, encresref, resref,)) else: resref.set(encargsorres) else: if req: self._submitreq(req, rsp) req, rsp = [], [] resref.set(mtd(*args, **opts)) if req: self._submitreq(req, rsp) def _submitreq(self, req, rsp): encresults = self.remote._submitbatch(req) for encres, r in zip(encresults, rsp): batchable, encresref, resref = r encresref.set(encres) resref.set(batchable.next()) # Forward a couple of names from peer to make wireproto interactions # 
slightly more sensible. batchable = peer.batchable future = peer.future # list of nodes encoding / decoding def decodelist(l, sep=' '): if l: return map(bin, l.split(sep)) return [] def encodelist(l, sep=' '): try: return sep.join(map(hex, l)) except TypeError: raise # batched call argument encoding def escapearg(plain): return (plain .replace(':', ':c') .replace(',', ':o') .replace(';', ':s') .replace('=', ':e')) def unescapearg(escaped): return (escaped .replace(':e', '=') .replace(':s', ';') .replace(':o', ',') .replace(':c', ':')) # mapping of options accepted by getbundle and their types # # Meant to be extended by extensions. It is extensions responsibility to ensure # such options are properly processed in exchange.getbundle. # # supported types are: # # :nodes: list of binary nodes # :csv: list of comma-separated values # :scsv: list of comma-separated values return as set # :plain: string with no transformation needed. gboptsmap = {'heads': 'nodes', 'common': 'nodes', 'obsmarkers': 'boolean', 'bundlecaps': 'scsv', 'listkeys': 'csv', 'cg': 'boolean', 'cbattempted': 'boolean'} # client side class wirepeer(peer.peerrepository): """Client-side interface for communicating with a peer repository. Methods commonly call wire protocol commands of the same name. See also httppeer.py and sshpeer.py for protocol-specific implementations of this interface. 
""" def batch(self): if self.capable('batch'): return remotebatch(self) else: return peer.localbatch(self) def _submitbatch(self, req): cmds = [] for op, argsdict in req: args = ','.join('%s=%s' % (escapearg(k), escapearg(v)) for k, v in argsdict.iteritems()) cmds.append('%s %s' % (op, args)) rsp = self._call("batch", cmds=';'.join(cmds)) return [unescapearg(r) for r in rsp.split(';')] def _submitone(self, op, args): return self._call(op, **args) @batchable def lookup(self, key): self.requirecap('lookup', _('look up remote revision')) f = future() yield {'key': encoding.fromlocal(key)}, f d = f.value success, data = d[:-1].split(" ", 1) if int(success): yield bin(data) self._abort(error.RepoError(data)) @batchable def heads(self): f = future() yield {}, f d = f.value try: yield decodelist(d[:-1]) except ValueError: self._abort(error.ResponseError(_("unexpected response:"), d)) @batchable def known(self, nodes): f = future() yield {'nodes': encodelist(nodes)}, f d = f.value try: yield [bool(int(b)) for b in d] except ValueError: self._abort(error.ResponseError(_("unexpected response:"), d)) @batchable def branchmap(self): f = future() yield {}, f d = f.value try: branchmap = {} for branchpart in d.splitlines(): branchname, branchheads = branchpart.split(' ', 1) branchname = encoding.tolocal(urllib.unquote(branchname)) branchheads = decodelist(branchheads) branchmap[branchname] = branchheads yield branchmap except TypeError: self._abort(error.ResponseError(_("unexpected response:"), d)) def branches(self, nodes): n = encodelist(nodes) d = self._call("branches", nodes=n) try: br = [tuple(decodelist(b)) for b in d.splitlines()] return br except ValueError: self._abort(error.ResponseError(_("unexpected response:"), d)) def between(self, pairs): batch = 8 # avoid giant requests r = [] for i in xrange(0, len(pairs), batch): n = " ".join([encodelist(p, '-') for p in pairs[i:i + batch]]) d = self._call("between", pairs=n) try: r.extend(l and decodelist(l) or [] for l in 
d.splitlines()) except ValueError: self._abort(error.ResponseError(_("unexpected response:"), d)) return r @batchable def pushkey(self, namespace, key, old, new): if not self.capable('pushkey'): yield False, None f = future() self.ui.debug('preparing pushkey for "%s:%s"\n' % (namespace, key)) yield {'namespace': encoding.fromlocal(namespace), 'key': encoding.fromlocal(key), 'old': encoding.fromlocal(old), 'new': encoding.fromlocal(new)}, f d = f.value d, output = d.split('\n', 1) try: d = bool(int(d)) except ValueError: raise error.ResponseError( _('push failed (unexpected response):'), d) for l in output.splitlines(True): self.ui.status(_('remote: '), l) yield d @batchable def listkeys(self, namespace): if not self.capable('pushkey'): yield {}, None f = future() self.ui.debug('preparing listkeys for "%s"\n' % namespace) yield {'namespace': encoding.fromlocal(namespace)}, f d = f.value self.ui.debug('received listkey for "%s": %i bytes\n' % (namespace, len(d))) yield pushkeymod.decodekeys(d) def stream_out(self): return self._callstream('stream_out') def changegroup(self, nodes, kind): n = encodelist(nodes) f = self._callcompressable("changegroup", roots=n) return changegroupmod.cg1unpacker(f, 'UN') def changegroupsubset(self, bases, heads, kind): self.requirecap('changegroupsubset', _('look up remote changes')) bases = encodelist(bases) heads = encodelist(heads) f = self._callcompressable("changegroupsubset", bases=bases, heads=heads) return changegroupmod.cg1unpacker(f, 'UN') def getbundle(self, source, **kwargs): self.requirecap('getbundle', _('look up remote changes')) opts = {} bundlecaps = kwargs.get('bundlecaps') if bundlecaps is not None: kwargs['bundlecaps'] = sorted(bundlecaps) else: bundlecaps = () # kwargs could have it to None for key, value in kwargs.iteritems(): if value is None: continue keytype = gboptsmap.get(key) if keytype is None: assert False, 'unexpected' elif keytype == 'nodes': value = encodelist(value) elif keytype in ('csv', 'scsv'): 
value = ','.join(value) elif keytype == 'boolean': value = '%i' % bool(value) elif keytype != 'plain': raise KeyError('unknown getbundle option type %s' % keytype) opts[key] = value f = self._callcompressable("getbundle", **opts) if any((cap.startswith('HG2') for cap in bundlecaps)): return bundle2.getunbundler(self.ui, f) else: return changegroupmod.cg1unpacker(f, 'UN') def unbundle(self, cg, heads, source): '''Send cg (a readable file-like object representing the changegroup to push, typically a chunkbuffer object) to the remote server as a bundle. When pushing a bundle10 stream, return an integer indicating the result of the push (see localrepository.addchangegroup()). When pushing a bundle20 stream, return a bundle20 stream.''' if heads != ['force'] and self.capable('unbundlehash'): heads = encodelist(['hashed', util.sha1(''.join(sorted(heads))).digest()]) else: heads = encodelist(heads) if util.safehasattr(cg, 'deltaheader'): # this a bundle10, do the old style call sequence ret, output = self._callpush("unbundle", cg, heads=heads) if ret == "": raise error.ResponseError( _('push failed:'), output) try: ret = int(ret) except ValueError: raise error.ResponseError( _('push failed (unexpected response):'), ret) for l in output.splitlines(True): self.ui.status(_('remote: '), l) else: # bundle2 push. Send a stream, fetch a stream. stream = self._calltwowaystream('unbundle', cg, heads=heads) ret = bundle2.getunbundler(self.ui, stream) return ret def debugwireargs(self, one, two, three=None, four=None, five=None): # don't pass optional arguments left at their default value opts = {} if three is not None: opts['three'] = three if four is not None: opts['four'] = four return self._call('debugwireargs', one=one, two=two, **opts) def _call(self, cmd, **args): """execute on the server The command is expected to return a simple string. 
returns the server reply as a string.""" raise NotImplementedError() def _callstream(self, cmd, **args): """execute on the server The command is expected to return a stream. returns the server reply as a file like object.""" raise NotImplementedError() def _callcompressable(self, cmd, **args): """execute on the server The command is expected to return a stream. The stream may have been compressed in some implementations. This function takes care of the decompression. This is the only difference with _callstream. returns the server reply as a file like object. """ raise NotImplementedError() def _callpush(self, cmd, fp, **args): """execute a on server The command is expected to be related to a push. Push has a special return method. returns the server reply as a (ret, output) tuple. ret is either empty (error) or a stringified int. """ raise NotImplementedError() def _calltwowaystream(self, cmd, fp, **args): """execute on server The command will send a stream to the server and get a stream in reply. """ raise NotImplementedError() def _abort(self, exception): """clearly abort the wire protocol connection and raise the exception """ raise NotImplementedError() # server side # wire protocol command can either return a string or one of these classes. class streamres(object): """wireproto reply: binary stream The call was successful and the result is a stream. Iterate on the `self.gen` attribute to retrieve chunks. """ def __init__(self, gen): self.gen = gen class pushres(object): """wireproto reply: success with simple integer return The call was successful and returned an integer contained in `self.res`. """ def __init__(self, res): self.res = res class pusherr(object): """wireproto reply: failure The call failed. The `self.res` attribute contains the error message. """ def __init__(self, res): self.res = res class ooberror(object): """wireproto reply: failure of a batch of operation Something failed during a batch call. The error message is stored in `self.message`. 
""" def __init__(self, message): self.message = message def dispatch(repo, proto, command): repo = repo.filtered("served") func, spec = commands[command] args = proto.getargs(spec) return func(repo, proto, *args) def options(cmd, keys, others): opts = {} for k in keys: if k in others: opts[k] = others[k] del others[k] if others: sys.stderr.write("warning: %s ignored unexpected arguments %s\n" % (cmd, ",".join(others))) return opts def bundle1allowed(repo, action): """Whether a bundle1 operation is allowed from the server. Priority is: 1. server.bundle1gd. (if generaldelta active) 2. server.bundle1. 3. server.bundle1gd (if generaldelta active) 4. server.bundle1 """ ui = repo.ui gd = 'generaldelta' in repo.requirements if gd: v = ui.configbool('server', 'bundle1gd.%s' % action, None) if v is not None: return v v = ui.configbool('server', 'bundle1.%s' % action, None) if v is not None: return v if gd: v = ui.configbool('server', 'bundle1gd', None) if v is not None: return v return ui.configbool('server', 'bundle1', True) # list of commands commands = {} def wireprotocommand(name, args=''): """decorator for wire protocol command""" def register(func): commands[name] = (func, args) return func return register @wireprotocommand('batch', 'cmds *') def batch(repo, proto, cmds, others): repo = repo.filtered("served") res = [] for pair in cmds.split(';'): op, args = pair.split(' ', 1) vals = {} for a in args.split(','): if a: n, v = a.split('=') vals[n] = unescapearg(v) func, spec = commands[op] if spec: keys = spec.split() data = {} for k in keys: if k == '*': star = {} for key in vals.keys(): if key not in keys: star[key] = vals[key] data['*'] = star else: data[k] = vals[k] result = func(repo, proto, *[data[k] for k in keys]) else: result = func(repo, proto) if isinstance(result, ooberror): return result res.append(escapearg(result)) return ';'.join(res) @wireprotocommand('between', 'pairs') def between(repo, proto, pairs): pairs = [decodelist(p, '-') for p in pairs.split(" 
")] r = [] for b in repo.between(pairs): r.append(encodelist(b) + "\n") return "".join(r) @wireprotocommand('branchmap') def branchmap(repo, proto): branchmap = repo.branchmap() heads = [] for branch, nodes in branchmap.iteritems(): branchname = urllib.quote(encoding.fromlocal(branch)) branchnodes = encodelist(nodes) heads.append('%s %s' % (branchname, branchnodes)) return '\n'.join(heads) @wireprotocommand('branches', 'nodes') def branches(repo, proto, nodes): nodes = decodelist(nodes) r = [] for b in repo.branches(nodes): r.append(encodelist(b) + "\n") return "".join(r) @wireprotocommand('clonebundles', '') def clonebundles(repo, proto): """Server command for returning info for available bundles to seed clones. Clients will parse this response and determine what bundle to fetch. Extensions may wrap this command to filter or dynamically emit data depending on the request. e.g. you could advertise URLs for the closest data center given the client's IP address. """ return repo.opener.tryread('clonebundles.manifest') wireprotocaps = ['lookup', 'changegroupsubset', 'branchmap', 'pushkey', 'known', 'getbundle', 'unbundlehash', 'batch'] def _capabilities(repo, proto): """return a list of capabilities for a repo This function exists to allow extensions to easily wrap capabilities computation - returns a lists: easy to alter - change done here will be propagated to both `capabilities` and `hello` command without any other action needed. 
""" # copy to prevent modification of the global list caps = list(wireprotocaps) if streamclone.allowservergeneration(repo.ui): if repo.ui.configbool('server', 'preferuncompressed', False): caps.append('stream-preferred') requiredformats = repo.requirements & repo.supportedformats # if our local revlogs are just revlogv1, add 'stream' cap if not requiredformats - set(('revlogv1',)): caps.append('stream') # otherwise, add 'streamreqs' detailing our local revlog format else: caps.append('streamreqs=%s' % ','.join(sorted(requiredformats))) if repo.ui.configbool('experimental', 'bundle2-advertise', True): capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo)) caps.append('bundle2=' + urllib.quote(capsblob)) caps.append('unbundle=%s' % ','.join(changegroupmod.bundlepriority)) caps.append( 'httpheader=%d' % repo.ui.configint('server', 'maxhttpheaderlen', 1024)) return caps # If you are writing an extension and consider wrapping this function. Wrap # `_capabilities` instead. @wireprotocommand('capabilities') def capabilities(repo, proto): return ' '.join(_capabilities(repo, proto)) @wireprotocommand('changegroup', 'roots') def changegroup(repo, proto, roots): nodes = decodelist(roots) cg = changegroupmod.changegroup(repo, nodes, 'serve') return streamres(proto.groupchunks(cg)) @wireprotocommand('changegroupsubset', 'bases heads') def changegroupsubset(repo, proto, bases, heads): bases = decodelist(bases) heads = decodelist(heads) cg = changegroupmod.changegroupsubset(repo, bases, heads, 'serve') return streamres(proto.groupchunks(cg)) @wireprotocommand('debugwireargs', 'one two *') def debugwireargs(repo, proto, one, two, others): # only accept optional args from the known set opts = options('debugwireargs', ['three', 'four'], others) return repo.debugwireargs(one, two, **opts) # List of options accepted by getbundle. # # Meant to be extended by extensions. It is the extension's responsibility to # ensure such options are properly processed in exchange.getbundle. 
gboptslist = ['heads', 'common', 'bundlecaps'] @wireprotocommand('getbundle', '*') def getbundle(repo, proto, others): opts = options('getbundle', gboptsmap.keys(), others) for k, v in opts.iteritems(): keytype = gboptsmap[k] if keytype == 'nodes': opts[k] = decodelist(v) elif keytype == 'csv': opts[k] = list(v.split(',')) elif keytype == 'scsv': opts[k] = set(v.split(',')) elif keytype == 'boolean': # Client should serialize False as '0', which is a non-empty string # so it evaluates as a True bool. if v == '0': opts[k] = False else: opts[k] = bool(v) elif keytype != 'plain': raise KeyError('unknown getbundle option type %s' % keytype) if not bundle1allowed(repo, 'pull'): if not exchange.bundle2requested(opts.get('bundlecaps')): return ooberror(bundle2required) cg = exchange.getbundle(repo, 'serve', **opts) return streamres(proto.groupchunks(cg)) @wireprotocommand('heads') def heads(repo, proto): h = repo.heads() return encodelist(h) + "\n" @wireprotocommand('hello') def hello(repo, proto): '''the hello command returns a set of lines describing various interesting things about the server, in an RFC822-like format. 
Currently the only one defined is "capabilities", which consists of a line in the form: capabilities: space separated list of tokens ''' return "capabilities: %s\n" % (capabilities(repo, proto)) @wireprotocommand('listkeys', 'namespace') def listkeys(repo, proto, namespace): d = repo.listkeys(encoding.tolocal(namespace)).items() return pushkeymod.encodekeys(d) @wireprotocommand('lookup', 'key') def lookup(repo, proto, key): try: k = encoding.tolocal(key) c = repo[k] r = c.hex() success = 1 except Exception as inst: r = str(inst) success = 0 return "%s %s\n" % (success, r) @wireprotocommand('known', 'nodes *') def known(repo, proto, nodes, others): return ''.join(b and "1" or "0" for b in repo.known(decodelist(nodes))) @wireprotocommand('pushkey', 'namespace key old new') def pushkey(repo, proto, namespace, key, old, new): # compatibility with pre-1.8 clients which were accidentally # sending raw binary nodes rather than utf-8-encoded hex if len(new) == 20 and new.encode('string-escape') != new: # looks like it could be a binary node try: new.decode('utf-8') new = encoding.tolocal(new) # but cleanly decodes as UTF-8 except UnicodeDecodeError: pass # binary, leave unmodified else: new = encoding.tolocal(new) # normal path if util.safehasattr(proto, 'restore'): proto.redirect() try: r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key), encoding.tolocal(old), new) or False except error.Abort: r = False output = proto.restore() return '%s\n%s' % (int(r), output) r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key), encoding.tolocal(old), new) return '%s\n' % int(r) @wireprotocommand('stream_out') def stream(repo, proto): '''If the server supports streaming clone, it advertises the "stream" capability with a value representing the version and flags of the repo it is serving. Client checks to see if it understands the format. 
''' if not streamclone.allowservergeneration(repo.ui): return '1\n' def getstream(it): yield '0\n' for chunk in it: yield chunk try: # LockError may be raised before the first result is yielded. Don't # emit output until we're sure we got the lock successfully. it = streamclone.generatev1wireproto(repo) return streamres(getstream(it)) except error.LockError: return '2\n' @wireprotocommand('unbundle', 'heads') def unbundle(repo, proto, heads): their_heads = decodelist(heads) try: proto.redirect() exchange.check_heads(repo, their_heads, 'preparing changes') # write bundle data to temporary file because it can be big fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-') fp = os.fdopen(fd, 'wb+') r = 0 try: proto.getfile(fp) fp.seek(0) gen = exchange.readbundle(repo.ui, fp, None) if (isinstance(gen, changegroupmod.cg1unpacker) and not bundle1allowed(repo, 'push')): return ooberror(bundle2required) r = exchange.unbundle(repo, gen, their_heads, 'serve', proto._client()) if util.safehasattr(r, 'addpart'): # The return looks streamable, we are in the bundle2 case and # should return a stream. return streamres(r.getchunks()) return pushres(r) finally: fp.close() os.unlink(tempname) except (error.BundleValueError, error.Abort, error.PushRaced) as exc: # handle non-bundle2 case first if not getattr(exc, 'duringunbundle2', False): try: raise except error.Abort: # The old code we moved used sys.stderr directly. # We did not change it to minimise code change. # This need to be moved to something proper. # Feel free to do it. 
sys.stderr.write("abort: %s\n" % exc) return pushres(0) except error.PushRaced: return pusherr(str(exc)) bundler = bundle2.bundle20(repo.ui) for out in getattr(exc, '_bundle2salvagedoutput', ()): bundler.addpart(out) try: try: raise except error.PushkeyFailed as exc: # check client caps remotecaps = getattr(exc, '_replycaps', None) if (remotecaps is not None and 'pushkey' not in remotecaps.get('error', ())): # no support remote side, fallback to Abort handler. raise part = bundler.newpart('error:pushkey') part.addparam('in-reply-to', exc.partid) if exc.namespace is not None: part.addparam('namespace', exc.namespace, mandatory=False) if exc.key is not None: part.addparam('key', exc.key, mandatory=False) if exc.new is not None: part.addparam('new', exc.new, mandatory=False) if exc.old is not None: part.addparam('old', exc.old, mandatory=False) if exc.ret is not None: part.addparam('ret', exc.ret, mandatory=False) except error.BundleValueError as exc: errpart = bundler.newpart('error:unsupportedcontent') if exc.parttype is not None: errpart.addparam('parttype', exc.parttype) if exc.params: errpart.addparam('params', '\0'.join(exc.params)) except error.Abort as exc: manargs = [('message', str(exc))] advargs = [] if exc.hint is not None: advargs.append(('hint', exc.hint)) bundler.addpart(bundle2.bundlepart('error:abort', manargs, advargs)) except error.PushRaced as exc: bundler.newpart('error:pushraced', [('message', str(exc))]) return streamres(bundler.getchunks()) mercurial-3.7.3/mercurial/commandserver.py0000644000175000017500000002566712676531525020351 0ustar mpmmpm00000000000000# commandserver.py - communicate with Mercurial's API over a pipe # # Copyright Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import SocketServer import errno import os import struct import sys import traceback from .i18n import _ from . 
import ( encoding, error, util, ) logfile = None def log(*args): if not logfile: return for a in args: logfile.write(str(a)) logfile.flush() class channeledoutput(object): """ Write data to out in the following format: data length (unsigned int), data """ def __init__(self, out, channel): self.out = out self.channel = channel @property def name(self): return '<%c-channel>' % self.channel def write(self, data): if not data: return self.out.write(struct.pack('>cI', self.channel, len(data))) self.out.write(data) self.out.flush() def __getattr__(self, attr): if attr in ('isatty', 'fileno', 'tell', 'seek'): raise AttributeError(attr) return getattr(self.out, attr) class channeledinput(object): """ Read data from in_. Requests for input are written to out in the following format: channel identifier - 'I' for plain input, 'L' line based (1 byte) how many bytes to send at most (unsigned int), The client replies with: data length (unsigned int), 0 meaning EOF data """ maxchunksize = 4 * 1024 def __init__(self, in_, out, channel): self.in_ = in_ self.out = out self.channel = channel @property def name(self): return '<%c-channel>' % self.channel def read(self, size=-1): if size < 0: # if we need to consume all the clients input, ask for 4k chunks # so the pipe doesn't fill up risking a deadlock size = self.maxchunksize s = self._read(size, self.channel) buf = s while s: s = self._read(size, self.channel) buf += s return buf else: return self._read(size, self.channel) def _read(self, size, channel): if not size: return '' assert size > 0 # tell the client we need at most size bytes self.out.write(struct.pack('>cI', channel, size)) self.out.flush() length = self.in_.read(4) length = struct.unpack('>I', length)[0] if not length: return '' else: return self.in_.read(length) def readline(self, size=-1): if size < 0: size = self.maxchunksize s = self._read(size, 'L') buf = s # keep asking for more until there's either no more or # we got a full line while s and s[-1] != '\n': s = 
self._read(size, 'L') buf += s return buf else: return self._read(size, 'L') def __iter__(self): return self def next(self): l = self.readline() if not l: raise StopIteration return l def __getattr__(self, attr): if attr in ('isatty', 'fileno', 'tell', 'seek'): raise AttributeError(attr) return getattr(self.in_, attr) class server(object): """ Listens for commands on fin, runs them and writes the output on a channel based stream to fout. """ def __init__(self, ui, repo, fin, fout): self.cwd = os.getcwd() # developer config: cmdserver.log logpath = ui.config("cmdserver", "log", None) if logpath: global logfile if logpath == '-': # write log on a special 'd' (debug) channel logfile = channeledoutput(fout, 'd') else: logfile = open(logpath, 'a') if repo: # the ui here is really the repo ui so take its baseui so we don't # end up with its local configuration self.ui = repo.baseui self.repo = repo self.repoui = repo.ui else: self.ui = ui self.repo = self.repoui = None self.cerr = channeledoutput(fout, 'e') self.cout = channeledoutput(fout, 'o') self.cin = channeledinput(fin, fout, 'I') self.cresult = channeledoutput(fout, 'r') self.client = fin def _read(self, size): if not size: return '' data = self.client.read(size) # is the other end closed? if not data: raise EOFError return data def runcommand(self): """ reads a list of \0 terminated arguments, executes and writes the return code to the result channel """ from . import dispatch # avoid cycle length = struct.unpack('>I', self._read(4))[0] if not length: args = [] else: args = self._read(length).split('\0') # copy the uis so changes (e.g. 
--config or --verbose) don't # persist between requests copiedui = self.ui.copy() uis = [copiedui] if self.repo: self.repo.baseui = copiedui # clone ui without using ui.copy because this is protected repoui = self.repoui.__class__(self.repoui) repoui.copy = copiedui.copy # redo copy protection uis.append(repoui) self.repo.ui = self.repo.dirstate._ui = repoui self.repo.invalidateall() # reset last-print time of progress bar per command # (progbar is singleton, we don't have to do for all uis) if copiedui._progbar: copiedui._progbar.resetstate() for ui in uis: # any kind of interaction must use server channels, but chg may # replace channels by fully functional tty files. so nontty is # enforced only if cin is a channel. if not util.safehasattr(self.cin, 'fileno'): ui.setconfig('ui', 'nontty', 'true', 'commandserver') req = dispatch.request(args[:], copiedui, self.repo, self.cin, self.cout, self.cerr) ret = (dispatch.dispatch(req) or 0) & 255 # might return None # restore old cwd if '--cwd' in args: os.chdir(self.cwd) self.cresult.write(struct.pack('>i', int(ret))) def getencoding(self): """ writes the current encoding to the result channel """ self.cresult.write(encoding.encoding) def serveone(self): cmd = self.client.readline()[:-1] if cmd: handler = self.capabilities.get(cmd) if handler: handler(self) else: # clients are expected to check what commands are supported by # looking at the servers capabilities raise error.Abort(_('unknown command %s') % cmd) return cmd != '' capabilities = {'runcommand' : runcommand, 'getencoding' : getencoding} def serve(self): hellomsg = 'capabilities: ' + ' '.join(sorted(self.capabilities)) hellomsg += '\n' hellomsg += 'encoding: ' + encoding.encoding hellomsg += '\n' hellomsg += 'pid: %d' % os.getpid() # write the hello msg in -one- chunk self.cout.write(hellomsg) try: while self.serveone(): pass except EOFError: # we'll get here if the client disconnected while we were reading # its request return 1 return 0 def _protectio(ui): 
""" duplicates streams and redirect original to null if ui uses stdio """ ui.flush() newfiles = [] nullfd = os.open(os.devnull, os.O_RDWR) for f, sysf, mode in [(ui.fin, sys.stdin, 'rb'), (ui.fout, sys.stdout, 'wb')]: if f is sysf: newfd = os.dup(f.fileno()) os.dup2(nullfd, f.fileno()) f = os.fdopen(newfd, mode) newfiles.append(f) os.close(nullfd) return tuple(newfiles) def _restoreio(ui, fin, fout): """ restores streams from duplicated ones """ ui.flush() for f, uif in [(fin, ui.fin), (fout, ui.fout)]: if f is not uif: os.dup2(f.fileno(), uif.fileno()) f.close() class pipeservice(object): def __init__(self, ui, repo, opts): self.ui = ui self.repo = repo def init(self): pass def run(self): ui = self.ui # redirect stdio to null device so that broken extensions or in-process # hooks will never cause corruption of channel protocol. fin, fout = _protectio(ui) try: sv = server(ui, self.repo, fin, fout) return sv.serve() finally: _restoreio(ui, fin, fout) class _requesthandler(SocketServer.StreamRequestHandler): def handle(self): ui = self.server.ui repo = self.server.repo sv = server(ui, repo, self.rfile, self.wfile) try: try: sv.serve() # handle exceptions that may be raised by command server. most of # known exceptions are caught by dispatch. except error.Abort as inst: ui.warn(_('abort: %s\n') % inst) except IOError as inst: if inst.errno != errno.EPIPE: raise except KeyboardInterrupt: pass except: # re-raises # also write traceback to error channel. otherwise client cannot # see it because it is written to server's stderr by default. 
traceback.print_exc(file=sv.cerr) raise class unixservice(object): """ Listens on unix domain socket and forks server per connection """ def __init__(self, ui, repo, opts): self.ui = ui self.repo = repo self.address = opts['address'] if not util.safehasattr(SocketServer, 'UnixStreamServer'): raise error.Abort(_('unsupported platform')) if not self.address: raise error.Abort(_('no socket path specified with --address')) def init(self): class cls(SocketServer.ForkingMixIn, SocketServer.UnixStreamServer): ui = self.ui repo = self.repo self.server = cls(self.address, _requesthandler) self.ui.status(_('listening at %s\n') % self.address) self.ui.flush() # avoid buffering of status message def run(self): try: self.server.serve_forever() finally: os.unlink(self.address) _servicemap = { 'pipe': pipeservice, 'unix': unixservice, } def createservice(ui, repo, opts): mode = opts['cmdserver'] try: return _servicemap[mode](ui, repo, opts) except KeyError: raise error.Abort(_('unknown mode %s') % mode) mercurial-3.7.3/mercurial/store.py0000644000175000017500000004121212676531525016620 0ustar mpmmpm00000000000000# store.py - repository store handling for Mercurial # # Copyright 2008 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import errno import os import stat from .i18n import _ from . 
import ( error, parsers, scmutil, util, ) _sha = util.sha1 # This avoids a collision between a file named foo and a dir named # foo.i or foo.d def _encodedir(path): ''' >>> _encodedir('data/foo.i') 'data/foo.i' >>> _encodedir('data/foo.i/bla.i') 'data/foo.i.hg/bla.i' >>> _encodedir('data/foo.i.hg/bla.i') 'data/foo.i.hg.hg/bla.i' >>> _encodedir('data/foo.i\\ndata/foo.i/bla.i\\ndata/foo.i.hg/bla.i\\n') 'data/foo.i\\ndata/foo.i.hg/bla.i\\ndata/foo.i.hg.hg/bla.i\\n' ''' return (path .replace(".hg/", ".hg.hg/") .replace(".i/", ".i.hg/") .replace(".d/", ".d.hg/")) encodedir = getattr(parsers, 'encodedir', _encodedir) def decodedir(path): ''' >>> decodedir('data/foo.i') 'data/foo.i' >>> decodedir('data/foo.i.hg/bla.i') 'data/foo.i/bla.i' >>> decodedir('data/foo.i.hg.hg/bla.i') 'data/foo.i.hg/bla.i' ''' if ".hg/" not in path: return path return (path .replace(".d.hg/", ".d/") .replace(".i.hg/", ".i/") .replace(".hg.hg/", ".hg/")) def _buildencodefun(): ''' >>> enc, dec = _buildencodefun() >>> enc('nothing/special.txt') 'nothing/special.txt' >>> dec('nothing/special.txt') 'nothing/special.txt' >>> enc('HELLO') '_h_e_l_l_o' >>> dec('_h_e_l_l_o') 'HELLO' >>> enc('hello:world?') 'hello~3aworld~3f' >>> dec('hello~3aworld~3f') 'hello:world?' 
>>> enc('the\x07quick\xADshot') 'the~07quick~adshot' >>> dec('the~07quick~adshot') 'the\\x07quick\\xadshot' ''' e = '_' winreserved = [ord(x) for x in '\\:*?"<>|'] cmap = dict([(chr(x), chr(x)) for x in xrange(127)]) for x in (range(32) + range(126, 256) + winreserved): cmap[chr(x)] = "~%02x" % x for x in range(ord("A"), ord("Z") + 1) + [ord(e)]: cmap[chr(x)] = e + chr(x).lower() dmap = {} for k, v in cmap.iteritems(): dmap[v] = k def decode(s): i = 0 while i < len(s): for l in xrange(1, 4): try: yield dmap[s[i:i + l]] i += l break except KeyError: pass else: raise KeyError return (lambda s: ''.join([cmap[c] for c in s]), lambda s: ''.join(list(decode(s)))) _encodefname, _decodefname = _buildencodefun() def encodefilename(s): ''' >>> encodefilename('foo.i/bar.d/bla.hg/hi:world?/HELLO') 'foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o' ''' return _encodefname(encodedir(s)) def decodefilename(s): ''' >>> decodefilename('foo.i.hg/bar.d.hg/bla.hg.hg/hi~3aworld~3f/_h_e_l_l_o') 'foo.i/bar.d/bla.hg/hi:world?/HELLO' ''' return decodedir(_decodefname(s)) def _buildlowerencodefun(): ''' >>> f = _buildlowerencodefun() >>> f('nothing/special.txt') 'nothing/special.txt' >>> f('HELLO') 'hello' >>> f('hello:world?') 'hello~3aworld~3f' >>> f('the\x07quick\xADshot') 'the~07quick~adshot' ''' winreserved = [ord(x) for x in '\\:*?"<>|'] cmap = dict([(chr(x), chr(x)) for x in xrange(127)]) for x in (range(32) + range(126, 256) + winreserved): cmap[chr(x)] = "~%02x" % x for x in range(ord("A"), ord("Z") + 1): cmap[chr(x)] = chr(x).lower() return lambda s: "".join([cmap[c] for c in s]) lowerencode = getattr(parsers, 'lowerencode', None) or _buildlowerencodefun() # Windows reserved names: con, prn, aux, nul, com1..com9, lpt1..lpt9 _winres3 = ('aux', 'con', 'prn', 'nul') # length 3 _winres4 = ('com', 'lpt') # length 4 (with trailing 1..9) def _auxencode(path, dotencode): ''' Encodes filenames containing names reserved by Windows or which end in period or space. 
Does not touch other single reserved characters c. Specifically, c in '\\:*?"<>|' or ord(c) <= 31 are *not* encoded here. Additionally encodes space or period at the beginning, if dotencode is True. Parameter path is assumed to be all lowercase. A segment only needs encoding if a reserved name appears as a basename (e.g. "aux", "aux.foo"). A directory or file named "foo.aux" doesn't need encoding. >>> s = '.foo/aux.txt/txt.aux/con/prn/nul/foo.' >>> _auxencode(s.split('/'), True) ['~2efoo', 'au~78.txt', 'txt.aux', 'co~6e', 'pr~6e', 'nu~6c', 'foo~2e'] >>> s = '.com1com2/lpt9.lpt4.lpt1/conprn/com0/lpt0/foo.' >>> _auxencode(s.split('/'), False) ['.com1com2', 'lp~749.lpt4.lpt1', 'conprn', 'com0', 'lpt0', 'foo~2e'] >>> _auxencode(['foo. '], True) ['foo.~20'] >>> _auxencode([' .foo'], True) ['~20.foo'] ''' for i, n in enumerate(path): if not n: continue if dotencode and n[0] in '. ': n = "~%02x" % ord(n[0]) + n[1:] path[i] = n else: l = n.find('.') if l == -1: l = len(n) if ((l == 3 and n[:3] in _winres3) or (l == 4 and n[3] <= '9' and n[3] >= '1' and n[:3] in _winres4)): # encode third letter ('aux' -> 'au~78') ec = "~%02x" % ord(n[2]) n = n[0:2] + ec + n[3:] path[i] = n if n[-1] in '. ': # encode last period or space ('foo...' -> 'foo..~2e') path[i] = n[:-1] + "~%02x" % ord(n[-1]) return path _maxstorepathlen = 120 _dirprefixlen = 8 _maxshortdirslen = 8 * (_dirprefixlen + 1) - 4 def _hashencode(path, dotencode): digest = _sha(path).hexdigest() le = lowerencode(path[5:]).split('/') # skips prefix 'data/' or 'meta/' parts = _auxencode(le, dotencode) basename = parts[-1] _root, ext = os.path.splitext(basename) sdirs = [] sdirslen = 0 for p in parts[:-1]: d = p[:_dirprefixlen] if d[-1] in '. 
': # Windows can't access dirs ending in period or space d = d[:-1] + '_' if sdirslen == 0: t = len(d) else: t = sdirslen + 1 + len(d) if t > _maxshortdirslen: break sdirs.append(d) sdirslen = t dirs = '/'.join(sdirs) if len(dirs) > 0: dirs += '/' res = 'dh/' + dirs + digest + ext spaceleft = _maxstorepathlen - len(res) if spaceleft > 0: filler = basename[:spaceleft] res = 'dh/' + dirs + filler + digest + ext return res def _hybridencode(path, dotencode): '''encodes path with a length limit Encodes all paths that begin with 'data/', according to the following. Default encoding (reversible): Encodes all uppercase letters 'X' as '_x'. All reserved or illegal characters are encoded as '~xx', where xx is the two digit hex code of the character (see encodefilename). Relevant path components consisting of Windows reserved filenames are masked by encoding the third character ('aux' -> 'au~78', see _auxencode). Hashed encoding (not reversible): If the default-encoded path is longer than _maxstorepathlen, a non-reversible hybrid hashing of the path is done instead. This encoding uses up to _dirprefixlen characters of all directory levels of the lowerencoded path, but not more levels than can fit into _maxshortdirslen. Then follows the filler followed by the sha digest of the full path. The filler is the beginning of the basename of the lowerencoded path (the basename is everything after the last path separator). The filler is as long as possible, filling in characters from the basename until the encoded path has _maxstorepathlen characters (or all chars of the basename have been taken). The extension (e.g. '.i' or '.d') is preserved. The string 'data/' at the beginning is replaced with 'dh/', if the hashed encoding was used. 
''' path = encodedir(path) ef = _encodefname(path).split('/') res = '/'.join(_auxencode(ef, dotencode)) if len(res) > _maxstorepathlen: res = _hashencode(path, dotencode) return res def _pathencode(path): de = encodedir(path) if len(path) > _maxstorepathlen: return _hashencode(de, True) ef = _encodefname(de).split('/') res = '/'.join(_auxencode(ef, True)) if len(res) > _maxstorepathlen: return _hashencode(de, True) return res _pathencode = getattr(parsers, 'pathencode', _pathencode) def _plainhybridencode(f): return _hybridencode(f, False) def _calcmode(vfs): try: # files in .hg/ will be created using this mode mode = vfs.stat().st_mode # avoid some useless chmods if (0o777 & ~util.umask) == (0o777 & mode): mode = None except OSError: mode = None return mode _data = ('data 00manifest.d 00manifest.i 00changelog.d 00changelog.i' ' phaseroots obsstore') class basicstore(object): '''base class for local repository stores''' def __init__(self, path, vfstype): vfs = vfstype(path) self.path = vfs.base self.createmode = _calcmode(vfs) vfs.createmode = self.createmode self.rawvfs = vfs self.vfs = scmutil.filtervfs(vfs, encodedir) self.opener = self.vfs def join(self, f): return self.path + '/' + encodedir(f) def _walk(self, relpath, recurse): '''yields (unencoded, encoded, size)''' path = self.path if relpath: path += '/' + relpath striplen = len(self.path) + 1 l = [] if self.rawvfs.isdir(path): visit = [path] readdir = self.rawvfs.readdir while visit: p = visit.pop() for f, kind, st in readdir(p, stat=True): fp = p + '/' + f if kind == stat.S_IFREG and f[-2:] in ('.d', '.i'): n = util.pconvert(fp[striplen:]) l.append((decodedir(n), n, st.st_size)) elif kind == stat.S_IFDIR and recurse: visit.append(fp) l.sort() return l def datafiles(self): return self._walk('data', True) def topfiles(self): # yield manifest before changelog return reversed(self._walk('', False)) def walk(self): '''yields (unencoded, encoded, size)''' # yield data files first for x in self.datafiles(): 
yield x for x in self.topfiles(): yield x def copylist(self): return ['requires'] + _data.split() def write(self, tr): pass def invalidatecaches(self): pass def markremoved(self, fn): pass def __contains__(self, path): '''Checks if the store contains path''' path = "/".join(("data", path)) # file? if self.vfs.exists(path + ".i"): return True # dir? if not path.endswith("/"): path = path + "/" return self.vfs.exists(path) class encodedstore(basicstore): def __init__(self, path, vfstype): vfs = vfstype(path + '/store') self.path = vfs.base self.createmode = _calcmode(vfs) vfs.createmode = self.createmode self.rawvfs = vfs self.vfs = scmutil.filtervfs(vfs, encodefilename) self.opener = self.vfs def datafiles(self): for a, b, size in self._walk('data', True): try: a = decodefilename(a) except KeyError: a = None yield a, b, size def join(self, f): return self.path + '/' + encodefilename(f) def copylist(self): return (['requires', '00changelog.i'] + ['store/' + f for f in _data.split()]) class fncache(object): # the filename used to be partially encoded # hence the encodedir/decodedir dance def __init__(self, vfs): self.vfs = vfs self.entries = None self._dirty = False def _load(self): '''fill the entries from the fncache file''' self._dirty = False try: fp = self.vfs('fncache', mode='rb') except IOError: # skip nonexistent file self.entries = set() return self.entries = set(decodedir(fp.read()).splitlines()) if '' in self.entries: fp.seek(0) for n, line in enumerate(fp): if not line.rstrip('\n'): t = _('invalid entry in fncache, line %d') % (n + 1) raise error.Abort(t) fp.close() def write(self, tr): if self._dirty: tr.addbackup('fncache') fp = self.vfs('fncache', mode='wb', atomictemp=True) if self.entries: fp.write(encodedir('\n'.join(self.entries) + '\n')) fp.close() self._dirty = False def add(self, fn): if self.entries is None: self._load() if fn not in self.entries: self._dirty = True self.entries.add(fn) def remove(self, fn): if self.entries is None: self._load() 
try: self.entries.remove(fn) self._dirty = True except KeyError: pass def __contains__(self, fn): if self.entries is None: self._load() return fn in self.entries def __iter__(self): if self.entries is None: self._load() return iter(self.entries) class _fncachevfs(scmutil.abstractvfs, scmutil.auditvfs): def __init__(self, vfs, fnc, encode): scmutil.auditvfs.__init__(self, vfs) self.fncache = fnc self.encode = encode def __call__(self, path, mode='r', *args, **kw): if mode not in ('r', 'rb') and path.startswith('data/'): self.fncache.add(path) return self.vfs(self.encode(path), mode, *args, **kw) def join(self, path): if path: return self.vfs.join(self.encode(path)) else: return self.vfs.join(path) class fncachestore(basicstore): def __init__(self, path, vfstype, dotencode): if dotencode: encode = _pathencode else: encode = _plainhybridencode self.encode = encode vfs = vfstype(path + '/store') self.path = vfs.base self.pathsep = self.path + '/' self.createmode = _calcmode(vfs) vfs.createmode = self.createmode self.rawvfs = vfs fnc = fncache(vfs) self.fncache = fnc self.vfs = _fncachevfs(vfs, fnc, encode) self.opener = self.vfs def join(self, f): return self.pathsep + self.encode(f) def getsize(self, path): return self.rawvfs.stat(path).st_size def datafiles(self): for f in sorted(self.fncache): ef = self.encode(f) try: yield f, ef, self.getsize(ef) except OSError as err: if err.errno != errno.ENOENT: raise def copylist(self): d = ('data dh fncache phaseroots obsstore' ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i') return (['requires', '00changelog.i'] + ['store/' + f for f in d.split()]) def write(self, tr): self.fncache.write(tr) def invalidatecaches(self): self.fncache.entries = None def markremoved(self, fn): self.fncache.remove(fn) def _exists(self, f): ef = self.encode(f) try: self.getsize(ef) return True except OSError as err: if err.errno != errno.ENOENT: raise # nonexistent entry return False def __contains__(self, path): '''Checks if the store 
contains path''' path = "/".join(("data", path)) # check for files (exact match) e = path + '.i' if e in self.fncache and self._exists(e): return True # now check for directories (prefix match) if not path.endswith('/'): path += '/' for e in self.fncache: if e.startswith(path) and self._exists(e): return True return False def store(requirements, path, vfstype): if 'store' in requirements: if 'fncache' in requirements: return fncachestore(path, vfstype, 'dotencode' in requirements) return encodedstore(path, vfstype) return basicstore(path, vfstype) mercurial-3.7.3/mercurial/hbisect.py0000644000175000017500000002203412676531525017106 0ustar mpmmpm00000000000000# changelog bisection for mercurial # # Copyright 2007 Matt Mackall # Copyright 2005, 2006 Benoit Boissinot # # Inspired by git bisect, extension skeleton taken from mq.py. # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import collections from .i18n import _ from .node import ( hex, short, ) from . import ( error, ) def bisect(changelog, state): """find the next node (if any) for testing during a bisect search. returns a (nodes, number, good) tuple. 'nodes' is the final result of the bisect if 'number' is 0. Otherwise 'number' indicates the remaining possible candidates for the search and 'nodes' contains the next bisect target. 'good' is True if bisect is searching for a first good changeset, False if searching for a first bad one. 
""" clparents = changelog.parentrevs skip = set([changelog.rev(n) for n in state['skip']]) def buildancestors(bad, good): # only the earliest bad revision matters badrev = min([changelog.rev(n) for n in bad]) goodrevs = [changelog.rev(n) for n in good] goodrev = min(goodrevs) # build visit array ancestors = [None] * (len(changelog) + 1) # an extra for [-1] # set nodes descended from goodrevs for rev in goodrevs: ancestors[rev] = [] for rev in changelog.revs(goodrev + 1): for prev in clparents(rev): if ancestors[prev] == []: ancestors[rev] = [] # clear good revs from array for rev in goodrevs: ancestors[rev] = None for rev in changelog.revs(len(changelog), goodrev): if ancestors[rev] is None: for prev in clparents(rev): ancestors[prev] = None if ancestors[badrev] is None: return badrev, None return badrev, ancestors good = False badrev, ancestors = buildancestors(state['bad'], state['good']) if not ancestors: # looking for bad to good transition? good = True badrev, ancestors = buildancestors(state['good'], state['bad']) bad = changelog.node(badrev) if not ancestors: # now we're confused if (len(state['bad']) == 1 and len(state['good']) == 1 and state['bad'] != state['good']): raise error.Abort(_("starting revisions are not directly related")) raise error.Abort(_("inconsistent state, %s:%s is good and bad") % (badrev, short(bad))) # build children dict children = {} visit = collections.deque([badrev]) candidates = [] while visit: rev = visit.popleft() if ancestors[rev] == []: candidates.append(rev) for prev in clparents(rev): if prev != -1: if prev in children: children[prev].append(rev) else: children[prev] = [rev] visit.append(prev) candidates.sort() # have we narrowed it down to one entry? # or have all other possible candidates besides 'bad' have been skipped? 
tot = len(candidates) unskipped = [c for c in candidates if (c not in skip) and (c != badrev)] if tot == 1 or not unskipped: return ([changelog.node(rev) for rev in candidates], 0, good) perfect = tot // 2 # find the best node to test best_rev = None best_len = -1 poison = set() for rev in candidates: if rev in poison: # poison children poison.update(children.get(rev, [])) continue a = ancestors[rev] or [rev] ancestors[rev] = None x = len(a) # number of ancestors y = tot - x # number of non-ancestors value = min(x, y) # how good is this test? if value > best_len and rev not in skip: best_len = value best_rev = rev if value == perfect: # found a perfect candidate? quit early break if y < perfect and rev not in skip: # all downhill from here? # poison children poison.update(children.get(rev, [])) continue for c in children.get(rev, []): if ancestors[c]: ancestors[c] = list(set(ancestors[c] + a)) else: ancestors[c] = a + [c] assert best_rev is not None best_node = changelog.node(best_rev) return ([best_node], tot, good) def load_state(repo): state = {'current': [], 'good': [], 'bad': [], 'skip': []} for l in repo.vfs.tryreadlines("bisect.state"): kind, node = l[:-1].split() node = repo.lookup(node) if kind not in state: raise error.Abort(_("unknown bisect kind %s") % kind) state[kind].append(node) return state def save_state(repo, state): f = repo.vfs("bisect.state", "w", atomictemp=True) with repo.wlock(): for kind in sorted(state): for node in state[kind]: f.write("%s %s\n" % (kind, hex(node))) f.close() def get(repo, status): """ Return a list of revision(s) that match the given status: - ``good``, ``bad``, ``skip``: csets explicitly marked as good/bad/skip - ``goods``, ``bads`` : csets topologically good/bad - ``range`` : csets taking part in the bisection - ``pruned`` : csets that are goods, bads or skipped - ``untested`` : csets whose fate is yet unknown - ``ignored`` : csets ignored due to DAG topology - ``current`` : the cset currently being bisected """ state 
= load_state(repo) if status in ('good', 'bad', 'skip', 'current'): return map(repo.changelog.rev, state[status]) else: # In the following sets, we do *not* call 'bisect()' with more # than one level of recursion, because that can be very, very # time consuming. Instead, we always develop the expression as # much as possible. # 'range' is all csets that make the bisection: # - have a good ancestor and a bad descendant, or conversely # that's because the bisection can go either way range = '( bisect(bad)::bisect(good) | bisect(good)::bisect(bad) )' _t = repo.revs('bisect(good)::bisect(bad)') # The sets of topologically good or bad csets if len(_t) == 0: # Goods are topologically after bads goods = 'bisect(good)::' # Pruned good csets bads = '::bisect(bad)' # Pruned bad csets else: # Goods are topologically before bads goods = '::bisect(good)' # Pruned good csets bads = 'bisect(bad)::' # Pruned bad csets # 'pruned' is all csets whose fate is already known: good, bad, skip skips = 'bisect(skip)' # Pruned skipped csets pruned = '( (%s) | (%s) | (%s) )' % (goods, bads, skips) # 'untested' is all cset that are- in 'range', but not in 'pruned' untested = '( (%s) - (%s) )' % (range, pruned) # 'ignored' is all csets that were not used during the bisection # due to DAG topology, but may however have had an impact. # E.g., a branch merged between bads and goods, but whose branch- # point is out-side of the range. 
iba = '::bisect(bad) - ::bisect(good)' # Ignored bads' ancestors iga = '::bisect(good) - ::bisect(bad)' # Ignored goods' ancestors ignored = '( ( (%s) | (%s) ) - (%s) )' % (iba, iga, range) if status == 'range': return repo.revs(range) elif status == 'pruned': return repo.revs(pruned) elif status == 'untested': return repo.revs(untested) elif status == 'ignored': return repo.revs(ignored) elif status == "goods": return repo.revs(goods) elif status == "bads": return repo.revs(bads) else: raise error.ParseError(_('invalid bisect state')) def label(repo, node): rev = repo.changelog.rev(node) # Try explicit sets if rev in get(repo, 'good'): # i18n: bisect changeset status return _('good') if rev in get(repo, 'bad'): # i18n: bisect changeset status return _('bad') if rev in get(repo, 'skip'): # i18n: bisect changeset status return _('skipped') if rev in get(repo, 'untested') or rev in get(repo, 'current'): # i18n: bisect changeset status return _('untested') if rev in get(repo, 'ignored'): # i18n: bisect changeset status return _('ignored') # Try implicit sets if rev in get(repo, 'goods'): # i18n: bisect changeset status return _('good (implicit)') if rev in get(repo, 'bads'): # i18n: bisect changeset status return _('bad (implicit)') return None def shortlabel(label): if label: return label[0].upper() return None mercurial-3.7.3/mercurial/sshserver.py0000644000175000017500000000717712676531525017524 0ustar mpmmpm00000000000000# sshserver.py - ssh protocol server support for mercurial # # Copyright 2005-2007 Matt Mackall # Copyright 2006 Vadim Gelfer # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import os import sys from . 
import ( error, hook, util, wireproto, ) class sshserver(wireproto.abstractserverproto): def __init__(self, ui, repo): self.ui = ui self.repo = repo self.lock = None self.fin = ui.fin self.fout = ui.fout hook.redirect(True) ui.fout = repo.ui.fout = ui.ferr # Prevent insertion/deletion of CRs util.setbinary(self.fin) util.setbinary(self.fout) def getargs(self, args): data = {} keys = args.split() for n in xrange(len(keys)): argline = self.fin.readline()[:-1] arg, l = argline.split() if arg not in keys: raise error.Abort("unexpected parameter %r" % arg) if arg == '*': star = {} for k in xrange(int(l)): argline = self.fin.readline()[:-1] arg, l = argline.split() val = self.fin.read(int(l)) star[arg] = val data['*'] = star else: val = self.fin.read(int(l)) data[arg] = val return [data[k] for k in keys] def getarg(self, name): return self.getargs(name)[0] def getfile(self, fpout): self.sendresponse('') count = int(self.fin.readline()) while count: fpout.write(self.fin.read(count)) count = int(self.fin.readline()) def redirect(self): pass def groupchunks(self, changegroup): while True: d = changegroup.read(4096) if not d: break yield d def sendresponse(self, v): self.fout.write("%d\n" % len(v)) self.fout.write(v) self.fout.flush() def sendstream(self, source): write = self.fout.write for chunk in source.gen: write(chunk) self.fout.flush() def sendpushresponse(self, rsp): self.sendresponse('') self.sendresponse(str(rsp.res)) def sendpusherror(self, rsp): self.sendresponse(rsp.res) def sendooberror(self, rsp): self.ui.ferr.write('%s\n-\n' % rsp.message) self.ui.ferr.flush() self.fout.write('\n') self.fout.flush() def serve_forever(self): try: while self.serve_one(): pass finally: if self.lock is not None: self.lock.release() sys.exit(0) handlers = { str: sendresponse, wireproto.streamres: sendstream, wireproto.pushres: sendpushresponse, wireproto.pusherr: sendpusherror, wireproto.ooberror: sendooberror, } def serve_one(self): cmd = self.fin.readline()[:-1] if cmd and cmd 
in wireproto.commands: rsp = wireproto.dispatch(self.repo, self, cmd) self.handlers[rsp.__class__](self, rsp) elif cmd: impl = getattr(self, 'do_' + cmd, None) if impl: r = impl() if r is not None: self.sendresponse(r) else: self.sendresponse("") return cmd != '' def _client(self): client = os.environ.get('SSH_CLIENT', '').split(' ', 1)[0] return 'remote:ssh:' + client mercurial-3.7.3/mercurial/exchange.py0000644000175000017500000022022112676531525017245 0ustar mpmmpm00000000000000# exchange.py - utility to exchange data between repos. # # Copyright 2005-2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import errno import urllib import urllib2 from .i18n import _ from .node import ( hex, nullid, ) from . import ( base85, bookmarks as bookmod, bundle2, changegroup, discovery, error, lock as lockmod, obsolete, phases, pushkey, scmutil, sslutil, streamclone, tags, url as urlmod, util, ) # Maps bundle compression human names to internal representation. _bundlespeccompressions = {'none': None, 'bzip2': 'BZ', 'gzip': 'GZ', } # Maps bundle version human names to changegroup versions. _bundlespeccgversions = {'v1': '01', 'v2': '02', 'packed1': 's1', 'bundle2': '02', #legacy } def parsebundlespec(repo, spec, strict=True, externalnames=False): """Parse a bundle string specification into parts. Bundle specifications denote a well-defined bundle/exchange format. The content of a given specification should not change over time in order to ensure that bundles produced by a newer version of Mercurial are readable from an older version. The string currently has the form: -[;[;]] Where is one of the supported compression formats and is (currently) a version string. A ";" can follow the type and all text afterwards is interpretted as URI encoded, ";" delimited key=value pairs. If ``strict`` is True (the default) is required. 
Otherwise, it is optional. If ``externalnames`` is False (the default), the human-centric names will be converted to their internal representation. Returns a 3-tuple of (compression, version, parameters). Compression will be ``None`` if not in strict mode and a compression isn't defined. An ``InvalidBundleSpecification`` is raised when the specification is not syntactically well formed. An ``UnsupportedBundleSpecification`` is raised when the compression or bundle type/version is not recognized. Note: this function will likely eventually return a more complex data structure, including bundle2 part information. """ def parseparams(s): if ';' not in s: return s, {} params = {} version, paramstr = s.split(';', 1) for p in paramstr.split(';'): if '=' not in p: raise error.InvalidBundleSpecification( _('invalid bundle specification: ' 'missing "=" in parameter: %s') % p) key, value = p.split('=', 1) key = urllib.unquote(key) value = urllib.unquote(value) params[key] = value return version, params if strict and '-' not in spec: raise error.InvalidBundleSpecification( _('invalid bundle specification; ' 'must be prefixed with compression: %s') % spec) if '-' in spec: compression, version = spec.split('-', 1) if compression not in _bundlespeccompressions: raise error.UnsupportedBundleSpecification( _('%s compression is not supported') % compression) version, params = parseparams(version) if version not in _bundlespeccgversions: raise error.UnsupportedBundleSpecification( _('%s is not a recognized bundle version') % version) else: # Value could be just the compression or just the version, in which # case some defaults are assumed (but only when not in strict mode). 
assert not strict spec, params = parseparams(spec) if spec in _bundlespeccompressions: compression = spec version = 'v1' if 'generaldelta' in repo.requirements: version = 'v2' elif spec in _bundlespeccgversions: if spec == 'packed1': compression = 'none' else: compression = 'bzip2' version = spec else: raise error.UnsupportedBundleSpecification( _('%s is not a recognized bundle specification') % spec) # The specification for packed1 can optionally declare the data formats # required to apply it. If we see this metadata, compare against what the # repo supports and error if the bundle isn't compatible. if version == 'packed1' and 'requirements' in params: requirements = set(params['requirements'].split(',')) missingreqs = requirements - repo.supportedformats if missingreqs: raise error.UnsupportedBundleSpecification( _('missing support for repository features: %s') % ', '.join(sorted(missingreqs))) if not externalnames: compression = _bundlespeccompressions[compression] version = _bundlespeccgversions[version] return compression, version, params def readbundle(ui, fh, fname, vfs=None): header = changegroup.readexactly(fh, 4) alg = None if not fname: fname = "stream" if not header.startswith('HG') and header.startswith('\0'): fh = changegroup.headerlessfixup(fh, header) header = "HG10" alg = 'UN' elif vfs: fname = vfs.join(fname) magic, version = header[0:2], header[2:4] if magic != 'HG': raise error.Abort(_('%s: not a Mercurial bundle') % fname) if version == '10': if alg is None: alg = changegroup.readexactly(fh, 2) return changegroup.cg1unpacker(fh, alg) elif version.startswith('2'): return bundle2.getunbundler(ui, fh, magicstring=magic + version) elif version == 'S1': return streamclone.streamcloneapplier(fh) else: raise error.Abort(_('%s: unknown bundle version %s') % (fname, version)) def getbundlespec(ui, fh): """Infer the bundlespec from a bundle file handle. The input file handle is seeked and the original seek position is not restored. 
""" def speccompression(alg): for k, v in _bundlespeccompressions.items(): if v == alg: return k return None b = readbundle(ui, fh, None) if isinstance(b, changegroup.cg1unpacker): alg = b._type if alg == '_truncatedBZ': alg = 'BZ' comp = speccompression(alg) if not comp: raise error.Abort(_('unknown compression algorithm: %s') % alg) return '%s-v1' % comp elif isinstance(b, bundle2.unbundle20): if 'Compression' in b.params: comp = speccompression(b.params['Compression']) if not comp: raise error.Abort(_('unknown compression algorithm: %s') % comp) else: comp = 'none' version = None for part in b.iterparts(): if part.type == 'changegroup': version = part.params['version'] if version in ('01', '02'): version = 'v2' else: raise error.Abort(_('changegroup version %s does not have ' 'a known bundlespec') % version, hint=_('try upgrading your Mercurial ' 'client')) if not version: raise error.Abort(_('could not identify changegroup version in ' 'bundle')) return '%s-%s' % (comp, version) elif isinstance(b, streamclone.streamcloneapplier): requirements = streamclone.readbundle1header(fh)[2] params = 'requirements=%s' % ','.join(sorted(requirements)) return 'none-packed1;%s' % urllib.quote(params) else: raise error.Abort(_('unknown bundle type: %s') % b) def buildobsmarkerspart(bundler, markers): """add an obsmarker part to the bundler with No part is created if markers is empty. Raises ValueError if the bundler doesn't support any known obsmarker format. 
""" if markers: remoteversions = bundle2.obsmarkersversion(bundler.capabilities) version = obsolete.commonversion(remoteversions) if version is None: raise ValueError('bundler does not support common obsmarker format') stream = obsolete.encodemarkers(markers, True, version=version) return bundler.newpart('obsmarkers', data=stream) return None def _canusebundle2(op): """return true if a pull/push can use bundle2 Feel free to nuke this function when we drop the experimental option""" return (op.repo.ui.configbool('experimental', 'bundle2-exp', True) and op.remote.capable('bundle2')) class pushoperation(object): """A object that represent a single push operation It purpose is to carry push related state and very common operation. A new should be created at the beginning of each push and discarded afterward. """ def __init__(self, repo, remote, force=False, revs=None, newbranch=False, bookmarks=()): # repo we push from self.repo = repo self.ui = repo.ui # repo we push to self.remote = remote # force option provided self.force = force # revs to be pushed (None is "all") self.revs = revs # bookmark explicitly pushed self.bookmarks = bookmarks # allow push of new branch self.newbranch = newbranch # did a local lock get acquired? self.locallocked = None # step already performed # (used to check what steps have been already performed through bundle2) self.stepsdone = set() # Integer version of the changegroup push result # - None means nothing to push # - 0 means HTTP error # - 1 means we pushed and remote head count is unchanged *or* # we have outgoing changesets but refused to push # - other values as described by addchangegroup() self.cgresult = None # Boolean value for the bookmark push self.bkresult = None # discover.outgoing object (contains common and outgoing data) self.outgoing = None # all remote heads before the push self.remoteheads = None # testable as a boolean indicating if any nodes are missing locally. 
self.incoming = None # phases changes that must be pushed along side the changesets self.outdatedphases = None # phases changes that must be pushed if changeset push fails self.fallbackoutdatedphases = None # outgoing obsmarkers self.outobsmarkers = set() # outgoing bookmarks self.outbookmarks = [] # transaction manager self.trmanager = None # map { pushkey partid -> callback handling failure} # used to handle exception from mandatory pushkey part failure self.pkfailcb = {} @util.propertycache def futureheads(self): """future remote heads if the changeset push succeeds""" return self.outgoing.missingheads @util.propertycache def fallbackheads(self): """future remote heads if the changeset push fails""" if self.revs is None: # not target to push, all common are relevant return self.outgoing.commonheads unfi = self.repo.unfiltered() # I want cheads = heads(::missingheads and ::commonheads) # (missingheads is revs with secret changeset filtered out) # # This can be expressed as: # cheads = ( (missingheads and ::commonheads) # + (commonheads and ::missingheads))" # ) # # while trying to push we already computed the following: # common = (::commonheads) # missing = ((commonheads::missingheads) - commonheads) # # We can pick: # * missingheads part of common (::commonheads) common = self.outgoing.common nm = self.repo.changelog.nodemap cheads = [node for node in self.revs if nm[node] in common] # and # * commonheads parents on missing revset = unfi.set('%ln and parents(roots(%ln))', self.outgoing.commonheads, self.outgoing.missing) cheads.extend(c.node() for c in revset) return cheads @property def commonheads(self): """set of all common heads after changeset bundle push""" if self.cgresult: return self.futureheads else: return self.fallbackheads # mapping of message used when pushing bookmark bookmsgmap = {'update': (_("updating bookmark %s\n"), _('updating bookmark %s failed!\n')), 'export': (_("exporting bookmark %s\n"), _('exporting bookmark %s failed!\n')), 'delete': 
(_("deleting remote bookmark %s\n"), _('deleting remote bookmark %s failed!\n')), } def push(repo, remote, force=False, revs=None, newbranch=False, bookmarks=(), opargs=None): '''Push outgoing changesets (limited by revs) from a local repository to remote. Return an integer: - None means nothing to push - 0 means HTTP error - 1 means we pushed and remote head count is unchanged *or* we have outgoing changesets but refused to push - other values as described by addchangegroup() ''' if opargs is None: opargs = {} pushop = pushoperation(repo, remote, force, revs, newbranch, bookmarks, **opargs) if pushop.remote.local(): missing = (set(pushop.repo.requirements) - pushop.remote.local().supported) if missing: msg = _("required features are not" " supported in the destination:" " %s") % (', '.join(sorted(missing))) raise error.Abort(msg) # there are two ways to push to remote repo: # # addchangegroup assumes local user can lock remote # repo (local filesystem, old ssh servers). # # unbundle assumes local user cannot lock remote repo (new ssh # servers, http servers). if not pushop.remote.canpush(): raise error.Abort(_("destination does not support push")) # get local lock as we might write phase data localwlock = locallock = None try: # bundle2 push may receive a reply bundle touching bookmarks or other # things requiring the wlock. Take it now to ensure proper ordering. maypushback = pushop.ui.configbool('experimental', 'bundle2.pushback') if _canusebundle2(pushop) and maypushback: localwlock = pushop.repo.wlock() locallock = pushop.repo.lock() pushop.locallocked = True except IOError as err: pushop.locallocked = False if err.errno != errno.EACCES: raise # source repo cannot be locked. # We do not abort the push, but just disable the local phase # synchronisation. 
msg = 'cannot lock source repository: %s\n' % err pushop.ui.debug(msg) try: if pushop.locallocked: pushop.trmanager = transactionmanager(pushop.repo, 'push-response', pushop.remote.url()) pushop.repo.checkpush(pushop) lock = None unbundle = pushop.remote.capable('unbundle') if not unbundle: lock = pushop.remote.lock() try: _pushdiscovery(pushop) if _canusebundle2(pushop): _pushbundle2(pushop) _pushchangeset(pushop) _pushsyncphase(pushop) _pushobsolete(pushop) _pushbookmark(pushop) finally: if lock is not None: lock.release() if pushop.trmanager: pushop.trmanager.close() finally: if pushop.trmanager: pushop.trmanager.release() if locallock is not None: locallock.release() if localwlock is not None: localwlock.release() return pushop # list of steps to perform discovery before push pushdiscoveryorder = [] # Mapping between step name and function # # This exists to help extensions wrap steps if necessary pushdiscoverymapping = {} def pushdiscovery(stepname): """decorator for function performing discovery before push The function is added to the step -> function mapping and appended to the list of steps. Beware that decorated function will be added in order (this may matter). 
You can only use this decorator for a new step, if you want to wrap a step from an extension, change the pushdiscovery dictionary directly.""" def dec(func): assert stepname not in pushdiscoverymapping pushdiscoverymapping[stepname] = func pushdiscoveryorder.append(stepname) return func return dec def _pushdiscovery(pushop): """Run all discovery steps""" for stepname in pushdiscoveryorder: step = pushdiscoverymapping[stepname] step(pushop) @pushdiscovery('changeset') def _pushdiscoverychangeset(pushop): """discover the changeset that need to be pushed""" fci = discovery.findcommonincoming commoninc = fci(pushop.repo, pushop.remote, force=pushop.force) common, inc, remoteheads = commoninc fco = discovery.findcommonoutgoing outgoing = fco(pushop.repo, pushop.remote, onlyheads=pushop.revs, commoninc=commoninc, force=pushop.force) pushop.outgoing = outgoing pushop.remoteheads = remoteheads pushop.incoming = inc @pushdiscovery('phase') def _pushdiscoveryphase(pushop): """discover the phase that needs to be pushed (computed for both success and failure case for changesets push)""" outgoing = pushop.outgoing unfi = pushop.repo.unfiltered() remotephases = pushop.remote.listkeys('phases') publishing = remotephases.get('publishing', False) if (pushop.ui.configbool('ui', '_usedassubrepo', False) and remotephases # server supports phases and not pushop.outgoing.missing # no changesets to be pushed and publishing): # When: # - this is a subrepo push # - and remote support phase # - and no changeset are to be pushed # - and remote is publishing # We may be in issue 3871 case! # We drop the possible phase synchronisation done by # courtesy to publish changesets possibly locally draft # on the remote. 
remotephases = {'publishing': 'True'} ana = phases.analyzeremotephases(pushop.repo, pushop.fallbackheads, remotephases) pheads, droots = ana extracond = '' if not publishing: extracond = ' and public()' revset = 'heads((%%ln::%%ln) %s)' % extracond # Get the list of all revs draft on remote by public here. # XXX Beware that revset break if droots is not strictly # XXX root we may want to ensure it is but it is costly fallback = list(unfi.set(revset, droots, pushop.fallbackheads)) if not outgoing.missing: future = fallback else: # adds changeset we are going to push as draft # # should not be necessary for publishing server, but because of an # issue fixed in xxxxx we have to do it anyway. fdroots = list(unfi.set('roots(%ln + %ln::)', outgoing.missing, droots)) fdroots = [f.node() for f in fdroots] future = list(unfi.set(revset, fdroots, pushop.futureheads)) pushop.outdatedphases = future pushop.fallbackoutdatedphases = fallback @pushdiscovery('obsmarker') def _pushdiscoveryobsmarkers(pushop): if (obsolete.isenabled(pushop.repo, obsolete.exchangeopt) and pushop.repo.obsstore and 'obsolete' in pushop.remote.listkeys('namespaces')): repo = pushop.repo # very naive computation, that can be quite expensive on big repo. # However: evolution is currently slow on them anyway. 
nodes = (c.node() for c in repo.set('::%ln', pushop.futureheads)) pushop.outobsmarkers = pushop.repo.obsstore.relevantmarkers(nodes) @pushdiscovery('bookmarks') def _pushdiscoverybookmarks(pushop): ui = pushop.ui repo = pushop.repo.unfiltered() remote = pushop.remote ui.debug("checking for updated bookmarks\n") ancestors = () if pushop.revs: revnums = map(repo.changelog.rev, pushop.revs) ancestors = repo.changelog.ancestors(revnums, inclusive=True) remotebookmark = remote.listkeys('bookmarks') explicit = set(pushop.bookmarks) comp = bookmod.compare(repo, repo._bookmarks, remotebookmark, srchex=hex) addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = comp for b, scid, dcid in advsrc: if b in explicit: explicit.remove(b) if not ancestors or repo[scid].rev() in ancestors: pushop.outbookmarks.append((b, dcid, scid)) # search added bookmark for b, scid, dcid in addsrc: if b in explicit: explicit.remove(b) pushop.outbookmarks.append((b, '', scid)) # search for overwritten bookmark for b, scid, dcid in advdst + diverge + differ: if b in explicit: explicit.remove(b) pushop.outbookmarks.append((b, dcid, scid)) # search for bookmark to delete for b, scid, dcid in adddst: if b in explicit: explicit.remove(b) # treat as "deleted locally" pushop.outbookmarks.append((b, dcid, '')) # identical bookmarks shouldn't get reported for b, scid, dcid in same: if b in explicit: explicit.remove(b) if explicit: explicit = sorted(explicit) # we should probably list all of them ui.warn(_('bookmark %s does not exist on the local ' 'or remote repository!\n') % explicit[0]) pushop.bkresult = 2 pushop.outbookmarks.sort() def _pushcheckoutgoing(pushop): outgoing = pushop.outgoing unfi = pushop.repo.unfiltered() if not outgoing.missing: # nothing to push scmutil.nochangesfound(unfi.ui, unfi, outgoing.excluded) return False # something to push if not pushop.force: # if repo.obsstore == False --> no obsolete # then, save the iteration if unfi.obsstore: # this message are here for 80 
char limit reason mso = _("push includes obsolete changeset: %s!") mst = {"unstable": _("push includes unstable changeset: %s!"), "bumped": _("push includes bumped changeset: %s!"), "divergent": _("push includes divergent changeset: %s!")} # If we are to push if there is at least one # obsolete or unstable changeset in missing, at # least one of the missinghead will be obsolete or # unstable. So checking heads only is ok for node in outgoing.missingheads: ctx = unfi[node] if ctx.obsolete(): raise error.Abort(mso % ctx) elif ctx.troubled(): raise error.Abort(mst[ctx.troubles()[0]] % ctx) discovery.checkheads(pushop) return True # List of names of steps to perform for an outgoing bundle2, order matters. b2partsgenorder = [] # Mapping between step name and function # # This exists to help extensions wrap steps if necessary b2partsgenmapping = {} def b2partsgenerator(stepname, idx=None): """decorator for function generating bundle2 part The function is added to the step -> function mapping and appended to the list of steps. Beware that decorated functions will be added in order (this may matter). You can only use this decorator for new steps, if you want to wrap a step from an extension, attack the b2partsgenmapping dictionary directly.""" def dec(func): assert stepname not in b2partsgenmapping b2partsgenmapping[stepname] = func if idx is None: b2partsgenorder.append(stepname) else: b2partsgenorder.insert(idx, stepname) return func return dec def _pushb2ctxcheckheads(pushop, bundler): """Generate race condition checking parts Exists as an independent function to aid extensions """ if not pushop.force: bundler.newpart('check:heads', data=iter(pushop.remoteheads)) @b2partsgenerator('changeset') def _pushb2ctx(pushop, bundler): """handle changegroup push through bundle2 addchangegroup result is stored in the ``pushop.cgresult`` attribute. """ if 'changesets' in pushop.stepsdone: return pushop.stepsdone.add('changesets') # Send known heads to the server for race detection. 
if not _pushcheckoutgoing(pushop): return pushop.repo.prepushoutgoinghooks(pushop.repo, pushop.remote, pushop.outgoing) _pushb2ctxcheckheads(pushop, bundler) b2caps = bundle2.bundle2caps(pushop.remote) version = None cgversions = b2caps.get('changegroup') if not cgversions: # 3.1 and 3.2 ship with an empty value cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push', pushop.outgoing) else: cgversions = [v for v in cgversions if v in changegroup.supportedoutgoingversions( pushop.repo)] if not cgversions: raise ValueError(_('no common changegroup version')) version = max(cgversions) cg = changegroup.getlocalchangegroupraw(pushop.repo, 'push', pushop.outgoing, version=version) cgpart = bundler.newpart('changegroup', data=cg) if version is not None: cgpart.addparam('version', version) if 'treemanifest' in pushop.repo.requirements: cgpart.addparam('treemanifest', '1') def handlereply(op): """extract addchangegroup returns from server reply""" cgreplies = op.records.getreplies(cgpart.id) assert len(cgreplies['changegroup']) == 1 pushop.cgresult = cgreplies['changegroup'][0]['return'] return handlereply @b2partsgenerator('phase') def _pushb2phases(pushop, bundler): """handle phase push through bundle2""" if 'phases' in pushop.stepsdone: return b2caps = bundle2.bundle2caps(pushop.remote) if not 'pushkey' in b2caps: return pushop.stepsdone.add('phases') part2node = [] def handlefailure(pushop, exc): targetid = int(exc.partid) for partid, node in part2node: if partid == targetid: raise error.Abort(_('updating %s to public failed') % node) enc = pushkey.encode for newremotehead in pushop.outdatedphases: part = bundler.newpart('pushkey') part.addparam('namespace', enc('phases')) part.addparam('key', enc(newremotehead.hex())) part.addparam('old', enc(str(phases.draft))) part.addparam('new', enc(str(phases.public))) part2node.append((part.id, newremotehead)) pushop.pkfailcb[part.id] = handlefailure def handlereply(op): for partid, node in part2node: partrep = 
op.records.getreplies(partid) results = partrep['pushkey'] assert len(results) <= 1 msg = None if not results: msg = _('server ignored update of %s to public!\n') % node elif not int(results[0]['return']): msg = _('updating %s to public failed!\n') % node if msg is not None: pushop.ui.warn(msg) return handlereply @b2partsgenerator('obsmarkers') def _pushb2obsmarkers(pushop, bundler): if 'obsmarkers' in pushop.stepsdone: return remoteversions = bundle2.obsmarkersversion(bundler.capabilities) if obsolete.commonversion(remoteversions) is None: return pushop.stepsdone.add('obsmarkers') if pushop.outobsmarkers: markers = sorted(pushop.outobsmarkers) buildobsmarkerspart(bundler, markers) @b2partsgenerator('bookmarks') def _pushb2bookmarks(pushop, bundler): """handle bookmark push through bundle2""" if 'bookmarks' in pushop.stepsdone: return b2caps = bundle2.bundle2caps(pushop.remote) if 'pushkey' not in b2caps: return pushop.stepsdone.add('bookmarks') part2book = [] enc = pushkey.encode def handlefailure(pushop, exc): targetid = int(exc.partid) for partid, book, action in part2book: if partid == targetid: raise error.Abort(bookmsgmap[action][1].rstrip() % book) # we should not be called for part we did not generated assert False for book, old, new in pushop.outbookmarks: part = bundler.newpart('pushkey') part.addparam('namespace', enc('bookmarks')) part.addparam('key', enc(book)) part.addparam('old', enc(old)) part.addparam('new', enc(new)) action = 'update' if not old: action = 'export' elif not new: action = 'delete' part2book.append((part.id, book, action)) pushop.pkfailcb[part.id] = handlefailure def handlereply(op): ui = pushop.ui for partid, book, action in part2book: partrep = op.records.getreplies(partid) results = partrep['pushkey'] assert len(results) <= 1 if not results: pushop.ui.warn(_('server ignored bookmark %s update\n') % book) else: ret = int(results[0]['return']) if ret: ui.status(bookmsgmap[action][0] % book) else: ui.warn(bookmsgmap[action][1] % 
book) if pushop.bkresult is not None: pushop.bkresult = 1 return handlereply def _pushbundle2(pushop): """push data to the remote using bundle2 The only currently supported type of data is changegroup but this will evolve in the future.""" bundler = bundle2.bundle20(pushop.ui, bundle2.bundle2caps(pushop.remote)) pushback = (pushop.trmanager and pushop.ui.configbool('experimental', 'bundle2.pushback')) # create reply capability capsblob = bundle2.encodecaps(bundle2.getrepocaps(pushop.repo, allowpushback=pushback)) bundler.newpart('replycaps', data=capsblob) replyhandlers = [] for partgenname in b2partsgenorder: partgen = b2partsgenmapping[partgenname] ret = partgen(pushop, bundler) if callable(ret): replyhandlers.append(ret) # do not push if nothing to push if bundler.nbparts <= 1: return stream = util.chunkbuffer(bundler.getchunks()) try: try: reply = pushop.remote.unbundle(stream, ['force'], 'push') except error.BundleValueError as exc: raise error.Abort('missing support for %s' % exc) try: trgetter = None if pushback: trgetter = pushop.trmanager.transaction op = bundle2.processbundle(pushop.repo, reply, trgetter) except error.BundleValueError as exc: raise error.Abort('missing support for %s' % exc) except bundle2.AbortFromPart as exc: pushop.ui.status(_('remote: %s\n') % exc) raise error.Abort(_('push failed on remote'), hint=exc.hint) except error.PushkeyFailed as exc: partid = int(exc.partid) if partid not in pushop.pkfailcb: raise pushop.pkfailcb[partid](pushop, exc) for rephand in replyhandlers: rephand(op) def _pushchangeset(pushop): """Make the actual push of changeset bundle to remote repo""" if 'changesets' in pushop.stepsdone: return pushop.stepsdone.add('changesets') if not _pushcheckoutgoing(pushop): return pushop.repo.prepushoutgoinghooks(pushop.repo, pushop.remote, pushop.outgoing) outgoing = pushop.outgoing unbundle = pushop.remote.capable('unbundle') # TODO: get bundlecaps from remote bundlecaps = None # create a changegroup from local if 
pushop.revs is None and not (outgoing.excluded or pushop.repo.changelog.filteredrevs): # push everything, # use the fast path, no race possible on push bundler = changegroup.cg1packer(pushop.repo, bundlecaps) cg = changegroup.getsubset(pushop.repo, outgoing, bundler, 'push', fastpath=True) else: cg = changegroup.getlocalchangegroup(pushop.repo, 'push', outgoing, bundlecaps) # apply changegroup to remote if unbundle: # local repo finds heads on server, finds out what # revs it must push. once revs transferred, if server # finds it has different heads (someone else won # commit/push race), server aborts. if pushop.force: remoteheads = ['force'] else: remoteheads = pushop.remoteheads # ssh: return remote's addchangegroup() # http: return remote's addchangegroup() or 0 for error pushop.cgresult = pushop.remote.unbundle(cg, remoteheads, pushop.repo.url()) else: # we return an integer indicating remote head count # change pushop.cgresult = pushop.remote.addchangegroup(cg, 'push', pushop.repo.url()) def _pushsyncphase(pushop): """synchronise phase information locally and remotely""" cheads = pushop.commonheads # even when we don't push, exchanging phase data is useful remotephases = pushop.remote.listkeys('phases') if (pushop.ui.configbool('ui', '_usedassubrepo', False) and remotephases # server supports phases and pushop.cgresult is None # nothing was pushed and remotephases.get('publishing', False)): # When: # - this is a subrepo push # - and remote support phase # - and no changeset was pushed # - and remote is publishing # We may be in issue 3871 case! # We drop the possible phase synchronisation done by # courtesy to publish changesets possibly locally draft # on the remote. 
remotephases = {'publishing': 'True'} if not remotephases: # old server or public only reply from non-publishing _localphasemove(pushop, cheads) # don't push any phase data as there is nothing to push else: ana = phases.analyzeremotephases(pushop.repo, cheads, remotephases) pheads, droots = ana ### Apply remote phase on local if remotephases.get('publishing', False): _localphasemove(pushop, cheads) else: # publish = False _localphasemove(pushop, pheads) _localphasemove(pushop, cheads, phases.draft) ### Apply local phase on remote if pushop.cgresult: if 'phases' in pushop.stepsdone: # phases already pushed though bundle2 return outdated = pushop.outdatedphases else: outdated = pushop.fallbackoutdatedphases pushop.stepsdone.add('phases') # filter heads already turned public by the push outdated = [c for c in outdated if c.node() not in pheads] # fallback to independent pushkey command for newremotehead in outdated: r = pushop.remote.pushkey('phases', newremotehead.hex(), str(phases.draft), str(phases.public)) if not r: pushop.ui.warn(_('updating %s to public failed!\n') % newremotehead) def _localphasemove(pushop, nodes, phase=phases.public): """move to in the local source repo""" if pushop.trmanager: phases.advanceboundary(pushop.repo, pushop.trmanager.transaction(), phase, nodes) else: # repo is not locked, do not change any phases! # Informs the user that phases should have been moved when # applicable. 
        # Repo is not locked: report which nodes *would* have moved so the
        # user knows the local phase update was skipped, not silently lost.
        actualmoves = [n for n in nodes if phase < pushop.repo[n].phase()]
        phasestr = phases.phasenames[phase]
        if actualmoves:
            pushop.ui.status(_('cannot lock source repo, skipping '
                               'local %s phase update\n') % phasestr)

def _pushobsolete(pushop):
    """utility function to push obsolete markers to a remote

    Pushes ``pushop.outobsmarkers`` via the legacy ``pushkey`` protocol,
    one escaped chunk per key. No-op if the 'obsmarkers' step was already
    handled (e.g. by bundle2) or there is nothing to push.
    """
    if 'obsmarkers' in pushop.stepsdone:
        return
    repo = pushop.repo
    remote = pushop.remote
    pushop.stepsdone.add('obsmarkers')
    if pushop.outobsmarkers:
        pushop.ui.debug('try to push obsolete markers to remote\n')
        rslts = []
        remotedata = obsolete._pushkeyescape(sorted(pushop.outobsmarkers))
        for key in sorted(remotedata, reverse=True):
            # reverse sort to ensure we end with dump0
            data = remotedata[key]
            rslts.append(remote.pushkey('obsolete', key, '', data))
        # any falsy pushkey result means at least one chunk was refused
        if [r for r in rslts if not r]:
            msg = _('failed to push some obsolete markers!\n')
            repo.ui.warn(msg)

def _pushbookmark(pushop):
    """Update bookmark position on remote

    Uses the ``pushkey`` protocol for each outgoing bookmark; each entry in
    ``pushop.outbookmarks`` is a ``(name, old, new)`` triple where an empty
    ``old`` means creation ("export") and an empty ``new`` means deletion.
    """
    if pushop.cgresult == 0 or 'bookmarks' in pushop.stepsdone:
        return
    pushop.stepsdone.add('bookmarks')
    ui = pushop.ui
    remote = pushop.remote
    for b, old, new in pushop.outbookmarks:
        action = 'update'
        if not old:
            action = 'export'
        elif not new:
            action = 'delete'
        if remote.pushkey('bookmarks', b, old, new):
            ui.status(bookmsgmap[action][0] % b)
        else:
            ui.warn(bookmsgmap[action][1] % b)
            # discovery can have set the value from an invalid entry
            if pushop.bkresult is not None:
                pushop.bkresult = 1

class pulloperation(object):
    """An object that represents a single pull operation.

    Its purpose is to carry pull-related state and very common operations.

    A new one should be created at the beginning of each pull and discarded
    afterward.
    """

    def __init__(self, repo, remote, heads=None, force=False, bookmarks=(),
                 remotebookmarks=None, streamclonerequested=None):
        # repo we pull into
        self.repo = repo
        # repo we pull from
        self.remote = remote
        # revision we try to pull (None is "all")
        self.heads = heads
        # bookmark pulled explicitly
        self.explicitbookmarks = bookmarks
        # do we force pull?
self.force = force # whether a streaming clone was requested self.streamclonerequested = streamclonerequested # transaction manager self.trmanager = None # set of common changeset between local and remote before pull self.common = None # set of pulled head self.rheads = None # list of missing changeset to fetch remotely self.fetch = None # remote bookmarks data self.remotebookmarks = remotebookmarks # result of changegroup pulling (used as return code by pull) self.cgresult = None # list of step already done self.stepsdone = set() # Whether we attempted a clone from pre-generated bundles. self.clonebundleattempted = False @util.propertycache def pulledsubset(self): """heads of the set of changeset target by the pull""" # compute target subset if self.heads is None: # We pulled every thing possible # sync on everything common c = set(self.common) ret = list(self.common) for n in self.rheads: if n not in c: ret.append(n) return ret else: # We pulled a specific subset # sync on this subset return self.heads @util.propertycache def canusebundle2(self): return _canusebundle2(self) @util.propertycache def remotebundle2caps(self): return bundle2.bundle2caps(self.remote) def gettransaction(self): # deprecated; talk to trmanager directly return self.trmanager.transaction() class transactionmanager(object): """An object to manage the life cycle of a transaction It creates the transaction on demand and calls the appropriate hooks when closing the transaction.""" def __init__(self, repo, source, url): self.repo = repo self.source = source self.url = url self._tr = None def transaction(self): """Return an open transaction object, constructing if necessary""" if not self._tr: trname = '%s\n%s' % (self.source, util.hidepassword(self.url)) self._tr = self.repo.transaction(trname) self._tr.hookargs['source'] = self.source self._tr.hookargs['url'] = self.url return self._tr def close(self): """close transaction if created""" if self._tr is not None: self._tr.close() def 
    release(self):
        """release transaction if created"""
        if self._tr is not None:
            self._tr.release()

def pull(repo, remote, heads=None, force=False, bookmarks=(), opargs=None,
         streamclonerequested=None):
    """Fetch repository data from a remote.

    This is the main function used to retrieve data from a remote repository.

    ``repo`` is the local repository to clone into.

    ``remote`` is a peer instance.

    ``heads`` is an iterable of revisions we want to pull. ``None`` (the
    default) means to pull everything from the remote.

    ``bookmarks`` is an iterable of bookmarks requesting to be pulled. By
    default, all remote bookmarks are pulled.

    ``opargs`` are additional keyword arguments to pass to ``pulloperation``
    initialization.

    ``streamclonerequested`` is a boolean indicating whether a "streaming
    clone" is requested. A "streaming clone" is essentially a raw file copy
    of revlogs from the server. This only works when the local repository is
    empty. The default value of ``None`` means to respect the server
    configuration for preferring stream clones.

    Returns the ``pulloperation`` created for this pull.
    """
    if opargs is None:
        opargs = {}
    pullop = pulloperation(repo, remote, heads, force, bookmarks=bookmarks,
                           streamclonerequested=streamclonerequested,
                           **opargs)
    # for a local (filesystem) peer we can check requirement compatibility
    # up front instead of failing partway through the pull
    if pullop.remote.local():
        missing = set(pullop.remote.requirements) - pullop.repo.supported
        if missing:
            msg = _("required features are not"
                    " supported in the destination:"
                    " %s") % (', '.join(sorted(missing)))
            raise error.Abort(msg)

    lock = pullop.repo.lock()
    try:
        pullop.trmanager = transactionmanager(repo, 'pull', remote.url())
        streamclone.maybeperformlegacystreamclone(pullop)
        # This should ideally be in _pullbundle2(). However, it needs to run
        # before discovery to avoid extra work.
_maybeapplyclonebundle(pullop) _pulldiscovery(pullop) if pullop.canusebundle2: _pullbundle2(pullop) _pullchangeset(pullop) _pullphase(pullop) _pullbookmarks(pullop) _pullobsolete(pullop) pullop.trmanager.close() finally: pullop.trmanager.release() lock.release() return pullop # list of steps to perform discovery before pull pulldiscoveryorder = [] # Mapping between step name and function # # This exists to help extensions wrap steps if necessary pulldiscoverymapping = {} def pulldiscovery(stepname): """decorator for function performing discovery before pull The function is added to the step -> function mapping and appended to the list of steps. Beware that decorated function will be added in order (this may matter). You can only use this decorator for a new step, if you want to wrap a step from an extension, change the pulldiscovery dictionary directly.""" def dec(func): assert stepname not in pulldiscoverymapping pulldiscoverymapping[stepname] = func pulldiscoveryorder.append(stepname) return func return dec def _pulldiscovery(pullop): """Run all discovery steps""" for stepname in pulldiscoveryorder: step = pulldiscoverymapping[stepname] step(pullop) @pulldiscovery('b1:bookmarks') def _pullbookmarkbundle1(pullop): """fetch bookmark data in bundle1 case If not using bundle2, we have to fetch bookmarks before changeset discovery to reduce the chance and impact of race conditions.""" if pullop.remotebookmarks is not None: return if pullop.canusebundle2 and 'listkeys' in pullop.remotebundle2caps: # all known bundle2 servers now support listkeys, but lets be nice with # new implementation. 
        return
    pullop.remotebookmarks = pullop.remote.listkeys('bookmarks')

@pulldiscovery('changegroup')
def _pulldiscoverychangegroup(pullop):
    """discovery phase for the pull

    Currently handles changeset discovery only; will change to handle all
    discovery at some point.

    Stores the results on ``pullop.common``, ``pullop.fetch`` and
    ``pullop.rheads``.
    """
    tmp = discovery.findcommonincoming(pullop.repo,
                                       pullop.remote,
                                       heads=pullop.heads,
                                       force=pullop.force)
    common, fetch, rheads = tmp
    # nodemap of the *unfiltered* repo: membership test tells us whether a
    # remote head exists locally even if it is hidden/filtered
    nm = pullop.repo.unfiltered().changelog.nodemap
    if fetch and rheads:
        # If a remote head is filtered locally, let's drop it from the
        # unknown remote heads and put it back in common.
        #
        # This is a hackish solution to catch most "common but locally
        # hidden" situations. We do not perform discovery on the unfiltered
        # repository because it ends up doing a pathological amount of round
        # trips for a huge amount of changesets we do not care about.
        #
        # If a set of such "common but filtered" changesets exists on the
        # server but is not reachable from a remote head, we will not be
        # able to detect it.
        scommon = set(common)
        filteredrheads = []
        for n in rheads:
            if n in nm:
                # known locally (possibly hidden): treat as common
                if n not in scommon:
                    common.append(n)
            else:
                filteredrheads.append(n)
        if not filteredrheads:
            # every remote head is already known: nothing to fetch
            fetch = []
        rheads = filteredrheads
    pullop.common = common
    pullop.fetch = fetch
    pullop.rheads = rheads

def _pullbundle2(pullop):
    """pull data using bundle2

    For now, the only supported data are changegroup."""
    kwargs = {'bundlecaps': caps20to10(pullop.repo)}

    streaming, streamreqs = streamclone.canperformstreamclone(pullop)

    # pulling changegroup
    pullop.stepsdone.add('changegroup')

    kwargs['common'] = pullop.common
    kwargs['heads'] = pullop.heads or pullop.rheads
    kwargs['cg'] = pullop.fetch
    if 'listkeys' in pullop.remotebundle2caps:
        kwargs['listkeys'] = ['phase']
        if pullop.remotebookmarks is None:
            # make sure to always include bookmark data when migrating
            # `hg incoming --bundle` to using this function.
kwargs['listkeys'].append('bookmarks') # If this is a full pull / clone and the server supports the clone bundles # feature, tell the server whether we attempted a clone bundle. The # presence of this flag indicates the client supports clone bundles. This # will enable the server to treat clients that support clone bundles # differently from those that don't. if (pullop.remote.capable('clonebundles') and pullop.heads is None and list(pullop.common) == [nullid]): kwargs['cbattempted'] = pullop.clonebundleattempted if streaming: pullop.repo.ui.status(_('streaming all changes\n')) elif not pullop.fetch: pullop.repo.ui.status(_("no changes found\n")) pullop.cgresult = 0 else: if pullop.heads is None and list(pullop.common) == [nullid]: pullop.repo.ui.status(_("requesting all changes\n")) if obsolete.isenabled(pullop.repo, obsolete.exchangeopt): remoteversions = bundle2.obsmarkersversion(pullop.remotebundle2caps) if obsolete.commonversion(remoteversions) is not None: kwargs['obsmarkers'] = True pullop.stepsdone.add('obsmarkers') _pullbundle2extraprepare(pullop, kwargs) bundle = pullop.remote.getbundle('pull', **kwargs) try: op = bundle2.processbundle(pullop.repo, bundle, pullop.gettransaction) except error.BundleValueError as exc: raise error.Abort('missing support for %s' % exc) if pullop.fetch: results = [cg['return'] for cg in op.records['changegroup']] pullop.cgresult = changegroup.combineresults(results) # processing phases change for namespace, value in op.records['listkeys']: if namespace == 'phases': _pullapplyphases(pullop, value) # processing bookmark update for namespace, value in op.records['listkeys']: if namespace == 'bookmarks': pullop.remotebookmarks = value # bookmark data were either already there or pulled in the bundle if pullop.remotebookmarks is not None: _pullbookmarks(pullop) def _pullbundle2extraprepare(pullop, kwargs): """hook function so that extensions can extend the getbundle call""" pass def _pullchangeset(pullop): """pull changeset from 
unbundle into the local repo""" # We delay the open of the transaction as late as possible so we # don't open transaction for nothing or you break future useful # rollback call if 'changegroup' in pullop.stepsdone: return pullop.stepsdone.add('changegroup') if not pullop.fetch: pullop.repo.ui.status(_("no changes found\n")) pullop.cgresult = 0 return pullop.gettransaction() if pullop.heads is None and list(pullop.common) == [nullid]: pullop.repo.ui.status(_("requesting all changes\n")) elif pullop.heads is None and pullop.remote.capable('changegroupsubset'): # issue1320, avoid a race if remote changed after discovery pullop.heads = pullop.rheads if pullop.remote.capable('getbundle'): # TODO: get bundlecaps from remote cg = pullop.remote.getbundle('pull', common=pullop.common, heads=pullop.heads or pullop.rheads) elif pullop.heads is None: cg = pullop.remote.changegroup(pullop.fetch, 'pull') elif not pullop.remote.capable('changegroupsubset'): raise error.Abort(_("partial pull cannot be done because " "other repository doesn't support " "changegroupsubset.")) else: cg = pullop.remote.changegroupsubset(pullop.fetch, pullop.heads, 'pull') pullop.cgresult = cg.apply(pullop.repo, 'pull', pullop.remote.url()) def _pullphase(pullop): # Get remote phases data from remote if 'phases' in pullop.stepsdone: return remotephases = pullop.remote.listkeys('phases') _pullapplyphases(pullop, remotephases) def _pullapplyphases(pullop, remotephases): """apply phase movement from observed remote state""" if 'phases' in pullop.stepsdone: return pullop.stepsdone.add('phases') publishing = bool(remotephases.get('publishing', False)) if remotephases and not publishing: # remote is new and unpublishing pheads, _dr = phases.analyzeremotephases(pullop.repo, pullop.pulledsubset, remotephases) dheads = pullop.pulledsubset else: # Remote is old or publishing all common changesets # should be seen as public pheads = pullop.pulledsubset dheads = [] unfi = pullop.repo.unfiltered() phase = 
unfi._phasecache.phase rev = unfi.changelog.nodemap.get public = phases.public draft = phases.draft # exclude changesets already public locally and update the others pheads = [pn for pn in pheads if phase(unfi, rev(pn)) > public] if pheads: tr = pullop.gettransaction() phases.advanceboundary(pullop.repo, tr, public, pheads) # exclude changesets already draft locally and update the others dheads = [pn for pn in dheads if phase(unfi, rev(pn)) > draft] if dheads: tr = pullop.gettransaction() phases.advanceboundary(pullop.repo, tr, draft, dheads) def _pullbookmarks(pullop): """process the remote bookmark information to update the local one""" if 'bookmarks' in pullop.stepsdone: return pullop.stepsdone.add('bookmarks') repo = pullop.repo remotebookmarks = pullop.remotebookmarks bookmod.updatefromremote(repo.ui, repo, remotebookmarks, pullop.remote.url(), pullop.gettransaction, explicit=pullop.explicitbookmarks) def _pullobsolete(pullop): """utility function to pull obsolete markers from a remote The `gettransaction` is function that return the pull transaction, creating one if necessary. We return the transaction to inform the calling code that a new transaction have been created (when applicable). 
    Exists mostly to allow overriding for experimentation purpose"""
    if 'obsmarkers' in pullop.stepsdone:
        return
    pullop.stepsdone.add('obsmarkers')
    tr = None
    if obsolete.isenabled(pullop.repo, obsolete.exchangeopt):
        pullop.repo.ui.debug('fetching remote obsolete markers\n')
        remoteobs = pullop.remote.listkeys('obsolete')
        if 'dump0' in remoteobs:
            # only open a transaction once we know there is data to apply
            tr = pullop.gettransaction()
            markers = []
            # decode every 'dump*' key; other keys are ignored
            for key in sorted(remoteobs, reverse=True):
                if key.startswith('dump'):
                    data = base85.b85decode(remoteobs[key])
                    version, newmarks = obsolete._readmarkers(data)
                    markers += newmarks
            if markers:
                pullop.repo.obsstore.add(tr, markers)
            pullop.repo.invalidatevolatilesets()
    return tr

def caps20to10(repo):
    """return a set with appropriate options to use bundle20 during getbundle"""
    caps = set(['HG20'])
    capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
    # URL-quote the capability blob so it survives the wire protocol
    caps.add('bundle2=' + urllib.quote(capsblob))
    return caps

# List of names of steps to perform for a bundle2 for getbundle, order matters.
getbundle2partsorder = []

# Mapping between step name and function
#
# This exists to help extensions wrap steps if necessary
getbundle2partsmapping = {}

def getbundle2partsgenerator(stepname, idx=None):
    """decorator for function generating bundle2 part for getbundle

    The function is added to the step -> function mapping and appended to the
    list of steps.  Beware that decorated functions will be added in order
    (this may matter).
You can only use this decorator for new steps, if you want to wrap a step from an extension, attack the getbundle2partsmapping dictionary directly.""" def dec(func): assert stepname not in getbundle2partsmapping getbundle2partsmapping[stepname] = func if idx is None: getbundle2partsorder.append(stepname) else: getbundle2partsorder.insert(idx, stepname) return func return dec def bundle2requested(bundlecaps): if bundlecaps is not None: return any(cap.startswith('HG2') for cap in bundlecaps) return False def getbundle(repo, source, heads=None, common=None, bundlecaps=None, **kwargs): """return a full bundle (with potentially multiple kind of parts) Could be a bundle HG10 or a bundle HG20 depending on bundlecaps passed. For now, the bundle can contain only changegroup, but this will changes when more part type will be available for bundle2. This is different from changegroup.getchangegroup that only returns an HG10 changegroup bundle. They may eventually get reunited in the future when we have a clearer idea of the API we what to query different data. The implementation is at a very early stage and will get massive rework when the API of bundle is refined. 
""" usebundle2 = bundle2requested(bundlecaps) # bundle10 case if not usebundle2: if bundlecaps and not kwargs.get('cg', True): raise ValueError(_('request for bundle10 must include changegroup')) if kwargs: raise ValueError(_('unsupported getbundle arguments: %s') % ', '.join(sorted(kwargs.keys()))) return changegroup.getchangegroup(repo, source, heads=heads, common=common, bundlecaps=bundlecaps) # bundle20 case b2caps = {} for bcaps in bundlecaps: if bcaps.startswith('bundle2='): blob = urllib.unquote(bcaps[len('bundle2='):]) b2caps.update(bundle2.decodecaps(blob)) bundler = bundle2.bundle20(repo.ui, b2caps) kwargs['heads'] = heads kwargs['common'] = common for name in getbundle2partsorder: func = getbundle2partsmapping[name] func(bundler, repo, source, bundlecaps=bundlecaps, b2caps=b2caps, **kwargs) return util.chunkbuffer(bundler.getchunks()) @getbundle2partsgenerator('changegroup') def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, common=None, **kwargs): """add a changegroup part to the requested bundle""" cg = None if kwargs.get('cg', True): # build changegroup bundle here. 
version = None cgversions = b2caps.get('changegroup') getcgkwargs = {} if cgversions: # 3.1 and 3.2 ship with an empty value cgversions = [v for v in cgversions if v in changegroup.supportedoutgoingversions(repo)] if not cgversions: raise ValueError(_('no common changegroup version')) version = getcgkwargs['version'] = max(cgversions) outgoing = changegroup.computeoutgoing(repo, heads, common) cg = changegroup.getlocalchangegroupraw(repo, source, outgoing, bundlecaps=bundlecaps, **getcgkwargs) if cg: part = bundler.newpart('changegroup', data=cg) if version is not None: part.addparam('version', version) part.addparam('nbchanges', str(len(outgoing.missing)), mandatory=False) if 'treemanifest' in repo.requirements: part.addparam('treemanifest', '1') @getbundle2partsgenerator('listkeys') def _getbundlelistkeysparts(bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs): """add parts containing listkeys namespaces to the requested bundle""" listkeys = kwargs.get('listkeys', ()) for namespace in listkeys: part = bundler.newpart('listkeys') part.addparam('namespace', namespace) keys = repo.listkeys(namespace).items() part.data = pushkey.encodekeys(keys) @getbundle2partsgenerator('obsmarkers') def _getbundleobsmarkerpart(bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, **kwargs): """add an obsolescence markers part to the requested bundle""" if kwargs.get('obsmarkers', False): if heads is None: heads = repo.heads() subset = [c.node() for c in repo.set('::%ln', heads)] markers = repo.obsstore.relevantmarkers(subset) markers = sorted(markers) buildobsmarkerspart(bundler, markers) @getbundle2partsgenerator('hgtagsfnodes') def _getbundletagsfnodes(bundler, repo, source, bundlecaps=None, b2caps=None, heads=None, common=None, **kwargs): """Transfer the .hgtags filenodes mapping. Only values for heads in this bundle will be transferred. The part data consists of pairs of 20 byte changeset node and .hgtags filenodes raw values. 
""" # Don't send unless: # - changeset are being exchanged, # - the client supports it. if not (kwargs.get('cg', True) and 'hgtagsfnodes' in b2caps): return outgoing = changegroup.computeoutgoing(repo, heads, common) if not outgoing.missingheads: return cache = tags.hgtagsfnodescache(repo.unfiltered()) chunks = [] # .hgtags fnodes are only relevant for head changesets. While we could # transfer values for all known nodes, there will likely be little to # no benefit. # # We don't bother using a generator to produce output data because # a) we only have 40 bytes per head and even esoteric numbers of heads # consume little memory (1M heads is 40MB) b) we don't want to send the # part if we don't have entries and knowing if we have entries requires # cache lookups. for node in outgoing.missingheads: # Don't compute missing, as this may slow down serving. fnode = cache.getfnode(node, computemissing=False) if fnode is not None: chunks.extend([node, fnode]) if chunks: bundler.newpart('hgtagsfnodes', data=''.join(chunks)) def check_heads(repo, their_heads, context): """check if the heads of a repo have been modified Used by peer for unbundling. """ heads = repo.heads() heads_hash = util.sha1(''.join(sorted(heads))).digest() if not (their_heads == ['force'] or their_heads == heads or their_heads == ['hashed', heads_hash]): # someone else committed/pushed/unbundled while we # were transferring data raise error.PushRaced('repository changed while %s - ' 'please try again' % context) def unbundle(repo, cg, heads, source, url): """Apply a bundle to a repo. this function makes sure the repo is locked during the application and have mechanism to check that no push race occurred between the creation of the bundle and its application. 
If the push was raced as PushRaced exception is raised.""" r = 0 # need a transaction when processing a bundle2 stream # [wlock, lock, tr] - needs to be an array so nested functions can modify it lockandtr = [None, None, None] recordout = None # quick fix for output mismatch with bundle2 in 3.4 captureoutput = repo.ui.configbool('experimental', 'bundle2-output-capture', False) if url.startswith('remote:http:') or url.startswith('remote:https:'): captureoutput = True try: check_heads(repo, heads, 'uploading changes') # push can proceed if util.safehasattr(cg, 'params'): r = None try: def gettransaction(): if not lockandtr[2]: lockandtr[0] = repo.wlock() lockandtr[1] = repo.lock() lockandtr[2] = repo.transaction(source) lockandtr[2].hookargs['source'] = source lockandtr[2].hookargs['url'] = url lockandtr[2].hookargs['bundle2'] = '1' return lockandtr[2] # Do greedy locking by default until we're satisfied with lazy # locking. if not repo.ui.configbool('experimental', 'bundle2lazylocking'): gettransaction() op = bundle2.bundleoperation(repo, gettransaction, captureoutput=captureoutput) try: op = bundle2.processbundle(repo, cg, op=op) finally: r = op.reply if captureoutput and r is not None: repo.ui.pushbuffer(error=True, subproc=True) def recordout(output): r.newpart('output', data=output, mandatory=False) if lockandtr[2] is not None: lockandtr[2].close() except BaseException as exc: exc.duringunbundle2 = True if captureoutput and r is not None: parts = exc._bundle2salvagedoutput = r.salvageoutput() def recordout(output): part = bundle2.bundlepart('output', data=output, mandatory=False) parts.append(part) raise else: lockandtr[1] = repo.lock() r = cg.apply(repo, source, url) finally: lockmod.release(lockandtr[2], lockandtr[1], lockandtr[0]) if recordout is not None: recordout(repo.ui.popbuffer()) return r def _maybeapplyclonebundle(pullop): """Apply a clone bundle from a remote, if possible.""" repo = pullop.repo remote = pullop.remote if not repo.ui.configbool('ui', 
'clonebundles', True): return # Only run if local repo is empty. if len(repo): return if pullop.heads: return if not remote.capable('clonebundles'): return res = remote._call('clonebundles') # If we call the wire protocol command, that's good enough to record the # attempt. pullop.clonebundleattempted = True entries = parseclonebundlesmanifest(repo, res) if not entries: repo.ui.note(_('no clone bundles available on remote; ' 'falling back to regular clone\n')) return entries = filterclonebundleentries(repo, entries) if not entries: # There is a thundering herd concern here. However, if a server # operator doesn't advertise bundles appropriate for its clients, # they deserve what's coming. Furthermore, from a client's # perspective, no automatic fallback would mean not being able to # clone! repo.ui.warn(_('no compatible clone bundles available on server; ' 'falling back to regular clone\n')) repo.ui.warn(_('(you may want to report this to the server ' 'operator)\n')) return entries = sortclonebundleentries(repo.ui, entries) url = entries[0]['URL'] repo.ui.status(_('applying clone bundle from %s\n') % url) if trypullbundlefromurl(repo.ui, repo, url): repo.ui.status(_('finished applying clone bundle\n')) # Bundle failed. # # We abort by default to avoid the thundering herd of # clients flooding a server that was expecting expensive # clone load to be offloaded. elif repo.ui.configbool('ui', 'clonebundlefallback', False): repo.ui.warn(_('falling back to normal clone\n')) else: raise error.Abort(_('error applying bundle'), hint=_('if this error persists, consider contacting ' 'the server operator or disable clone ' 'bundles via ' '"--config ui.clonebundles=false"')) def parseclonebundlesmanifest(repo, s): """Parses the raw text of a clone bundles manifest. Returns a list of dicts. The dicts have a ``URL`` key corresponding to the URL and other keys are the attributes for the entry. 
""" m = [] for line in s.splitlines(): fields = line.split() if not fields: continue attrs = {'URL': fields[0]} for rawattr in fields[1:]: key, value = rawattr.split('=', 1) key = urllib.unquote(key) value = urllib.unquote(value) attrs[key] = value # Parse BUNDLESPEC into components. This makes client-side # preferences easier to specify since you can prefer a single # component of the BUNDLESPEC. if key == 'BUNDLESPEC': try: comp, version, params = parsebundlespec(repo, value, externalnames=True) attrs['COMPRESSION'] = comp attrs['VERSION'] = version except error.InvalidBundleSpecification: pass except error.UnsupportedBundleSpecification: pass m.append(attrs) return m def filterclonebundleentries(repo, entries): """Remove incompatible clone bundle manifest entries. Accepts a list of entries parsed with ``parseclonebundlesmanifest`` and returns a new list consisting of only the entries that this client should be able to apply. There is no guarantee we'll be able to apply all returned entries because the metadata we use to filter on may be missing or wrong. """ newentries = [] for entry in entries: spec = entry.get('BUNDLESPEC') if spec: try: parsebundlespec(repo, spec, strict=True) except error.InvalidBundleSpecification as e: repo.ui.debug(str(e) + '\n') continue except error.UnsupportedBundleSpecification as e: repo.ui.debug('filtering %s because unsupported bundle ' 'spec: %s\n' % (entry['URL'], str(e))) continue if 'REQUIRESNI' in entry and not sslutil.hassni: repo.ui.debug('filtering %s because SNI not supported\n' % entry['URL']) continue newentries.append(entry) return newentries def sortclonebundleentries(ui, entries): prefers = ui.configlist('ui', 'clonebundleprefers', default=[]) if not prefers: return list(entries) prefers = [p.split('=', 1) for p in prefers] # Our sort function. def compareentry(a, b): for prefkey, prefvalue in prefers: avalue = a.get(prefkey) bvalue = b.get(prefkey) # Special case for b missing attribute and a matches exactly. 
if avalue is not None and bvalue is None and avalue == prefvalue: return -1 # Special case for a missing attribute and b matches exactly. if bvalue is not None and avalue is None and bvalue == prefvalue: return 1 # We can't compare unless attribute present on both. if avalue is None or bvalue is None: continue # Same values should fall back to next attribute. if avalue == bvalue: continue # Exact matches come first. if avalue == prefvalue: return -1 if bvalue == prefvalue: return 1 # Fall back to next attribute. continue # If we got here we couldn't sort by attributes and prefers. Fall # back to index order. return 0 return sorted(entries, cmp=compareentry) def trypullbundlefromurl(ui, repo, url): """Attempt to apply a bundle from a URL.""" lock = repo.lock() try: tr = repo.transaction('bundleurl') try: try: fh = urlmod.open(ui, url) cg = readbundle(ui, fh, 'stream') if isinstance(cg, bundle2.unbundle20): bundle2.processbundle(repo, cg, lambda: tr) elif isinstance(cg, streamclone.streamcloneapplier): cg.apply(repo) else: cg.apply(repo, 'clonebundles', url) tr.close() return True except urllib2.HTTPError as e: ui.warn(_('HTTP error fetching bundle: %s\n') % str(e)) except urllib2.URLError as e: ui.warn(_('error fetching bundle: %s\n') % e.reason[1]) return False finally: tr.release() finally: lock.release() mercurial-3.7.3/mercurial/dispatch.py0000644000175000017500000011160712676531525017271 0ustar mpmmpm00000000000000# dispatch.py - command dispatching for mercurial # # Copyright 2005-2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import, print_function import atexit import difflib import errno import os import pdb import re import shlex import signal import socket import sys import time import traceback from .i18n import _ from . 
import ( cmdutil, commands, demandimport, encoding, error, extensions, fancyopts, hg, hook, ui as uimod, util, ) class request(object): def __init__(self, args, ui=None, repo=None, fin=None, fout=None, ferr=None): self.args = args self.ui = ui self.repo = repo # input/output/error streams self.fin = fin self.fout = fout self.ferr = ferr def run(): "run the command in sys.argv" sys.exit((dispatch(request(sys.argv[1:])) or 0) & 255) def _getsimilar(symbols, value): sim = lambda x: difflib.SequenceMatcher(None, value, x).ratio() # The cutoff for similarity here is pretty arbitrary. It should # probably be investigated and tweaked. return [s for s in symbols if sim(s) > 0.6] def _reportsimilar(write, similar): if len(similar) == 1: write(_("(did you mean %s?)\n") % similar[0]) elif similar: ss = ", ".join(sorted(similar)) write(_("(did you mean one of %s?)\n") % ss) def _formatparse(write, inst): similar = [] if isinstance(inst, error.UnknownIdentifier): # make sure to check fileset first, as revset can invoke fileset similar = _getsimilar(inst.symbols, inst.function) if len(inst.args) > 1: write(_("hg: parse error at %s: %s\n") % (inst.args[1], inst.args[0])) if (inst.args[0][0] == ' '): write(_("unexpected leading whitespace\n")) else: write(_("hg: parse error: %s\n") % inst.args[0]) _reportsimilar(write, similar) def dispatch(req): "run the command specified in req.args" if req.ferr: ferr = req.ferr elif req.ui: ferr = req.ui.ferr else: ferr = sys.stderr try: if not req.ui: req.ui = uimod.ui() if '--traceback' in req.args: req.ui.setconfig('ui', 'traceback', 'on', '--traceback') # set ui streams from the request if req.fin: req.ui.fin = req.fin if req.fout: req.ui.fout = req.fout if req.ferr: req.ui.ferr = req.ferr except error.Abort as inst: ferr.write(_("abort: %s\n") % inst) if inst.hint: ferr.write(_("(%s)\n") % inst.hint) return -1 except error.ParseError as inst: _formatparse(ferr.write, inst) if inst.hint: ferr.write(_("(%s)\n") % inst.hint) return -1 msg = ' 
'.join(' ' in a and repr(a) or a for a in req.args) starttime = time.time() ret = None try: ret = _runcatch(req) return ret finally: duration = time.time() - starttime req.ui.log("commandfinish", "%s exited %s after %0.2f seconds\n", msg, ret or 0, duration) def _runcatch(req): def catchterm(*args): raise error.SignalInterrupt ui = req.ui try: for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM': num = getattr(signal, name, None) if num: signal.signal(num, catchterm) except ValueError: pass # happens if called in a thread try: try: debugger = 'pdb' debugtrace = { 'pdb' : pdb.set_trace } debugmortem = { 'pdb' : pdb.post_mortem } # read --config before doing anything else # (e.g. to change trust settings for reading .hg/hgrc) cfgs = _parseconfig(req.ui, _earlygetopt(['--config'], req.args)) if req.repo: # copy configs that were passed on the cmdline (--config) to # the repo ui for sec, name, val in cfgs: req.repo.ui.setconfig(sec, name, val, source='--config') # developer config: ui.debugger debugger = ui.config("ui", "debugger") debugmod = pdb if not debugger or ui.plain(): # if we are in HGPLAIN mode, then disable custom debugging debugger = 'pdb' elif '--debugger' in req.args: # This import can be slow for fancy debuggers, so only # do it when absolutely necessary, i.e. 
when actual # debugging has been requested with demandimport.deactivated(): try: debugmod = __import__(debugger) except ImportError: pass # Leave debugmod = pdb debugtrace[debugger] = debugmod.set_trace debugmortem[debugger] = debugmod.post_mortem # enter the debugger before command execution if '--debugger' in req.args: ui.warn(_("entering debugger - " "type c to continue starting hg or h for help\n")) if (debugger != 'pdb' and debugtrace[debugger] == debugtrace['pdb']): ui.warn(_("%s debugger specified " "but its module was not found\n") % debugger) with demandimport.deactivated(): debugtrace[debugger]() try: return _dispatch(req) finally: ui.flush() except: # re-raises # enter the debugger when we hit an exception if '--debugger' in req.args: traceback.print_exc() debugmortem[debugger](sys.exc_info()[2]) ui.traceback() raise # Global exception handling, alphabetically # Mercurial-specific first, followed by built-in and library exceptions except error.AmbiguousCommand as inst: ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") % (inst.args[0], " ".join(inst.args[1]))) except error.ParseError as inst: _formatparse(ui.warn, inst) if inst.hint: ui.warn(_("(%s)\n") % inst.hint) return -1 except error.LockHeld as inst: if inst.errno == errno.ETIMEDOUT: reason = _('timed out waiting for lock held by %s') % inst.locker else: reason = _('lock held by %s') % inst.locker ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason)) except error.LockUnavailable as inst: ui.warn(_("abort: could not lock %s: %s\n") % (inst.desc or inst.filename, inst.strerror)) except error.CommandError as inst: if inst.args[0]: ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1])) commands.help_(ui, inst.args[0], full=False, command=True) else: ui.warn(_("hg: %s\n") % inst.args[1]) commands.help_(ui, 'shortlist') except error.OutOfBandError as inst: if inst.args: msg = _("abort: remote error:\n") else: msg = _("abort: remote error\n") ui.warn(msg) if inst.args: 
ui.warn(''.join(inst.args)) if inst.hint: ui.warn('(%s)\n' % inst.hint) except error.RepoError as inst: ui.warn(_("abort: %s!\n") % inst) if inst.hint: ui.warn(_("(%s)\n") % inst.hint) except error.ResponseError as inst: ui.warn(_("abort: %s") % inst.args[0]) if not isinstance(inst.args[1], basestring): ui.warn(" %r\n" % (inst.args[1],)) elif not inst.args[1]: ui.warn(_(" empty string\n")) else: ui.warn("\n%r\n" % util.ellipsis(inst.args[1])) except error.CensoredNodeError as inst: ui.warn(_("abort: file censored %s!\n") % inst) except error.RevlogError as inst: ui.warn(_("abort: %s!\n") % inst) except error.SignalInterrupt: ui.warn(_("killed!\n")) except error.UnknownCommand as inst: ui.warn(_("hg: unknown command '%s'\n") % inst.args[0]) try: # check if the command is in a disabled extension # (but don't check for extensions themselves) commands.help_(ui, inst.args[0], unknowncmd=True) except (error.UnknownCommand, error.Abort): suggested = False if len(inst.args) == 2: sim = _getsimilar(inst.args[1], inst.args[0]) if sim: _reportsimilar(ui.warn, sim) suggested = True if not suggested: commands.help_(ui, 'shortlist') except error.InterventionRequired as inst: ui.warn("%s\n" % inst) if inst.hint: ui.warn(_("(%s)\n") % inst.hint) return 1 except error.Abort as inst: ui.warn(_("abort: %s\n") % inst) if inst.hint: ui.warn(_("(%s)\n") % inst.hint) except ImportError as inst: ui.warn(_("abort: %s!\n") % inst) m = str(inst).split()[-1] if m in "mpatch bdiff".split(): ui.warn(_("(did you forget to compile extensions?)\n")) elif m in "zlib".split(): ui.warn(_("(is your Python install correct?)\n")) except IOError as inst: if util.safehasattr(inst, "code"): ui.warn(_("abort: %s\n") % inst) elif util.safehasattr(inst, "reason"): try: # usually it is in the form (errno, strerror) reason = inst.reason.args[1] except (AttributeError, IndexError): # it might be anything, for example a string reason = inst.reason if isinstance(reason, unicode): # SSLError of Python 2.7.9 
contains a unicode reason = reason.encode(encoding.encoding, 'replace') ui.warn(_("abort: error: %s\n") % reason) elif (util.safehasattr(inst, "args") and inst.args and inst.args[0] == errno.EPIPE): pass elif getattr(inst, "strerror", None): if getattr(inst, "filename", None): ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename)) else: ui.warn(_("abort: %s\n") % inst.strerror) else: raise except OSError as inst: if getattr(inst, "filename", None) is not None: ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename)) else: ui.warn(_("abort: %s\n") % inst.strerror) except KeyboardInterrupt: try: ui.warn(_("interrupted!\n")) except IOError as inst: if inst.errno != errno.EPIPE: raise except MemoryError: ui.warn(_("abort: out of memory\n")) except SystemExit as inst: # Commands shouldn't sys.exit directly, but give a return code. # Just in case catch this and and pass exit code to caller. return inst.code except socket.error as inst: ui.warn(_("abort: %s\n") % inst.args[-1]) except: # re-raises # For compatibility checking, we discard the portion of the hg # version after the + on the assumption that if a "normal # user" is running a build with a + in it the packager # probably built from fairly close to a tag and anyone with a # 'make local' copy of hg (where the version number can be out # of date) will be clueful enough to notice the implausible # version number and try updating. ct = util.versiontuple(n=2) worst = None, ct, '' if ui.config('ui', 'supportcontact', None) is None: for name, mod in extensions.extensions(): testedwith = getattr(mod, 'testedwith', '') report = getattr(mod, 'buglink', _('the extension author.')) if not testedwith.strip(): # We found an untested extension. It's likely the culprit. worst = name, 'unknown', report break # Never blame on extensions bundled with Mercurial. 
if testedwith == 'internal': continue tested = [util.versiontuple(t, 2) for t in testedwith.split()] if ct in tested: continue lower = [t for t in tested if t < ct] nearest = max(lower or tested) if worst[0] is None or nearest < worst[1]: worst = name, nearest, report if worst[0] is not None: name, testedwith, report = worst if not isinstance(testedwith, str): testedwith = '.'.join([str(c) for c in testedwith]) warning = (_('** Unknown exception encountered with ' 'possibly-broken third-party extension %s\n' '** which supports versions %s of Mercurial.\n' '** Please disable %s and try your action again.\n' '** If that fixes the bug please report it to %s\n') % (name, testedwith, name, report)) else: bugtracker = ui.config('ui', 'supportcontact', None) if bugtracker is None: bugtracker = _("https://mercurial-scm.org/wiki/BugTracker") warning = (_("** unknown exception encountered, " "please report by visiting\n** ") + bugtracker + '\n') warning += ((_("** Python %s\n") % sys.version.replace('\n', '')) + (_("** Mercurial Distributed SCM (version %s)\n") % util.version()) + (_("** Extensions loaded: %s\n") % ", ".join([x[0] for x in extensions.extensions()]))) ui.log("commandexception", "%s\n%s\n", warning, traceback.format_exc()) ui.warn(warning) raise return -1 def aliasargs(fn, givenargs): args = getattr(fn, 'args', []) if args: cmd = ' '.join(map(util.shellquote, args)) nums = [] def replacer(m): num = int(m.group(1)) - 1 nums.append(num) if num < len(givenargs): return givenargs[num] raise error.Abort(_('too few arguments for command alias')) cmd = re.sub(r'\$(\d+|\$)', replacer, cmd) givenargs = [x for i, x in enumerate(givenargs) if i not in nums] args = shlex.split(cmd) return args + givenargs def aliasinterpolate(name, args, cmd): '''interpolate args into cmd for shell aliases This also handles $0, $@ and "$@". ''' # util.interpolate can't deal with "$@" (with quotes) because it's only # built to match prefix + patterns. 
replacemap = dict(('$%d' % (i + 1), arg) for i, arg in enumerate(args)) replacemap['$0'] = name replacemap['$$'] = '$' replacemap['$@'] = ' '.join(args) # Typical Unix shells interpolate "$@" (with quotes) as all the positional # parameters, separated out into words. Emulate the same behavior here by # quoting the arguments individually. POSIX shells will then typically # tokenize each argument into exactly one word. replacemap['"$@"'] = ' '.join(util.shellquote(arg) for arg in args) # escape '\$' for regex regex = '|'.join(replacemap.keys()).replace('$', r'\$') r = re.compile(regex) return r.sub(lambda x: replacemap[x.group()], cmd) class cmdalias(object): def __init__(self, name, definition, cmdtable): self.name = self.cmd = name self.cmdname = '' self.definition = definition self.fn = None self.args = [] self.opts = [] self.help = '' self.norepo = True self.optionalrepo = False self.inferrepo = False self.badalias = None self.unknowncmd = False try: aliases, entry = cmdutil.findcmd(self.name, cmdtable) for alias, e in cmdtable.iteritems(): if e is entry: self.cmd = alias break self.shadows = True except error.UnknownCommand: self.shadows = False if not self.definition: self.badalias = _("no definition for alias '%s'") % self.name return if self.definition.startswith('!'): self.shell = True def fn(ui, *args): env = {'HG_ARGS': ' '.join((self.name,) + args)} def _checkvar(m): if m.groups()[0] == '$': return m.group() elif int(m.groups()[0]) <= len(args): return m.group() else: ui.debug("No argument found for substitution " "of %i variable in alias '%s' definition." 
% (int(m.groups()[0]), self.name)) return '' cmd = re.sub(r'\$(\d+|\$)', _checkvar, self.definition[1:]) cmd = aliasinterpolate(self.name, args, cmd) return ui.system(cmd, environ=env) self.fn = fn return try: args = shlex.split(self.definition) except ValueError as inst: self.badalias = (_("error in definition for alias '%s': %s") % (self.name, inst)) return self.cmdname = cmd = args.pop(0) args = map(util.expandpath, args) for invalidarg in ("--cwd", "-R", "--repository", "--repo", "--config"): if _earlygetopt([invalidarg], args): self.badalias = (_("error in definition for alias '%s': %s may " "only be given on the command line") % (self.name, invalidarg)) return try: tableentry = cmdutil.findcmd(cmd, cmdtable, False)[1] if len(tableentry) > 2: self.fn, self.opts, self.help = tableentry else: self.fn, self.opts = tableentry self.args = aliasargs(self.fn, args) if cmd not in commands.norepo.split(' '): self.norepo = False if cmd in commands.optionalrepo.split(' '): self.optionalrepo = True if cmd in commands.inferrepo.split(' '): self.inferrepo = True if self.help.startswith("hg " + cmd): # drop prefix in old-style help lines so hg shows the alias self.help = self.help[4 + len(cmd):] self.__doc__ = self.fn.__doc__ except error.UnknownCommand: self.badalias = (_("alias '%s' resolves to unknown command '%s'") % (self.name, cmd)) self.unknowncmd = True except error.AmbiguousCommand: self.badalias = (_("alias '%s' resolves to ambiguous command '%s'") % (self.name, cmd)) def __call__(self, ui, *args, **opts): if self.badalias: hint = None if self.unknowncmd: try: # check if the command is in a disabled extension cmd, ext = extensions.disabledcmd(ui, self.cmdname)[:2] hint = _("'%s' is provided by '%s' extension") % (cmd, ext) except error.UnknownCommand: pass raise error.Abort(self.badalias, hint=hint) if self.shadows: ui.debug("alias '%s' shadows command '%s'\n" % (self.name, self.cmdname)) if util.safehasattr(self, 'shell'): return self.fn(ui, *args, **opts) else: 
try: return util.checksignature(self.fn)(ui, *args, **opts) except error.SignatureError: args = ' '.join([self.cmdname] + self.args) ui.debug("alias '%s' expands to '%s'\n" % (self.name, args)) raise def addaliases(ui, cmdtable): # aliases are processed after extensions have been loaded, so they # may use extension commands. Aliases can also use other alias definitions, # but only if they have been defined prior to the current definition. for alias, definition in ui.configitems('alias'): aliasdef = cmdalias(alias, definition, cmdtable) try: olddef = cmdtable[aliasdef.cmd][0] if olddef.definition == aliasdef.definition: continue except (KeyError, AttributeError): # definition might not exist or it might not be a cmdalias pass cmdtable[aliasdef.name] = (aliasdef, aliasdef.opts, aliasdef.help) if aliasdef.norepo: commands.norepo += ' %s' % alias if aliasdef.optionalrepo: commands.optionalrepo += ' %s' % alias if aliasdef.inferrepo: commands.inferrepo += ' %s' % alias def _parse(ui, args): options = {} cmdoptions = {} try: args = fancyopts.fancyopts(args, commands.globalopts, options) except fancyopts.getopt.GetoptError as inst: raise error.CommandError(None, inst) if args: cmd, args = args[0], args[1:] aliases, entry = cmdutil.findcmd(cmd, commands.table, ui.configbool("ui", "strict")) cmd = aliases[0] args = aliasargs(entry[0], args) defaults = ui.config("defaults", cmd) if defaults: args = map(util.expandpath, shlex.split(defaults)) + args c = list(entry[1]) else: cmd = None c = [] # combine global options into local for o in commands.globalopts: c.append((o[0], o[1], options[o[1]], o[3])) try: args = fancyopts.fancyopts(args, c, cmdoptions, True) except fancyopts.getopt.GetoptError as inst: raise error.CommandError(cmd, inst) # separate global options back out for o in commands.globalopts: n = o[1] options[n] = cmdoptions[n] del cmdoptions[n] return (cmd, cmd and entry[0] or None, args, options, cmdoptions) def _parseconfig(ui, config): """parse the --config 
options from the command line""" configs = [] for cfg in config: try: name, value = cfg.split('=', 1) section, name = name.split('.', 1) if not section or not name: raise IndexError ui.setconfig(section, name, value, '--config') configs.append((section, name, value)) except (IndexError, ValueError): raise error.Abort(_('malformed --config option: %r ' '(use --config section.name=value)') % cfg) return configs def _earlygetopt(aliases, args): """Return list of values for an option (or aliases). The values are listed in the order they appear in args. The options and values are removed from args. >>> args = ['x', '--cwd', 'foo', 'y'] >>> _earlygetopt(['--cwd'], args), args (['foo'], ['x', 'y']) >>> args = ['x', '--cwd=bar', 'y'] >>> _earlygetopt(['--cwd'], args), args (['bar'], ['x', 'y']) >>> args = ['x', '-R', 'foo', 'y'] >>> _earlygetopt(['-R'], args), args (['foo'], ['x', 'y']) >>> args = ['x', '-Rbar', 'y'] >>> _earlygetopt(['-R'], args), args (['bar'], ['x', 'y']) """ try: argcount = args.index("--") except ValueError: argcount = len(args) shortopts = [opt for opt in aliases if len(opt) == 2] values = [] pos = 0 while pos < argcount: fullarg = arg = args[pos] equals = arg.find('=') if equals > -1: arg = arg[:equals] if arg in aliases: del args[pos] if equals > -1: values.append(fullarg[equals + 1:]) argcount -= 1 else: if pos + 1 >= argcount: # ignore and let getopt report an error if there is no value break values.append(args.pop(pos)) argcount -= 2 elif arg[:2] in shortopts: # short option can have no following space, e.g. 
hg log -Rfoo values.append(args.pop(pos)[2:]) argcount -= 1 else: pos += 1 return values def runcommand(lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions): # run pre-hook, and abort if it fails hook.hook(lui, repo, "pre-%s" % cmd, True, args=" ".join(fullargs), pats=cmdpats, opts=cmdoptions) ret = _runcommand(ui, options, cmd, d) # run post-hook, passing command result hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs), result=ret, pats=cmdpats, opts=cmdoptions) return ret def _getlocal(ui, rpath): """Return (path, local ui object) for the given target path. Takes paths in [cwd]/.hg/hgrc into account." """ try: wd = os.getcwd() except OSError as e: raise error.Abort(_("error getting current working directory: %s") % e.strerror) path = cmdutil.findrepo(wd) or "" if not path: lui = ui else: lui = ui.copy() lui.readconfig(os.path.join(path, ".hg", "hgrc"), path) if rpath and rpath[-1]: path = lui.expandpath(rpath[-1]) lui = ui.copy() lui.readconfig(os.path.join(path, ".hg", "hgrc"), path) return path, lui def _checkshellalias(lui, ui, args, precheck=True): """Return the function to run the shell alias, if it is required 'precheck' is whether this function is invoked before adding aliases or not. 
""" options = {} try: args = fancyopts.fancyopts(args, commands.globalopts, options) except fancyopts.getopt.GetoptError: return if not args: return if precheck: strict = True norepo = commands.norepo optionalrepo = commands.optionalrepo inferrepo = commands.inferrepo def restorecommands(): commands.norepo = norepo commands.optionalrepo = optionalrepo commands.inferrepo = inferrepo cmdtable = commands.table.copy() addaliases(lui, cmdtable) else: strict = False def restorecommands(): pass cmdtable = commands.table cmd = args[0] try: aliases, entry = cmdutil.findcmd(cmd, cmdtable, strict) except (error.AmbiguousCommand, error.UnknownCommand): restorecommands() return cmd = aliases[0] fn = entry[0] if cmd and util.safehasattr(fn, 'shell'): d = lambda: fn(ui, *args[1:]) return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d, [], {}) restorecommands() _loaded = set() def _dispatch(req): args = req.args ui = req.ui # check for cwd cwd = _earlygetopt(['--cwd'], args) if cwd: os.chdir(cwd[-1]) rpath = _earlygetopt(["-R", "--repository", "--repo"], args) path, lui = _getlocal(ui, rpath) # Now that we're operating in the right directory/repository with # the right config settings, check for shell aliases shellaliasfn = _checkshellalias(lui, ui, args) if shellaliasfn: return shellaliasfn() # Configure extensions in phases: uisetup, extsetup, cmdtable, and # reposetup. Programs like TortoiseHg will call _dispatch several # times so we keep track of configured extensions in _loaded. 
extensions.loadall(lui) exts = [ext for ext in extensions.extensions() if ext[0] not in _loaded] # Propagate any changes to lui.__class__ by extensions ui.__class__ = lui.__class__ # (uisetup and extsetup are handled in extensions.loadall) for name, module in exts: cmdtable = getattr(module, 'cmdtable', {}) overrides = [cmd for cmd in cmdtable if cmd in commands.table] if overrides: ui.warn(_("extension '%s' overrides commands: %s\n") % (name, " ".join(overrides))) commands.table.update(cmdtable) _loaded.add(name) # (reposetup is handled in hg.repository) addaliases(lui, commands.table) if not lui.configbool("ui", "strict"): # All aliases and commands are completely defined, now. # Check abbreviation/ambiguity of shell alias again, because shell # alias may cause failure of "_parse" (see issue4355) shellaliasfn = _checkshellalias(lui, ui, args, precheck=False) if shellaliasfn: return shellaliasfn() # check for fallback encoding fallback = lui.config('ui', 'fallbackencoding') if fallback: encoding.fallbackencoding = fallback fullargs = args cmd, func, args, options, cmdoptions = _parse(lui, args) if options["config"]: raise error.Abort(_("option --config may not be abbreviated!")) if options["cwd"]: raise error.Abort(_("option --cwd may not be abbreviated!")) if options["repository"]: raise error.Abort(_( "option -R has to be separated from other options (e.g. 
not -qR) " "and --repository may only be abbreviated as --repo!")) if options["encoding"]: encoding.encoding = options["encoding"] if options["encodingmode"]: encoding.encodingmode = options["encodingmode"] if options["time"]: def get_times(): t = os.times() if t[4] == 0.0: # Windows leaves this as zero, so use time.clock() t = (t[0], t[1], t[2], t[3], time.clock()) return t s = get_times() def print_time(): t = get_times() ui.warn(_("time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") % (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3])) atexit.register(print_time) uis = set([ui, lui]) if req.repo: uis.add(req.repo.ui) if options['verbose'] or options['debug'] or options['quiet']: for opt in ('verbose', 'debug', 'quiet'): val = str(bool(options[opt])) for ui_ in uis: ui_.setconfig('ui', opt, val, '--' + opt) if options['traceback']: for ui_ in uis: ui_.setconfig('ui', 'traceback', 'on', '--traceback') if options['noninteractive']: for ui_ in uis: ui_.setconfig('ui', 'interactive', 'off', '-y') if cmdoptions.get('insecure', False): for ui_ in uis: ui_.setconfig('web', 'cacerts', '!', '--insecure') if options['version']: return commands.version_(ui) if options['help']: return commands.help_(ui, cmd, command=cmd is not None) elif not cmd: return commands.help_(ui, 'shortlist') repo = None cmdpats = args[:] if cmd not in commands.norepo.split(): # use the repo from the request only if we don't have -R if not rpath and not cwd: repo = req.repo if repo: # set the descriptors of the repo ui to those of ui repo.ui.fin = ui.fin repo.ui.fout = ui.fout repo.ui.ferr = ui.ferr else: try: repo = hg.repository(ui, path=path) if not repo.local(): raise error.Abort(_("repository '%s' is not local") % path) repo.ui.setconfig("bundle", "mainreporoot", repo.root, 'repo') except error.RequirementError: raise except error.RepoError: if rpath and rpath[-1]: # invalid -R path raise if cmd not in commands.optionalrepo.split(): if (cmd in commands.inferrepo.split() and args and 
not path): # try to infer -R from command args repos = map(cmdutil.findrepo, args) guess = repos[0] if guess and repos.count(guess) == len(repos): req.args = ['--repository', guess] + fullargs return _dispatch(req) if not path: raise error.RepoError(_("no repository found in '%s'" " (.hg not found)") % os.getcwd()) raise if repo: ui = repo.ui if options['hidden']: repo = repo.unfiltered() args.insert(0, repo) elif rpath: ui.warn(_("warning: --repository ignored\n")) msg = ' '.join(' ' in a and repr(a) or a for a in fullargs) ui.log("command", '%s\n', msg) d = lambda: util.checksignature(func)(ui, *args, **cmdoptions) try: return runcommand(lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions) finally: if repo and repo != req.repo: repo.close() def lsprofile(ui, func, fp): format = ui.config('profiling', 'format', default='text') field = ui.config('profiling', 'sort', default='inlinetime') limit = ui.configint('profiling', 'limit', default=30) climit = ui.configint('profiling', 'nested', default=0) if format not in ['text', 'kcachegrind']: ui.warn(_("unrecognized profiling format '%s'" " - Ignored\n") % format) format = 'text' try: from . import lsprof except ImportError: raise error.Abort(_( 'lsprof not available - install from ' 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/')) p = lsprof.Profiler() p.enable(subcalls=True) try: return func() finally: p.disable() if format == 'kcachegrind': from . 
import lsprofcalltree calltree = lsprofcalltree.KCacheGrind(p) calltree.output(fp) else: # format == 'text' stats = lsprof.Stats(p.getstats()) stats.sort(field) stats.pprint(limit=limit, file=fp, climit=climit) def flameprofile(ui, func, fp): try: from flamegraph import flamegraph except ImportError: raise error.Abort(_( 'flamegraph not available - install from ' 'https://github.com/evanhempel/python-flamegraph')) # developer config: profiling.freq freq = ui.configint('profiling', 'freq', default=1000) filter_ = None collapse_recursion = True thread = flamegraph.ProfileThread(fp, 1.0 / freq, filter_, collapse_recursion) start_time = time.clock() try: thread.start() func() finally: thread.stop() thread.join() print('Collected %d stack frames (%d unique) in %2.2f seconds.' % ( time.clock() - start_time, thread.num_frames(), thread.num_frames(unique=True))) def statprofile(ui, func, fp): try: import statprof except ImportError: raise error.Abort(_( 'statprof not available - install using "easy_install statprof"')) freq = ui.configint('profiling', 'freq', default=1000) if freq > 0: statprof.reset(freq) else: ui.warn(_("invalid sampling frequency '%s' - ignoring\n") % freq) statprof.start() try: return func() finally: statprof.stop() statprof.display(fp) def _runcommand(ui, options, cmd, cmdfunc): """Enables the profiler if applicable. 
``profiling.enabled`` - boolean config that enables or disables profiling """ def checkargs(): try: return cmdfunc() except error.SignatureError: raise error.CommandError(cmd, _("invalid arguments")) if options['profile'] or ui.configbool('profiling', 'enabled'): profiler = os.getenv('HGPROF') if profiler is None: profiler = ui.config('profiling', 'type', default='ls') if profiler not in ('ls', 'stat', 'flame'): ui.warn(_("unrecognized profiler '%s' - ignored\n") % profiler) profiler = 'ls' output = ui.config('profiling', 'output') if output == 'blackbox': import StringIO fp = StringIO.StringIO() elif output: path = ui.expandpath(output) fp = open(path, 'wb') else: fp = sys.stderr try: if profiler == 'ls': return lsprofile(ui, checkargs, fp) elif profiler == 'flame': return flameprofile(ui, checkargs, fp) else: return statprofile(ui, checkargs, fp) finally: if output: if output == 'blackbox': val = "Profile:\n%s" % fp.getvalue() # ui.log treats the input as a format string, # so we need to escape any % signs. val = val.replace('%', '%%') ui.log('profile', val) fp.close() else: return checkargs() mercurial-3.7.3/mercurial/pathutil.py0000644000175000017500000001647712676531525017335 0ustar mpmmpm00000000000000from __future__ import absolute_import import errno import os import posixpath import stat from .i18n import _ from . import ( encoding, error, util, ) def _lowerclean(s): return encoding.hfsignoreclean(s.lower()) class pathauditor(object): '''ensure that a filesystem path contains no banned components. the following properties of a path are checked: - ends with a directory separator - under top-level .hg - starts at the root of a windows drive - contains ".." More check are also done about the file system states: - traverses a symlink (e.g. a/symlink_here/b) - inside a nested repository (a callback can be used to approve some nested repositories, e.g., subrepositories) The file system checks are only done when 'realfs' is set to True (the default). 
They should be disable then we are auditing path for operation on stored history. ''' def __init__(self, root, callback=None, realfs=True): self.audited = set() self.auditeddir = set() self.root = root self._realfs = realfs self.callback = callback if os.path.lexists(root) and not util.checkcase(root): self.normcase = util.normcase else: self.normcase = lambda x: x def __call__(self, path): '''Check the relative path. path may contain a pattern (e.g. foodir/**.txt)''' path = util.localpath(path) normpath = self.normcase(path) if normpath in self.audited: return # AIX ignores "/" at end of path, others raise EISDIR. if util.endswithsep(path): raise error.Abort(_("path ends in directory separator: %s") % path) parts = util.splitpath(path) if (os.path.splitdrive(path)[0] or _lowerclean(parts[0]) in ('.hg', '.hg.', '') or os.pardir in parts): raise error.Abort(_("path contains illegal component: %s") % path) # Windows shortname aliases for p in parts: if "~" in p: first, last = p.split("~", 1) if last.isdigit() and first.upper() in ["HG", "HG8B6C"]: raise error.Abort(_("path contains illegal component: %s") % path) if '.hg' in _lowerclean(path): lparts = [_lowerclean(p.lower()) for p in parts] for p in '.hg', '.hg.': if p in lparts[1:]: pos = lparts.index(p) base = os.path.join(*parts[:pos]) raise error.Abort(_("path '%s' is inside nested repo %r") % (path, base)) normparts = util.splitpath(normpath) assert len(parts) == len(normparts) parts.pop() normparts.pop() prefixes = [] while parts: prefix = os.sep.join(parts) normprefix = os.sep.join(normparts) if normprefix in self.auditeddir: break if self._realfs: self._checkfs(prefix, path) prefixes.append(normprefix) parts.pop() normparts.pop() self.audited.add(normpath) # only add prefixes to the cache after checking everything: we don't # want to add "foo/bar/baz" before checking if there's a "foo/.hg" self.auditeddir.update(prefixes) def _checkfs(self, prefix, path): """raise exception if a file system backed check 
fails""" curpath = os.path.join(self.root, prefix) try: st = os.lstat(curpath) except OSError as err: # EINVAL can be raised as invalid path syntax under win32. # They must be ignored for patterns can be checked too. if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL): raise else: if stat.S_ISLNK(st.st_mode): msg = _('path %r traverses symbolic link %r') % (path, prefix) raise error.Abort(msg) elif (stat.S_ISDIR(st.st_mode) and os.path.isdir(os.path.join(curpath, '.hg'))): if not self.callback or not self.callback(curpath): msg = _("path '%s' is inside nested repo %r") raise error.Abort(msg % (path, prefix)) def check(self, path): try: self(path) return True except (OSError, error.Abort): return False def canonpath(root, cwd, myname, auditor=None): '''return the canonical path of myname, given cwd and root''' if util.endswithsep(root): rootsep = root else: rootsep = root + os.sep name = myname if not os.path.isabs(name): name = os.path.join(root, cwd, name) name = os.path.normpath(name) if auditor is None: auditor = pathauditor(root) if name != rootsep and name.startswith(rootsep): name = name[len(rootsep):] auditor(name) return util.pconvert(name) elif name == root: return '' else: # Determine whether `name' is in the hierarchy at or beneath `root', # by iterating name=dirname(name) until that causes no change (can't # check name == '/', because that doesn't work on windows). The list # `rel' holds the reversed list of components making up the relative # file name we want. rel = [] while True: try: s = util.samefile(name, root) except OSError: s = False if s: if not rel: # name was actually the same as root (maybe a symlink) return '' rel.reverse() name = os.path.join(*rel) auditor(name) return util.pconvert(name) dirname, basename = util.split(name) rel.append(basename) if dirname == name: break name = dirname # A common mistake is to use -R, but specify a file relative to the repo # instead of cwd. Detect that case, and provide a hint to the user. 
hint = None try: if cwd != root: canonpath(root, root, myname, auditor) hint = (_("consider using '--cwd %s'") % os.path.relpath(root, cwd)) except error.Abort: pass raise error.Abort(_("%s not under root '%s'") % (myname, root), hint=hint) def normasprefix(path): '''normalize the specified path as path prefix Returned value can be used safely for "p.startswith(prefix)", "p[len(prefix):]", and so on. For efficiency, this expects "path" argument to be already normalized by "os.path.normpath", "os.path.realpath", and so on. See also issue3033 for detail about need of this function. >>> normasprefix('/foo/bar').replace(os.sep, '/') '/foo/bar/' >>> normasprefix('/').replace(os.sep, '/') '/' ''' d, p = os.path.splitdrive(path) if len(p) != len(os.sep): return path + os.sep else: return path # forward two methods from posixpath that do what we need, but we'd # rather not let our internals know that we're thinking in posix terms # - instead we'll let them be oblivious. join = posixpath.join dirname = posixpath.dirname mercurial-3.7.3/mercurial/help.py0000644000175000017500000005157212676531525016426 0ustar mpmmpm00000000000000# help.py - help data for mercurial # # Copyright 2006 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import itertools import os import textwrap from .i18n import ( _, gettext, ) from . 
import ( cmdutil, encoding, error, extensions, filemerge, fileset, minirst, revset, templatefilters, templatekw, templater, util, ) from .hgweb import ( webcommands, ) _exclkeywords = [ "(DEPRECATED)", "(EXPERIMENTAL)", # i18n: "(DEPRECATED)" is a keyword, must be translated consistently _("(DEPRECATED)"), # i18n: "(EXPERIMENTAL)" is a keyword, must be translated consistently _("(EXPERIMENTAL)"), ] def listexts(header, exts, indent=1, showdeprecated=False): '''return a text listing of the given extensions''' rst = [] if exts: for name, desc in sorted(exts.iteritems()): if not showdeprecated and any(w in desc for w in _exclkeywords): continue rst.append('%s:%s: %s\n' % (' ' * indent, name, desc)) if rst: rst.insert(0, '\n%s\n\n' % header) return rst def extshelp(ui): rst = loaddoc('extensions')(ui).splitlines(True) rst.extend(listexts( _('enabled extensions:'), extensions.enabled(), showdeprecated=True)) rst.extend(listexts(_('disabled extensions:'), extensions.disabled())) doc = ''.join(rst) return doc def optrst(header, options, verbose): data = [] multioccur = False for option in options: if len(option) == 5: shortopt, longopt, default, desc, optlabel = option else: shortopt, longopt, default, desc = option optlabel = _("VALUE") # default label if not verbose and any(w in desc for w in _exclkeywords): continue so = '' if shortopt: so = '-' + shortopt lo = '--' + longopt if default: desc += _(" (default: %s)") % default if isinstance(default, list): lo += " %s [+]" % optlabel multioccur = True elif (default is not None) and not isinstance(default, bool): lo += " %s" % optlabel data.append((so, lo, desc)) if multioccur: header += (_(" ([+] can be repeated)")) rst = ['\n%s:\n\n' % header] rst.extend(minirst.maketable(data, 1)) return ''.join(rst) def indicateomitted(rst, omitted, notomitted=None): rst.append('\n\n.. container:: omitted\n\n %s\n\n' % omitted) if notomitted: rst.append('\n\n.. 
container:: notomitted\n\n %s\n\n' % notomitted) def filtercmd(ui, cmd, kw, doc): if not ui.debugflag and cmd.startswith("debug") and kw != "debug": return True if not ui.verbose and doc and any(w in doc for w in _exclkeywords): return True return False def topicmatch(ui, kw): """Return help topics matching kw. Returns {'section': [(name, summary), ...], ...} where section is one of topics, commands, extensions, or extensioncommands. """ kw = encoding.lower(kw) def lowercontains(container): return kw in encoding.lower(container) # translated in helptable results = {'topics': [], 'commands': [], 'extensions': [], 'extensioncommands': [], } for names, header, doc in helptable: # Old extensions may use a str as doc. if (sum(map(lowercontains, names)) or lowercontains(header) or (callable(doc) and lowercontains(doc(ui)))): results['topics'].append((names[0], header)) from . import commands # avoid cycle for cmd, entry in commands.table.iteritems(): if len(entry) == 3: summary = entry[2] else: summary = '' # translate docs *before* searching there docs = _(getattr(entry[0], '__doc__', None)) or '' if kw in cmd or lowercontains(summary) or lowercontains(docs): doclines = docs.splitlines() if doclines: summary = doclines[0] cmdname = cmd.partition('|')[0].lstrip('^') if filtercmd(ui, cmdname, kw, docs): continue results['commands'].append((cmdname, summary)) for name, docs in itertools.chain( extensions.enabled(False).iteritems(), extensions.disabled().iteritems()): mod = extensions.load(ui, name, '') name = name.rpartition('.')[-1] if lowercontains(name) or lowercontains(docs): # extension docs are already translated results['extensions'].append((name, docs.splitlines()[0])) for cmd, entry in getattr(mod, 'cmdtable', {}).iteritems(): if kw in cmd or (len(entry) > 2 and lowercontains(entry[2])): cmdname = cmd.partition('|')[0].lstrip('^') if entry[0].__doc__: cmddoc = gettext(entry[0].__doc__).splitlines()[0] else: cmddoc = _('(no help text available)') if filtercmd(ui, 
cmdname, kw, cmddoc): continue results['extensioncommands'].append((cmdname, cmddoc)) return results def loaddoc(topic, subdir=None): """Return a delayed loader for help/topic.txt.""" def loader(ui): docdir = os.path.join(util.datapath, 'help') if subdir: docdir = os.path.join(docdir, subdir) path = os.path.join(docdir, topic + ".txt") doc = gettext(util.readfile(path)) for rewriter in helphooks.get(topic, []): doc = rewriter(ui, topic, doc) return doc return loader internalstable = sorted([ (['bundles'], _('container for exchange of repository data'), loaddoc('bundles', subdir='internals')), (['changegroups'], _('representation of revlog data'), loaddoc('changegroups', subdir='internals')), (['revlogs'], _('revision storage mechanism'), loaddoc('revlogs', subdir='internals')), ]) def internalshelp(ui): """Generate the index for the "internals" topic.""" lines = [] for names, header, doc in internalstable: lines.append(' :%s: %s\n' % (names[0], header)) return ''.join(lines) helptable = sorted([ (["config", "hgrc"], _("Configuration Files"), loaddoc('config')), (["dates"], _("Date Formats"), loaddoc('dates')), (["patterns"], _("File Name Patterns"), loaddoc('patterns')), (['environment', 'env'], _('Environment Variables'), loaddoc('environment')), (['revisions', 'revs'], _('Specifying Single Revisions'), loaddoc('revisions')), (['multirevs', 'mrevs'], _('Specifying Multiple Revisions'), loaddoc('multirevs')), (['revsets', 'revset'], _("Specifying Revision Sets"), loaddoc('revsets')), (['filesets', 'fileset'], _("Specifying File Sets"), loaddoc('filesets')), (['diffs'], _('Diff Formats'), loaddoc('diffs')), (['merge-tools', 'mergetools'], _('Merge Tools'), loaddoc('merge-tools')), (['templating', 'templates', 'template', 'style'], _('Template Usage'), loaddoc('templates')), (['urls'], _('URL Paths'), loaddoc('urls')), (["extensions"], _("Using Additional Features"), extshelp), (["subrepos", "subrepo"], _("Subrepositories"), loaddoc('subrepos')), (["hgweb"], 
_("Configuring hgweb"), loaddoc('hgweb')), (["glossary"], _("Glossary"), loaddoc('glossary')), (["hgignore", "ignore"], _("Syntax for Mercurial Ignore Files"), loaddoc('hgignore')), (["phases"], _("Working with Phases"), loaddoc('phases')), (['scripting'], _('Using Mercurial from scripts and automation'), loaddoc('scripting')), (['internals'], _("Technical implementation topics"), internalshelp), ]) # Maps topics with sub-topics to a list of their sub-topics. subtopics = { 'internals': internalstable, } # Map topics to lists of callable taking the current topic help and # returning the updated version helphooks = {} def addtopichook(topic, rewriter): helphooks.setdefault(topic, []).append(rewriter) def makeitemsdoc(ui, topic, doc, marker, items, dedent=False): """Extract docstring from the items key to function mapping, build a single documentation block and use it to overwrite the marker in doc. """ entries = [] for name in sorted(items): text = (items[name].__doc__ or '').rstrip() if (not text or not ui.verbose and any(w in text for w in _exclkeywords)): continue text = gettext(text) if dedent: text = textwrap.dedent(text) lines = text.splitlines() doclines = [(lines[0])] for l in lines[1:]: # Stop once we find some Python doctest if l.strip().startswith('>>>'): break if dedent: doclines.append(l.rstrip()) else: doclines.append(' ' + l.strip()) entries.append('\n'.join(doclines)) entries = '\n\n'.join(entries) return doc.replace(marker, entries) def addtopicsymbols(topic, marker, symbols, dedent=False): def add(ui, topic, doc): return makeitemsdoc(ui, topic, doc, marker, symbols, dedent=dedent) addtopichook(topic, add) addtopicsymbols('filesets', '.. predicatesmarker', fileset.symbols) addtopicsymbols('merge-tools', '.. internaltoolsmarker', filemerge.internalsdoc) addtopicsymbols('revsets', '.. predicatesmarker', revset.symbols) addtopicsymbols('templates', '.. keywordsmarker', templatekw.keywords) addtopicsymbols('templates', '.. 
filtersmarker', templatefilters.filters) addtopicsymbols('templates', '.. functionsmarker', templater.funcs) addtopicsymbols('hgweb', '.. webcommandsmarker', webcommands.commands, dedent=True) def help_(ui, name, unknowncmd=False, full=True, subtopic=None, **opts): ''' Generate the help for 'name' as unformatted restructured text. If 'name' is None, describe the commands available. ''' from . import commands # avoid cycle def helpcmd(name, subtopic=None): try: aliases, entry = cmdutil.findcmd(name, commands.table, strict=unknowncmd) except error.AmbiguousCommand as inst: # py3k fix: except vars can't be used outside the scope of the # except block, nor can be used inside a lambda. python issue4617 prefix = inst.args[0] select = lambda c: c.lstrip('^').startswith(prefix) rst = helplist(select) return rst rst = [] # check if it's an invalid alias and display its error if it is if getattr(entry[0], 'badalias', None): rst.append(entry[0].badalias + '\n') if entry[0].unknowncmd: try: rst.extend(helpextcmd(entry[0].cmdname)) except error.UnknownCommand: pass return rst # synopsis if len(entry) > 2: if entry[2].startswith('hg'): rst.append("%s\n" % entry[2]) else: rst.append('hg %s %s\n' % (aliases[0], entry[2])) else: rst.append('hg %s\n' % aliases[0]) # aliases if full and not ui.quiet and len(aliases) > 1: rst.append(_("\naliases: %s\n") % ', '.join(aliases[1:])) rst.append('\n') # description doc = gettext(entry[0].__doc__) if not doc: doc = _("(no help text available)") if util.safehasattr(entry[0], 'definition'): # aliased command if entry[0].definition.startswith('!'): # shell alias doc = _('shell alias for::\n\n %s') % entry[0].definition[1:] else: doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc) doc = doc.splitlines(True) if ui.quiet or not full: rst.append(doc[0]) else: rst.extend(doc) rst.append('\n') # check if this command shadows a non-trivial (multi-line) # extension help text try: mod = extensions.find(name) doc = gettext(mod.__doc__) or '' 
if '\n' in doc.strip(): msg = _('(use "hg help -e %s" to show help for ' 'the %s extension)') % (name, name) rst.append('\n%s\n' % msg) except KeyError: pass # options if not ui.quiet and entry[1]: rst.append(optrst(_("options"), entry[1], ui.verbose)) if ui.verbose: rst.append(optrst(_("global options"), commands.globalopts, ui.verbose)) if not ui.verbose: if not full: rst.append(_('\n(use "hg %s -h" to show more help)\n') % name) elif not ui.quiet: rst.append(_('\n(some details hidden, use --verbose ' 'to show complete help)')) return rst def helplist(select=None, **opts): # list of commands if name == "shortlist": header = _('basic commands:\n\n') elif name == "debug": header = _('debug commands (internal and unsupported):\n\n') else: header = _('list of commands:\n\n') h = {} cmds = {} for c, e in commands.table.iteritems(): f = c.partition("|")[0] if select and not select(f): continue if (not select and name != 'shortlist' and e[0].__module__ != commands.__name__): continue if name == "shortlist" and not f.startswith("^"): continue f = f.lstrip("^") doc = e[0].__doc__ if filtercmd(ui, f, name, doc): continue doc = gettext(doc) if not doc: doc = _("(no help text available)") h[f] = doc.splitlines()[0].rstrip() cmds[f] = c.lstrip("^") rst = [] if not h: if not ui.quiet: rst.append(_('no commands defined\n')) return rst if not ui.quiet: rst.append(header) fns = sorted(h) for f in fns: if ui.verbose: commacmds = cmds[f].replace("|",", ") rst.append(" :%s: %s\n" % (commacmds, h[f])) else: rst.append(' :%s: %s\n' % (f, h[f])) ex = opts.get anyopts = (ex('keyword') or not (ex('command') or ex('extension'))) if not name and anyopts: exts = listexts(_('enabled extensions:'), extensions.enabled()) if exts: rst.append('\n') rst.extend(exts) rst.append(_("\nadditional help topics:\n\n")) topics = [] for names, header, doc in helptable: topics.append((names[0], header)) for t, desc in topics: rst.append(" :%s: %s\n" % (t, desc)) if ui.quiet: pass elif ui.verbose: 
rst.append('\n%s\n' % optrst(_("global options"), commands.globalopts, ui.verbose)) if name == 'shortlist': rst.append(_('\n(use "hg help" for the full list ' 'of commands)\n')) else: if name == 'shortlist': rst.append(_('\n(use "hg help" for the full list of commands ' 'or "hg -v" for details)\n')) elif name and not full: rst.append(_('\n(use "hg help %s" to show the full help ' 'text)\n') % name) elif name and cmds and name in cmds.keys(): rst.append(_('\n(use "hg help -v -e %s" to show built-in ' 'aliases and global options)\n') % name) else: rst.append(_('\n(use "hg help -v%s" to show built-in aliases ' 'and global options)\n') % (name and " " + name or "")) return rst def helptopic(name, subtopic=None): # Look for sub-topic entry first. header, doc = None, None if subtopic and name in subtopics: for names, header, doc in subtopics[name]: if subtopic in names: break if not header: for names, header, doc in helptable: if name in names: break else: raise error.UnknownCommand(name) rst = [minirst.section(header)] # description if not doc: rst.append(" %s\n" % _("(no help text available)")) if callable(doc): rst += [" %s\n" % l for l in doc(ui).splitlines()] if not ui.verbose: omitted = _('(some details hidden, use --verbose' ' to show complete help)') indicateomitted(rst, omitted) try: cmdutil.findcmd(name, commands.table) rst.append(_('\nuse "hg help -c %s" to see help for ' 'the %s command\n') % (name, name)) except error.UnknownCommand: pass return rst def helpext(name, subtopic=None): try: mod = extensions.find(name) doc = gettext(mod.__doc__) or _('no help text available') except KeyError: mod = None doc = extensions.disabledext(name) if not doc: raise error.UnknownCommand(name) if '\n' not in doc: head, tail = doc, "" else: head, tail = doc.split('\n', 1) rst = [_('%s extension - %s\n\n') % (name.rpartition('.')[-1], head)] if tail: rst.extend(tail.splitlines(True)) rst.append('\n') if not ui.verbose: omitted = _('(some details hidden, use --verbose' ' to 
show complete help)') indicateomitted(rst, omitted) if mod: try: ct = mod.cmdtable except AttributeError: ct = {} modcmds = set([c.partition('|')[0] for c in ct]) rst.extend(helplist(modcmds.__contains__)) else: rst.append(_('(use "hg help extensions" for information on enabling' ' extensions)\n')) return rst def helpextcmd(name, subtopic=None): cmd, ext, mod = extensions.disabledcmd(ui, name, ui.configbool('ui', 'strict')) doc = gettext(mod.__doc__).splitlines()[0] rst = listexts(_("'%s' is provided by the following " "extension:") % cmd, {ext: doc}, indent=4, showdeprecated=True) rst.append('\n') rst.append(_('(use "hg help extensions" for information on enabling ' 'extensions)\n')) return rst rst = [] kw = opts.get('keyword') if kw or name is None and any(opts[o] for o in opts): matches = topicmatch(ui, name or '') helpareas = [] if opts.get('extension'): helpareas += [('extensions', _('Extensions'))] if opts.get('command'): helpareas += [('commands', _('Commands'))] if not helpareas: helpareas = [('topics', _('Topics')), ('commands', _('Commands')), ('extensions', _('Extensions')), ('extensioncommands', _('Extension Commands'))] for t, title in helpareas: if matches[t]: rst.append('%s:\n\n' % title) rst.extend(minirst.maketable(sorted(matches[t]), 1)) rst.append('\n') if not rst: msg = _('no matches') hint = _('try "hg help" for a list of topics') raise error.Abort(msg, hint=hint) elif name and name != 'shortlist': queries = [] if unknowncmd: queries += [helpextcmd] if opts.get('extension'): queries += [helpext] if opts.get('command'): queries += [helpcmd] if not queries: queries = (helptopic, helpcmd, helpext, helpextcmd) for f in queries: try: rst = f(name, subtopic) break except error.UnknownCommand: pass else: if unknowncmd: raise error.UnknownCommand(name) else: msg = _('no such help topic: %s') % name hint = _('try "hg help --keyword %s"') % name raise error.Abort(msg, hint=hint) else: # program name if not ui.quiet: rst = [_("Mercurial Distributed 
SCM\n"), '\n'] rst.extend(helplist(None, **opts)) return ''.join(rst) mercurial-3.7.3/mercurial/i18n.py0000644000175000017500000000640512676531525016250 0ustar mpmmpm00000000000000# i18n.py - internationalization support for mercurial # # Copyright 2005, 2006 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import gettext as gettextmod import locale import os import sys from . import encoding # modelled after templater.templatepath: if getattr(sys, 'frozen', None) is not None: module = sys.executable else: module = __file__ _languages = None if (os.name == 'nt' and 'LANGUAGE' not in os.environ and 'LC_ALL' not in os.environ and 'LC_MESSAGES' not in os.environ and 'LANG' not in os.environ): # Try to detect UI language by "User Interface Language Management" API # if no locale variables are set. Note that locale.getdefaultlocale() # uses GetLocaleInfo(), which may be different from UI language. # (See http://msdn.microsoft.com/en-us/library/dd374098(v=VS.85).aspx ) try: import ctypes langid = ctypes.windll.kernel32.GetUserDefaultUILanguage() _languages = [locale.windows_locale[langid]] except (ImportError, AttributeError, KeyError): # ctypes not found or unknown langid pass _ugettext = None def setdatapath(datapath): localedir = os.path.join(datapath, 'locale') t = gettextmod.translation('hg', localedir, _languages, fallback=True) global _ugettext _ugettext = t.ugettext _msgcache = {} def gettext(message): """Translate message. The message is looked up in the catalog to get a Unicode string, which is encoded in the local encoding before being returned. Important: message is restricted to characters in the encoding given by sys.getdefaultencoding() which is most likely 'ascii'. """ # If message is None, t.ugettext will return u'None' as the # translation whereas our callers expect us to return None. 
if message is None or not _ugettext: return message if message not in _msgcache: if type(message) is unicode: # goofy unicode docstrings in test paragraphs = message.split(u'\n\n') else: paragraphs = [p.decode("ascii") for p in message.split('\n\n')] # Be careful not to translate the empty string -- it holds the # meta data of the .po file. u = u'\n\n'.join([p and _ugettext(p) or '' for p in paragraphs]) try: # encoding.tolocal cannot be used since it will first try to # decode the Unicode string. Calling u.decode(enc) really # means u.encode(sys.getdefaultencoding()).decode(enc). Since # the Python encoding defaults to 'ascii', this fails if the # translated string use non-ASCII characters. _msgcache[message] = u.encode(encoding.encoding, "replace") except LookupError: # An unknown encoding results in a LookupError. _msgcache[message] = message return _msgcache[message] def _plain(): if 'HGPLAIN' not in os.environ and 'HGPLAINEXCEPT' not in os.environ: return False exceptions = os.environ.get('HGPLAINEXCEPT', '').strip().split(',') return 'i18n' not in exceptions if _plain(): _ = lambda message: message else: _ = gettext mercurial-3.7.3/mercurial/cmdutil.py0000644000175000017500000037077612676531524017150 0ustar mpmmpm00000000000000# cmdutil.py - help for command processing in mercurial # # Copyright 2005-2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
from node import hex, bin, nullid, nullrev, short from i18n import _ import os, sys, errno, re, tempfile, cStringIO import util, scmutil, templater, patch, error, templatekw, revlog, copies import match as matchmod import repair, graphmod, revset, phases, obsolete, pathutil import changelog import bookmarks import encoding import formatter import crecord as crecordmod import lock as lockmod def ishunk(x): hunkclasses = (crecordmod.uihunk, patch.recordhunk) return isinstance(x, hunkclasses) def newandmodified(chunks, originalchunks): newlyaddedandmodifiedfiles = set() for chunk in chunks: if ishunk(chunk) and chunk.header.isnewfile() and chunk not in \ originalchunks: newlyaddedandmodifiedfiles.add(chunk.header.filename()) return newlyaddedandmodifiedfiles def parsealiases(cmd): return cmd.lstrip("^").split("|") def setupwrapcolorwrite(ui): # wrap ui.write so diff output can be labeled/colorized def wrapwrite(orig, *args, **kw): label = kw.pop('label', '') for chunk, l in patch.difflabel(lambda: args): orig(chunk, label=label + l) oldwrite = ui.write def wrap(*args, **kwargs): return wrapwrite(oldwrite, *args, **kwargs) setattr(ui, 'write', wrap) return oldwrite def filterchunks(ui, originalhunks, usecurses, testfile, operation=None): if usecurses: if testfile: recordfn = crecordmod.testdecorator(testfile, crecordmod.testchunkselector) else: recordfn = crecordmod.chunkselector return crecordmod.filterpatch(ui, originalhunks, recordfn, operation) else: return patch.filterpatch(ui, originalhunks, operation) def recordfilter(ui, originalhunks, operation=None): """ Prompts the user to filter the originalhunks and return a list of selected hunks. *operation* is used for ui purposes to indicate the user what kind of filtering they are doing: reverting, committing, shelving, etc. *operation* has to be a translated string. 
""" usecurses = crecordmod.checkcurses(ui) testfile = ui.config('experimental', 'crecordtest', None) oldwrite = setupwrapcolorwrite(ui) try: newchunks, newopts = filterchunks(ui, originalhunks, usecurses, testfile, operation) finally: ui.write = oldwrite return newchunks, newopts def dorecord(ui, repo, commitfunc, cmdsuggest, backupall, filterfn, *pats, **opts): import merge as mergemod if not ui.interactive(): if cmdsuggest: msg = _('running non-interactively, use %s instead') % cmdsuggest else: msg = _('running non-interactively') raise error.Abort(msg) # make sure username is set before going interactive if not opts.get('user'): ui.username() # raise exception, username not provided def recordfunc(ui, repo, message, match, opts): """This is generic record driver. Its job is to interactively filter local changes, and accordingly prepare working directory into a state in which the job can be delegated to a non-interactive commit command such as 'commit' or 'qrefresh'. After the actual job is done by non-interactive command, the working directory is restored to its original state. In the end we'll record interesting changes, and everything else will be left in place, so the user can continue working. """ checkunfinished(repo, commit=True) merge = len(repo[None].parents()) > 1 if merge: raise error.Abort(_('cannot partially commit a merge ' '(use "hg commit" instead)')) status = repo.status(match=match) diffopts = patch.difffeatureopts(ui, opts=opts, whitespace=True) diffopts.nodates = True diffopts.git = True diffopts.showfunc = True originaldiff = patch.diff(repo, changes=status, opts=diffopts) originalchunks = patch.parsepatch(originaldiff) # 1. 
filter patch, so we have intending-to apply subset of it try: chunks, newopts = filterfn(ui, originalchunks) except patch.PatchError as err: raise error.Abort(_('error parsing patch: %s') % err) opts.update(newopts) # We need to keep a backup of files that have been newly added and # modified during the recording process because there is a previous # version without the edit in the workdir newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks) contenders = set() for h in chunks: try: contenders.update(set(h.files())) except AttributeError: pass changed = status.modified + status.added + status.removed newfiles = [f for f in changed if f in contenders] if not newfiles: ui.status(_('no changes to record\n')) return 0 modified = set(status.modified) # 2. backup changed files, so we can restore them in the end if backupall: tobackup = changed else: tobackup = [f for f in newfiles if f in modified or f in \ newlyaddedandmodifiedfiles] backups = {} if tobackup: backupdir = repo.join('record-backups') try: os.mkdir(backupdir) except OSError as err: if err.errno != errno.EEXIST: raise try: # backup continues for f in tobackup: fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.', dir=backupdir) os.close(fd) ui.debug('backup %r as %r\n' % (f, tmpname)) util.copyfile(repo.wjoin(f), tmpname, copystat=True) backups[f] = tmpname fp = cStringIO.StringIO() for c in chunks: fname = c.filename() if fname in backups: c.write(fp) dopatch = fp.tell() fp.seek(0) [os.unlink(repo.wjoin(c)) for c in newlyaddedandmodifiedfiles] # 3a. apply filtered patch to clean repo (clean) if backups: # Equivalent to hg.revert m = scmutil.matchfiles(repo, backups.keys()) mergemod.update(repo, repo.dirstate.p1(), False, True, matcher=m) # 3b. (apply) if dopatch: try: ui.debug('applying patch\n') ui.debug(fp.getvalue()) patch.internalpatch(ui, repo, fp, 1, eolmode=None) except patch.PatchError as err: raise error.Abort(str(err)) del fp # 4. 
We prepared working directory according to filtered # patch. Now is the time to delegate the job to # commit/qrefresh or the like! # Make all of the pathnames absolute. newfiles = [repo.wjoin(nf) for nf in newfiles] return commitfunc(ui, repo, *newfiles, **opts) finally: # 5. finally restore backed-up files try: dirstate = repo.dirstate for realname, tmpname in backups.iteritems(): ui.debug('restoring %r to %r\n' % (tmpname, realname)) if dirstate[realname] == 'n': # without normallookup, restoring timestamp # may cause partially committed files # to be treated as unmodified dirstate.normallookup(realname) # copystat=True here and above are a hack to trick any # editors that have f open that we haven't modified them. # # Also note that this racy as an editor could notice the # file's mtime before we've finished writing it. util.copyfile(tmpname, repo.wjoin(realname), copystat=True) os.unlink(tmpname) if tobackup: os.rmdir(backupdir) except OSError: pass def recordinwlock(ui, repo, message, match, opts): with repo.wlock(): return recordfunc(ui, repo, message, match, opts) return commit(ui, repo, recordinwlock, pats, opts) def findpossible(cmd, table, strict=False): """ Return cmd -> (aliases, command table entry) for each matching command. Return debug commands (or their aliases) only if no normal command matches. 
""" choice = {} debugchoice = {} if cmd in table: # short-circuit exact matches, "log" alias beats "^log|history" keys = [cmd] else: keys = table.keys() allcmds = [] for e in keys: aliases = parsealiases(e) allcmds.extend(aliases) found = None if cmd in aliases: found = cmd elif not strict: for a in aliases: if a.startswith(cmd): found = a break if found is not None: if aliases[0].startswith("debug") or found.startswith("debug"): debugchoice[found] = (aliases, table[e]) else: choice[found] = (aliases, table[e]) if not choice and debugchoice: choice = debugchoice return choice, allcmds def findcmd(cmd, table, strict=True): """Return (aliases, command table entry) for command string.""" choice, allcmds = findpossible(cmd, table, strict) if cmd in choice: return choice[cmd] if len(choice) > 1: clist = choice.keys() clist.sort() raise error.AmbiguousCommand(cmd, clist) if choice: return choice.values()[0] raise error.UnknownCommand(cmd, allcmds) def findrepo(p): while not os.path.isdir(os.path.join(p, ".hg")): oldp, p = p, os.path.dirname(p) if p == oldp: return None return p def bailifchanged(repo, merge=True): if merge and repo.dirstate.p2() != nullid: raise error.Abort(_('outstanding uncommitted merge')) modified, added, removed, deleted = repo.status()[:4] if modified or added or removed or deleted: raise error.Abort(_('uncommitted changes')) ctx = repo[None] for s in sorted(ctx.substate): ctx.sub(s).bailifchanged() def logmessage(ui, opts): """ get the log message according to -m and -l option """ message = opts.get('message') logfile = opts.get('logfile') if message and logfile: raise error.Abort(_('options --message and --logfile are mutually ' 'exclusive')) if not message and logfile: try: if logfile == '-': message = ui.fin.read() else: message = '\n'.join(util.readfile(logfile).splitlines()) except IOError as inst: raise error.Abort(_("can't read commit message '%s': %s") % (logfile, inst.strerror)) return message def mergeeditform(ctxorbool, baseformname): 
"""return appropriate editform name (referencing a committemplate) 'ctxorbool' is either a ctx to be committed, or a bool indicating whether merging is committed. This returns baseformname with '.merge' appended if it is a merge, otherwise '.normal' is appended. """ if isinstance(ctxorbool, bool): if ctxorbool: return baseformname + ".merge" elif 1 < len(ctxorbool.parents()): return baseformname + ".merge" return baseformname + ".normal" def getcommiteditor(edit=False, finishdesc=None, extramsg=None, editform='', **opts): """get appropriate commit message editor according to '--edit' option 'finishdesc' is a function to be called with edited commit message (= 'description' of the new changeset) just after editing, but before checking empty-ness. It should return actual text to be stored into history. This allows to change description before storing. 'extramsg' is a extra message to be shown in the editor instead of 'Leave message empty to abort commit' line. 'HG: ' prefix and EOL is automatically added. 'editform' is a dot-separated list of names, to distinguish the purpose of commit text editing. 'getcommiteditor' returns 'commitforceeditor' regardless of 'edit', if one of 'finishdesc' or 'extramsg' is specified, because they are specific for usage in MQ. 
""" if edit or finishdesc or extramsg: return lambda r, c, s: commitforceeditor(r, c, s, finishdesc=finishdesc, extramsg=extramsg, editform=editform) elif editform: return lambda r, c, s: commiteditor(r, c, s, editform=editform) else: return commiteditor def loglimit(opts): """get the log limit according to option -l/--limit""" limit = opts.get('limit') if limit: try: limit = int(limit) except ValueError: raise error.Abort(_('limit must be a positive integer')) if limit <= 0: raise error.Abort(_('limit must be positive')) else: limit = None return limit def makefilename(repo, pat, node, desc=None, total=None, seqno=None, revwidth=None, pathname=None): node_expander = { 'H': lambda: hex(node), 'R': lambda: str(repo.changelog.rev(node)), 'h': lambda: short(node), 'm': lambda: re.sub('[^\w]', '_', str(desc)) } expander = { '%': lambda: '%', 'b': lambda: os.path.basename(repo.root), } try: if node: expander.update(node_expander) if node: expander['r'] = (lambda: str(repo.changelog.rev(node)).zfill(revwidth or 0)) if total is not None: expander['N'] = lambda: str(total) if seqno is not None: expander['n'] = lambda: str(seqno) if total is not None and seqno is not None: expander['n'] = lambda: str(seqno).zfill(len(str(total))) if pathname is not None: expander['s'] = lambda: os.path.basename(pathname) expander['d'] = lambda: os.path.dirname(pathname) or '.' 
expander['p'] = lambda: pathname newname = [] patlen = len(pat) i = 0 while i < patlen: c = pat[i] if c == '%': i += 1 c = pat[i] c = expander[c]() newname.append(c) i += 1 return ''.join(newname) except KeyError as inst: raise error.Abort(_("invalid format spec '%%%s' in output filename") % inst.args[0]) class _unclosablefile(object): def __init__(self, fp): self._fp = fp def close(self): pass def __iter__(self): return iter(self._fp) def __getattr__(self, attr): return getattr(self._fp, attr) def makefileobj(repo, pat, node=None, desc=None, total=None, seqno=None, revwidth=None, mode='wb', modemap=None, pathname=None): writable = mode not in ('r', 'rb') if not pat or pat == '-': if writable: fp = repo.ui.fout else: fp = repo.ui.fin return _unclosablefile(fp) if util.safehasattr(pat, 'write') and writable: return pat if util.safehasattr(pat, 'read') and 'r' in mode: return pat fn = makefilename(repo, pat, node, desc, total, seqno, revwidth, pathname) if modemap is not None: mode = modemap.get(fn, mode) if mode == 'wb': modemap[fn] = 'ab' return open(fn, mode) def openrevlog(repo, cmd, file_, opts): """opens the changelog, manifest, a filelog or a given revlog""" cl = opts['changelog'] mf = opts['manifest'] dir = opts['dir'] msg = None if cl and mf: msg = _('cannot specify --changelog and --manifest at the same time') elif cl and dir: msg = _('cannot specify --changelog and --dir at the same time') elif cl or mf: if file_: msg = _('cannot specify filename with --changelog or --manifest') elif not repo: msg = _('cannot specify --changelog or --manifest or --dir ' 'without a repository') if msg: raise error.Abort(msg) r = None if repo: if cl: r = repo.unfiltered().changelog elif dir: if 'treemanifest' not in repo.requirements: raise error.Abort(_("--dir can only be used on repos with " "treemanifest enabled")) dirlog = repo.dirlog(file_) if len(dirlog): r = dirlog elif mf: r = repo.manifest elif file_: filelog = repo.file(file_) if len(filelog): r = filelog if not r: 
if not file_: raise error.CommandError(cmd, _('invalid arguments')) if not os.path.isfile(file_): raise error.Abort(_("revlog '%s' not found") % file_) r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_[:-2] + ".i") return r def copy(ui, repo, pats, opts, rename=False): # called with the repo lock held # # hgsep => pathname that uses "/" to separate directories # ossep => pathname that uses os.sep to separate directories cwd = repo.getcwd() targets = {} after = opts.get("after") dryrun = opts.get("dry_run") wctx = repo[None] def walkpat(pat): srcs = [] if after: badstates = '?' else: badstates = '?r' m = scmutil.match(repo[None], [pat], opts, globbed=True) for abs in repo.walk(m): state = repo.dirstate[abs] rel = m.rel(abs) exact = m.exact(abs) if state in badstates: if exact and state == '?': ui.warn(_('%s: not copying - file is not managed\n') % rel) if exact and state == 'r': ui.warn(_('%s: not copying - file has been marked for' ' remove\n') % rel) continue # abs: hgsep # rel: ossep srcs.append((abs, rel, exact)) return srcs # abssrc: hgsep # relsrc: ossep # otarget: ossep def copyfile(abssrc, relsrc, otarget, exact): abstarget = pathutil.canonpath(repo.root, cwd, otarget) if '/' in abstarget: # We cannot normalize abstarget itself, this would prevent # case only renames, like a => A. 
abspath, absname = abstarget.rsplit('/', 1) abstarget = repo.dirstate.normalize(abspath) + '/' + absname reltarget = repo.pathto(abstarget, cwd) target = repo.wjoin(abstarget) src = repo.wjoin(abssrc) state = repo.dirstate[abstarget] scmutil.checkportable(ui, abstarget) # check for collisions prevsrc = targets.get(abstarget) if prevsrc is not None: ui.warn(_('%s: not overwriting - %s collides with %s\n') % (reltarget, repo.pathto(abssrc, cwd), repo.pathto(prevsrc, cwd))) return # check for overwrites exists = os.path.lexists(target) samefile = False if exists and abssrc != abstarget: if (repo.dirstate.normalize(abssrc) == repo.dirstate.normalize(abstarget)): if not rename: ui.warn(_("%s: can't copy - same file\n") % reltarget) return exists = False samefile = True if not after and exists or after and state in 'mn': if not opts['force']: ui.warn(_('%s: not overwriting - file exists\n') % reltarget) return if after: if not exists: if rename: ui.warn(_('%s: not recording move - %s does not exist\n') % (relsrc, reltarget)) else: ui.warn(_('%s: not recording copy - %s does not exist\n') % (relsrc, reltarget)) return elif not dryrun: try: if exists: os.unlink(target) targetdir = os.path.dirname(target) or '.' 
if not os.path.isdir(targetdir): os.makedirs(targetdir) if samefile: tmp = target + "~hgrename" os.rename(src, tmp) os.rename(tmp, target) else: util.copyfile(src, target) srcexists = True except IOError as inst: if inst.errno == errno.ENOENT: ui.warn(_('%s: deleted in working directory\n') % relsrc) srcexists = False else: ui.warn(_('%s: cannot copy - %s\n') % (relsrc, inst.strerror)) return True # report a failure if ui.verbose or not exact: if rename: ui.status(_('moving %s to %s\n') % (relsrc, reltarget)) else: ui.status(_('copying %s to %s\n') % (relsrc, reltarget)) targets[abstarget] = abssrc # fix up dirstate scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget, dryrun=dryrun, cwd=cwd) if rename and not dryrun: if not after and srcexists and not samefile: util.unlinkpath(repo.wjoin(abssrc)) wctx.forget([abssrc]) # pat: ossep # dest ossep # srcs: list of (hgsep, hgsep, ossep, bool) # return: function that takes hgsep and returns ossep def targetpathfn(pat, dest, srcs): if os.path.isdir(pat): abspfx = pathutil.canonpath(repo.root, cwd, pat) abspfx = util.localpath(abspfx) if destdirexists: striplen = len(os.path.split(abspfx)[0]) else: striplen = len(abspfx) if striplen: striplen += len(os.sep) res = lambda p: os.path.join(dest, util.localpath(p)[striplen:]) elif destdirexists: res = lambda p: os.path.join(dest, os.path.basename(util.localpath(p))) else: res = lambda p: dest return res # pat: ossep # dest ossep # srcs: list of (hgsep, hgsep, ossep, bool) # return: function that takes hgsep and returns ossep def targetpathafterfn(pat, dest, srcs): if matchmod.patkind(pat): # a mercurial pattern res = lambda p: os.path.join(dest, os.path.basename(util.localpath(p))) else: abspfx = pathutil.canonpath(repo.root, cwd, pat) if len(abspfx) < len(srcs[0][0]): # A directory. Either the target path contains the last # component of the source path or it does not. 
def evalpath(striplen): score = 0 for s in srcs: t = os.path.join(dest, util.localpath(s[0])[striplen:]) if os.path.lexists(t): score += 1 return score abspfx = util.localpath(abspfx) striplen = len(abspfx) if striplen: striplen += len(os.sep) if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])): score = evalpath(striplen) striplen1 = len(os.path.split(abspfx)[0]) if striplen1: striplen1 += len(os.sep) if evalpath(striplen1) > score: striplen = striplen1 res = lambda p: os.path.join(dest, util.localpath(p)[striplen:]) else: # a file if destdirexists: res = lambda p: os.path.join(dest, os.path.basename(util.localpath(p))) else: res = lambda p: dest return res pats = scmutil.expandpats(pats) if not pats: raise error.Abort(_('no source or destination specified')) if len(pats) == 1: raise error.Abort(_('no destination specified')) dest = pats.pop() destdirexists = os.path.isdir(dest) and not os.path.islink(dest) if not destdirexists: if len(pats) > 1 or matchmod.patkind(pats[0]): raise error.Abort(_('with multiple sources, destination must be an ' 'existing directory')) if util.endswithsep(dest): raise error.Abort(_('destination %s is not a directory') % dest) tfn = targetpathfn if after: tfn = targetpathafterfn copylist = [] for pat in pats: srcs = walkpat(pat) if not srcs: continue copylist.append((tfn(pat, dest, srcs), srcs)) if not copylist: raise error.Abort(_('no files to copy')) errors = 0 for targetpath, srcs in copylist: for abssrc, relsrc, exact in srcs: if copyfile(abssrc, relsrc, targetpath(abssrc), exact): errors += 1 if errors: ui.warn(_('(consider using --after)\n')) return errors != 0 def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None, runargs=None, appendpid=False): '''Run a command as a service.''' def writepid(pid): if opts['pid_file']: if appendpid: mode = 'a' else: mode = 'w' fp = open(opts['pid_file'], mode) fp.write(str(pid) + '\n') fp.close() if opts['daemon'] and not opts['daemon_pipefds']: # Signal child process 
startup with file removal lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-') os.close(lockfd) try: if not runargs: runargs = util.hgcmd() + sys.argv[1:] runargs.append('--daemon-pipefds=%s' % lockpath) # Don't pass --cwd to the child process, because we've already # changed directory. for i in xrange(1, len(runargs)): if runargs[i].startswith('--cwd='): del runargs[i] break elif runargs[i].startswith('--cwd'): del runargs[i:i + 2] break def condfn(): return not os.path.exists(lockpath) pid = util.rundetached(runargs, condfn) if pid < 0: raise error.Abort(_('child process failed to start')) writepid(pid) finally: try: os.unlink(lockpath) except OSError as e: if e.errno != errno.ENOENT: raise if parentfn: return parentfn(pid) else: return if initfn: initfn() if not opts['daemon']: writepid(os.getpid()) if opts['daemon_pipefds']: lockpath = opts['daemon_pipefds'] try: os.setsid() except AttributeError: pass os.unlink(lockpath) util.hidewindow() sys.stdout.flush() sys.stderr.flush() nullfd = os.open(os.devnull, os.O_RDWR) logfilefd = nullfd if logfile: logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND) os.dup2(nullfd, 0) os.dup2(logfilefd, 1) os.dup2(logfilefd, 2) if nullfd not in (0, 1, 2): os.close(nullfd) if logfile and logfilefd not in (0, 1, 2): os.close(logfilefd) if runfn: return runfn() ## facility to let extension process additional data into an import patch # list of identifier to be executed in order extrapreimport = [] # run before commit extrapostimport = [] # run after commit # mapping from identifier to actual import function # # 'preimport' are run before the commit is made and are provided the following # arguments: # - repo: the localrepository instance, # - patchdata: data extracted from patch header (cf m.patch.patchheadermap), # - extra: the future extra dictionary of the changeset, please mutate it, # - opts: the import options. 
# XXX ideally, we would just pass an ctx ready to be computed, that would allow # mutation of in memory commit and more. Feel free to rework the code to get # there. extrapreimportmap = {} # 'postimport' are run after the commit is made and are provided the following # argument: # - ctx: the changectx created by import. extrapostimportmap = {} def tryimportone(ui, repo, hunk, parents, opts, msgs, updatefunc): """Utility function used by commands.import to import a single patch This function is explicitly defined here to help the evolve extension to wrap this part of the import logic. The API is currently a bit ugly because it a simple code translation from the import command. Feel free to make it better. :hunk: a patch (as a binary string) :parents: nodes that will be parent of the created commit :opts: the full dict of option passed to the import command :msgs: list to save commit message to. (used in case we need to save it when failing) :updatefunc: a function that update a repo to a given node updatefunc(, ) """ # avoid cycle context -> subrepo -> cmdutil import context extractdata = patch.extract(ui, hunk) tmpname = extractdata.get('filename') message = extractdata.get('message') user = opts.get('user') or extractdata.get('user') date = opts.get('date') or extractdata.get('date') branch = extractdata.get('branch') nodeid = extractdata.get('nodeid') p1 = extractdata.get('p1') p2 = extractdata.get('p2') nocommit = opts.get('no_commit') importbranch = opts.get('import_branch') update = not opts.get('bypass') strip = opts["strip"] prefix = opts["prefix"] sim = float(opts.get('similarity') or 0) if not tmpname: return (None, None, False) rejects = False try: cmdline_message = logmessage(ui, opts) if cmdline_message: # pickup the cmdline msg message = cmdline_message elif message: # pickup the patch msg message = message.strip() else: # launch the editor message = None ui.debug('message:\n%s\n' % message) if len(parents) == 1: parents.append(repo[nullid]) if 
opts.get('exact'): if not nodeid or not p1: raise error.Abort(_('not a Mercurial patch')) p1 = repo[p1] p2 = repo[p2 or nullid] elif p2: try: p1 = repo[p1] p2 = repo[p2] # Without any options, consider p2 only if the # patch is being applied on top of the recorded # first parent. if p1 != parents[0]: p1 = parents[0] p2 = repo[nullid] except error.RepoError: p1, p2 = parents if p2.node() == nullid: ui.warn(_("warning: import the patch as a normal revision\n" "(use --exact to import the patch as a merge)\n")) else: p1, p2 = parents n = None if update: if p1 != parents[0]: updatefunc(repo, p1.node()) if p2 != parents[1]: repo.setparents(p1.node(), p2.node()) if opts.get('exact') or importbranch: repo.dirstate.setbranch(branch or 'default') partial = opts.get('partial', False) files = set() try: patch.patch(ui, repo, tmpname, strip=strip, prefix=prefix, files=files, eolmode=None, similarity=sim / 100.0) except patch.PatchError as e: if not partial: raise error.Abort(str(e)) if partial: rejects = True files = list(files) if nocommit: if message: msgs.append(message) else: if opts.get('exact') or p2: # If you got here, you either use --force and know what # you are doing or used --exact or a merge patch while # being updated to its first parent. 
m = None else: m = scmutil.matchfiles(repo, files or []) editform = mergeeditform(repo[None], 'import.normal') if opts.get('exact'): editor = None else: editor = getcommiteditor(editform=editform, **opts) allowemptyback = repo.ui.backupconfig('ui', 'allowemptycommit') extra = {} for idfunc in extrapreimport: extrapreimportmap[idfunc](repo, extractdata, extra, opts) try: if partial: repo.ui.setconfig('ui', 'allowemptycommit', True) n = repo.commit(message, user, date, match=m, editor=editor, extra=extra) for idfunc in extrapostimport: extrapostimportmap[idfunc](repo[n]) finally: repo.ui.restoreconfig(allowemptyback) else: if opts.get('exact') or importbranch: branch = branch or 'default' else: branch = p1.branch() store = patch.filestore() try: files = set() try: patch.patchrepo(ui, repo, p1, store, tmpname, strip, prefix, files, eolmode=None) except patch.PatchError as e: raise error.Abort(str(e)) if opts.get('exact'): editor = None else: editor = getcommiteditor(editform='import.bypass') memctx = context.makememctx(repo, (p1.node(), p2.node()), message, user, date, branch, files, store, editor=editor) n = memctx.commit() finally: store.close() if opts.get('exact') and nocommit: # --exact with --no-commit is still useful in that it does merge # and branch bits ui.warn(_("warning: can't check exact import with --no-commit\n")) elif opts.get('exact') and hex(n) != nodeid: raise error.Abort(_('patch is damaged or loses information')) msg = _('applied to working directory') if n: # i18n: refers to a short changeset id msg = _('created %s') % short(n) return (msg, n, rejects) finally: os.unlink(tmpname) # facility to let extensions include additional data in an exported patch # list of identifiers to be executed in order extraexport = [] # mapping from identifier to actual export function # function as to return a string to be added to the header or None # it is given two arguments (sequencenumber, changectx) extraexportmap = {} def export(repo, revs, 
template='hg-%h.patch', fp=None, switch_parent=False, opts=None, match=None): '''export changesets as hg patches.''' total = len(revs) revwidth = max([len(str(rev)) for rev in revs]) filemode = {} def single(rev, seqno, fp): ctx = repo[rev] node = ctx.node() parents = [p.node() for p in ctx.parents() if p] branch = ctx.branch() if switch_parent: parents.reverse() if parents: prev = parents[0] else: prev = nullid shouldclose = False if not fp and len(template) > 0: desc_lines = ctx.description().rstrip().split('\n') desc = desc_lines[0] #Commit always has a first line. fp = makefileobj(repo, template, node, desc=desc, total=total, seqno=seqno, revwidth=revwidth, mode='wb', modemap=filemode) shouldclose = True if fp and not getattr(fp, 'name', '').startswith('<'): repo.ui.note("%s\n" % fp.name) if not fp: write = repo.ui.write else: def write(s, **kw): fp.write(s) write("# HG changeset patch\n") write("# User %s\n" % ctx.user()) write("# Date %d %d\n" % ctx.date()) write("# %s\n" % util.datestr(ctx.date())) if branch and branch != 'default': write("# Branch %s\n" % branch) write("# Node ID %s\n" % hex(node)) write("# Parent %s\n" % hex(prev)) if len(parents) > 1: write("# Parent %s\n" % hex(parents[1])) for headerid in extraexport: header = extraexportmap[headerid](seqno, ctx) if header is not None: write('# %s\n' % header) write(ctx.description().rstrip()) write("\n\n") for chunk, label in patch.diffui(repo, prev, node, match, opts=opts): write(chunk, label=label) if shouldclose: fp.close() for seqno, rev in enumerate(revs): single(rev, seqno + 1, fp) def diffordiffstat(ui, repo, diffopts, node1, node2, match, changes=None, stat=False, fp=None, prefix='', root='', listsubrepos=False): '''show diff or diffstat.''' if fp is None: write = ui.write else: def write(s, **kw): fp.write(s) if root: relroot = pathutil.canonpath(repo.root, repo.getcwd(), root) else: relroot = '' if relroot != '': # XXX relative roots currently don't work if the root is within a # subrepo 
uirelroot = match.uipath(relroot) relroot += '/' for matchroot in match.files(): if not matchroot.startswith(relroot): ui.warn(_('warning: %s not inside relative root %s\n') % ( match.uipath(matchroot), uirelroot)) if stat: diffopts = diffopts.copy(context=0) width = 80 if not ui.plain(): width = ui.termwidth() chunks = patch.diff(repo, node1, node2, match, changes, diffopts, prefix=prefix, relroot=relroot) for chunk, label in patch.diffstatui(util.iterlines(chunks), width=width, git=diffopts.git): write(chunk, label=label) else: for chunk, label in patch.diffui(repo, node1, node2, match, changes, diffopts, prefix=prefix, relroot=relroot): write(chunk, label=label) if listsubrepos: ctx1 = repo[node1] ctx2 = repo[node2] for subpath, sub in scmutil.itersubrepos(ctx1, ctx2): tempnode2 = node2 try: if node2 is not None: tempnode2 = ctx2.substate[subpath][1] except KeyError: # A subrepo that existed in node1 was deleted between node1 and # node2 (inclusive). Thus, ctx2's substate won't contain that # subpath. The best we can do is to ignore it. 
tempnode2 = None submatch = matchmod.narrowmatcher(subpath, match) sub.diff(ui, diffopts, tempnode2, submatch, changes=changes, stat=stat, fp=fp, prefix=prefix) class changeset_printer(object): '''show changeset information when templating not requested.''' def __init__(self, ui, repo, matchfn, diffopts, buffered): self.ui = ui self.repo = repo self.buffered = buffered self.matchfn = matchfn self.diffopts = diffopts self.header = {} self.hunk = {} self.lastheader = None self.footer = None def flush(self, ctx): rev = ctx.rev() if rev in self.header: h = self.header[rev] if h != self.lastheader: self.lastheader = h self.ui.write(h) del self.header[rev] if rev in self.hunk: self.ui.write(self.hunk[rev]) del self.hunk[rev] return 1 return 0 def close(self): if self.footer: self.ui.write(self.footer) def show(self, ctx, copies=None, matchfn=None, **props): if self.buffered: self.ui.pushbuffer(labeled=True) self._show(ctx, copies, matchfn, props) self.hunk[ctx.rev()] = self.ui.popbuffer() else: self._show(ctx, copies, matchfn, props) def _show(self, ctx, copies, matchfn, props): '''show a single changeset or file revision''' changenode = ctx.node() rev = ctx.rev() if self.ui.debugflag: hexfunc = hex else: hexfunc = short # as of now, wctx.node() and wctx.rev() return None, but we want to # show the same values as {node} and {rev} templatekw revnode = (scmutil.intrev(rev), hexfunc(bin(ctx.hex()))) if self.ui.quiet: self.ui.write("%d:%s\n" % revnode, label='log.node') return date = util.datestr(ctx.date()) # i18n: column positioning for "hg log" self.ui.write(_("changeset: %d:%s\n") % revnode, label='log.changeset changeset.%s' % ctx.phasestr()) # branches are shown first before any other names due to backwards # compatibility branch = ctx.branch() # don't show the default branch name if branch != 'default': # i18n: column positioning for "hg log" self.ui.write(_("branch: %s\n") % branch, label='log.branch') for name, ns in self.repo.names.iteritems(): # branches has 
special logic already handled above, so here we just # skip it if name == 'branches': continue # we will use the templatename as the color name since those two # should be the same for name in ns.names(self.repo, changenode): self.ui.write(ns.logfmt % name, label='log.%s' % ns.colorname) if self.ui.debugflag: # i18n: column positioning for "hg log" self.ui.write(_("phase: %s\n") % ctx.phasestr(), label='log.phase') for pctx in scmutil.meaningfulparents(self.repo, ctx): label = 'log.parent changeset.%s' % pctx.phasestr() # i18n: column positioning for "hg log" self.ui.write(_("parent: %d:%s\n") % (pctx.rev(), hexfunc(pctx.node())), label=label) if self.ui.debugflag and rev is not None: mnode = ctx.manifestnode() # i18n: column positioning for "hg log" self.ui.write(_("manifest: %d:%s\n") % (self.repo.manifest.rev(mnode), hex(mnode)), label='ui.debug log.manifest') # i18n: column positioning for "hg log" self.ui.write(_("user: %s\n") % ctx.user(), label='log.user') # i18n: column positioning for "hg log" self.ui.write(_("date: %s\n") % date, label='log.date') if self.ui.debugflag: files = ctx.p1().status(ctx)[:3] for key, value in zip([# i18n: column positioning for "hg log" _("files:"), # i18n: column positioning for "hg log" _("files+:"), # i18n: column positioning for "hg log" _("files-:")], files): if value: self.ui.write("%-12s %s\n" % (key, " ".join(value)), label='ui.debug log.files') elif ctx.files() and self.ui.verbose: # i18n: column positioning for "hg log" self.ui.write(_("files: %s\n") % " ".join(ctx.files()), label='ui.note log.files') if copies and self.ui.verbose: copies = ['%s (%s)' % c for c in copies] # i18n: column positioning for "hg log" self.ui.write(_("copies: %s\n") % ' '.join(copies), label='ui.note log.copies') extra = ctx.extra() if extra and self.ui.debugflag: for key, value in sorted(extra.items()): # i18n: column positioning for "hg log" self.ui.write(_("extra: %s=%s\n") % (key, value.encode('string_escape')), label='ui.debug 
log.extra') description = ctx.description().strip() if description: if self.ui.verbose: self.ui.write(_("description:\n"), label='ui.note log.description') self.ui.write(description, label='ui.note log.description') self.ui.write("\n\n") else: # i18n: column positioning for "hg log" self.ui.write(_("summary: %s\n") % description.splitlines()[0], label='log.summary') self.ui.write("\n") self.showpatch(ctx, matchfn) def showpatch(self, ctx, matchfn): if not matchfn: matchfn = self.matchfn if matchfn: stat = self.diffopts.get('stat') diff = self.diffopts.get('patch') diffopts = patch.diffallopts(self.ui, self.diffopts) node = ctx.node() prev = ctx.p1().node() if stat: diffordiffstat(self.ui, self.repo, diffopts, prev, node, match=matchfn, stat=True) if diff: if stat: self.ui.write("\n") diffordiffstat(self.ui, self.repo, diffopts, prev, node, match=matchfn, stat=False) self.ui.write("\n") class jsonchangeset(changeset_printer): '''format changeset information.''' def __init__(self, ui, repo, matchfn, diffopts, buffered): changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered) self.cache = {} self._first = True def close(self): if not self._first: self.ui.write("\n]\n") else: self.ui.write("[]\n") def _show(self, ctx, copies, matchfn, props): '''show a single changeset or file revision''' rev = ctx.rev() if rev is None: jrev = jnode = 'null' else: jrev = str(rev) jnode = '"%s"' % hex(ctx.node()) j = encoding.jsonescape if self._first: self.ui.write("[\n {") self._first = False else: self.ui.write(",\n {") if self.ui.quiet: self.ui.write('\n "rev": %s' % jrev) self.ui.write(',\n "node": %s' % jnode) self.ui.write('\n }') return self.ui.write('\n "rev": %s' % jrev) self.ui.write(',\n "node": %s' % jnode) self.ui.write(',\n "branch": "%s"' % j(ctx.branch())) self.ui.write(',\n "phase": "%s"' % ctx.phasestr()) self.ui.write(',\n "user": "%s"' % j(ctx.user())) self.ui.write(',\n "date": [%d, %d]' % ctx.date()) self.ui.write(',\n "desc": "%s"' % 
j(ctx.description())) self.ui.write(',\n "bookmarks": [%s]' % ", ".join('"%s"' % j(b) for b in ctx.bookmarks())) self.ui.write(',\n "tags": [%s]' % ", ".join('"%s"' % j(t) for t in ctx.tags())) self.ui.write(',\n "parents": [%s]' % ", ".join('"%s"' % c.hex() for c in ctx.parents())) if self.ui.debugflag: if rev is None: jmanifestnode = 'null' else: jmanifestnode = '"%s"' % hex(ctx.manifestnode()) self.ui.write(',\n "manifest": %s' % jmanifestnode) self.ui.write(',\n "extra": {%s}' % ", ".join('"%s": "%s"' % (j(k), j(v)) for k, v in ctx.extra().items())) files = ctx.p1().status(ctx) self.ui.write(',\n "modified": [%s]' % ", ".join('"%s"' % j(f) for f in files[0])) self.ui.write(',\n "added": [%s]' % ", ".join('"%s"' % j(f) for f in files[1])) self.ui.write(',\n "removed": [%s]' % ", ".join('"%s"' % j(f) for f in files[2])) elif self.ui.verbose: self.ui.write(',\n "files": [%s]' % ", ".join('"%s"' % j(f) for f in ctx.files())) if copies: self.ui.write(',\n "copies": {%s}' % ", ".join('"%s": "%s"' % (j(k), j(v)) for k, v in copies)) matchfn = self.matchfn if matchfn: stat = self.diffopts.get('stat') diff = self.diffopts.get('patch') diffopts = patch.difffeatureopts(self.ui, self.diffopts, git=True) node, prev = ctx.node(), ctx.p1().node() if stat: self.ui.pushbuffer() diffordiffstat(self.ui, self.repo, diffopts, prev, node, match=matchfn, stat=True) self.ui.write(',\n "diffstat": "%s"' % j(self.ui.popbuffer())) if diff: self.ui.pushbuffer() diffordiffstat(self.ui, self.repo, diffopts, prev, node, match=matchfn, stat=False) self.ui.write(',\n "diff": "%s"' % j(self.ui.popbuffer())) self.ui.write("\n }") class changeset_templater(changeset_printer): '''format changeset information.''' def __init__(self, ui, repo, matchfn, diffopts, tmpl, mapfile, buffered): changeset_printer.__init__(self, ui, repo, matchfn, diffopts, buffered) formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12]) defaulttempl = { 'parent': '{rev}:{node|formatnode} ', 'manifest': 
'{rev}:{node|formatnode}', 'file_copy': '{name} ({source})', 'extra': '{key}={value|stringescape}' } # filecopy is preserved for compatibility reasons defaulttempl['filecopy'] = defaulttempl['file_copy'] self.t = templater.templater(mapfile, {'formatnode': formatnode}, cache=defaulttempl) if tmpl: self.t.cache['changeset'] = tmpl self.cache = {} # find correct templates for current mode tmplmodes = [ (True, None), (self.ui.verbose, 'verbose'), (self.ui.quiet, 'quiet'), (self.ui.debugflag, 'debug'), ] self._parts = {'header': '', 'footer': '', 'changeset': 'changeset', 'docheader': '', 'docfooter': ''} for mode, postfix in tmplmodes: for t in self._parts: cur = t if postfix: cur += "_" + postfix if mode and cur in self.t: self._parts[t] = cur if self._parts['docheader']: self.ui.write(templater.stringify(self.t(self._parts['docheader']))) def close(self): if self._parts['docfooter']: if not self.footer: self.footer = "" self.footer += templater.stringify(self.t(self._parts['docfooter'])) return super(changeset_templater, self).close() def _show(self, ctx, copies, matchfn, props): '''show a single changeset or file revision''' props = props.copy() props.update(templatekw.keywords) props['templ'] = self.t props['ctx'] = ctx props['repo'] = self.repo props['revcache'] = {'copies': copies} props['cache'] = self.cache try: # write header if self._parts['header']: h = templater.stringify(self.t(self._parts['header'], **props)) if self.buffered: self.header[ctx.rev()] = h else: if self.lastheader != h: self.lastheader = h self.ui.write(h) # write changeset metadata, then patch if requested key = self._parts['changeset'] self.ui.write(templater.stringify(self.t(key, **props))) self.showpatch(ctx, matchfn) if self._parts['footer']: if not self.footer: self.footer = templater.stringify( self.t(self._parts['footer'], **props)) except KeyError as inst: msg = _("%s: no key named '%s'") raise error.Abort(msg % (self.t.mapfile, inst.args[0])) except SyntaxError as inst: raise 
error.Abort('%s: %s' % (self.t.mapfile, inst.args[0])) def gettemplate(ui, tmpl, style): """ Find the template matching the given template spec or style. """ # ui settings if not tmpl and not style: # template are stronger than style tmpl = ui.config('ui', 'logtemplate') if tmpl: try: tmpl = templater.unquotestring(tmpl) except SyntaxError: pass return tmpl, None else: style = util.expandpath(ui.config('ui', 'style', '')) if not tmpl and style: mapfile = style if not os.path.split(mapfile)[0]: mapname = (templater.templatepath('map-cmdline.' + mapfile) or templater.templatepath(mapfile)) if mapname: mapfile = mapname return None, mapfile if not tmpl: return None, None return formatter.lookuptemplate(ui, 'changeset', tmpl) def show_changeset(ui, repo, opts, buffered=False): """show one changeset using template or regular display. Display format will be the first non-empty hit of: 1. option 'template' 2. option 'style' 3. [ui] setting 'logtemplate' 4. [ui] setting 'style' If all of these values are either the unset or the empty string, regular display via changeset_printer() is done. 
""" # options matchfn = None if opts.get('patch') or opts.get('stat'): matchfn = scmutil.matchall(repo) if opts.get('template') == 'json': return jsonchangeset(ui, repo, matchfn, opts, buffered) tmpl, mapfile = gettemplate(ui, opts.get('template'), opts.get('style')) if not tmpl and not mapfile: return changeset_printer(ui, repo, matchfn, opts, buffered) try: t = changeset_templater(ui, repo, matchfn, opts, tmpl, mapfile, buffered) except SyntaxError as inst: raise error.Abort(inst.args[0]) return t def showmarker(ui, marker): """utility function to display obsolescence marker in a readable way To be used by debug function.""" ui.write(hex(marker.precnode())) for repl in marker.succnodes(): ui.write(' ') ui.write(hex(repl)) ui.write(' %X ' % marker.flags()) parents = marker.parentnodes() if parents is not None: ui.write('{%s} ' % ', '.join(hex(p) for p in parents)) ui.write('(%s) ' % util.datestr(marker.date())) ui.write('{%s}' % (', '.join('%r: %r' % t for t in sorted(marker.metadata().items()) if t[0] != 'date'))) ui.write('\n') def finddate(ui, repo, date): """Find the tipmost changeset that matches the given date spec""" df = util.matchdate(date) m = scmutil.matchall(repo) results = {} def prep(ctx, fns): d = ctx.date() if df(d[0]): results[ctx.rev()] = d for ctx in walkchangerevs(repo, m, {'rev': None}, prep): rev = ctx.rev() if rev in results: ui.status(_("found revision %s from %s\n") % (rev, util.datestr(results[rev]))) return str(rev) raise error.Abort(_("revision matching date not found")) def increasingwindows(windowsize=8, sizelimit=512): while True: yield windowsize if windowsize < sizelimit: windowsize *= 2 class FileWalkError(Exception): pass def walkfilerevs(repo, match, follow, revs, fncache): '''Walks the file history for the matched files. Returns the changeset revs that are involved in the file history. Throws FileWalkError if the file history can't be walked using filelogs alone. 
''' wanted = set() copies = [] minrev, maxrev = min(revs), max(revs) def filerevgen(filelog, last): """ Only files, no patterns. Check the history of each file. Examines filelog entries within minrev, maxrev linkrev range Returns an iterator yielding (linkrev, parentlinkrevs, copied) tuples in backwards order """ cl_count = len(repo) revs = [] for j in xrange(0, last + 1): linkrev = filelog.linkrev(j) if linkrev < minrev: continue # only yield rev for which we have the changelog, it can # happen while doing "hg log" during a pull or commit if linkrev >= cl_count: break parentlinkrevs = [] for p in filelog.parentrevs(j): if p != nullrev: parentlinkrevs.append(filelog.linkrev(p)) n = filelog.node(j) revs.append((linkrev, parentlinkrevs, follow and filelog.renamed(n))) return reversed(revs) def iterfiles(): pctx = repo['.'] for filename in match.files(): if follow: if filename not in pctx: raise error.Abort(_('cannot follow file not in parent ' 'revision: "%s"') % filename) yield filename, pctx[filename].filenode() else: yield filename, None for filename_node in copies: yield filename_node for file_, node in iterfiles(): filelog = repo.file(file_) if not len(filelog): if node is None: # A zero count may be a directory or deleted file, so # try to find matching entries on the slow path. 
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % file_)
                raise FileWalkError("Cannot walk via filelog")
            else:
                continue

        if node is None:
            last = len(filelog) - 1
        else:
            last = filelog.rev(node)

        # keep track of all ancestors of the file
        ancestors = set([filelog.linkrev(last)])

        # iterate from latest to oldest revision
        for rev, flparentlinkrevs, copied in filerevgen(filelog, last):
            if not follow:
                if rev > maxrev:
                    continue
            else:
                # Note that last might not be the first interesting
                # rev to us:
                # if the file has been changed after maxrev, we'll
                # have linkrev(last) > maxrev, and we still need
                # to explore the file graph
                if rev not in ancestors:
                    continue
                # XXX insert 1327 fix here
                if flparentlinkrevs:
                    ancestors.update(flparentlinkrevs)

            fncache.setdefault(rev, []).append(file_)
            wanted.add(rev)
            if copied:
                copies.append(copied)

    return wanted

class _followfilter(object):
    """Stateful predicate used by --follow to decide whether a revision
    belongs to the followed history line.

    match() must be fed revisions in a consistent direction; it grows
    self.roots incrementally as it walks away from the first revision
    seen (startrev)."""
    def __init__(self, repo, onlyfirst=False):
        self.repo = repo
        self.startrev = nullrev
        self.roots = set()
        # onlyfirst: follow only the first parent of merges
        self.onlyfirst = onlyfirst

    def match(self, rev):
        def realparents(rev):
            if self.onlyfirst:
                return self.repo.changelog.parentrevs(rev)[0:1]
            else:
                return filter(lambda x: x != nullrev,
                              self.repo.changelog.parentrevs(rev))

        # first revision seen becomes the anchor of the walk
        if self.startrev == nullrev:
            self.startrev = rev
            return True

        if rev > self.startrev:
            # forward: all descendants
            if not self.roots:
                self.roots.add(self.startrev)
            for parent in realparents(rev):
                if parent in self.roots:
                    self.roots.add(rev)
                    return True
        else:
            # backwards: all parents
            if not self.roots:
                self.roots.update(realparents(self.startrev))
            if rev in self.roots:
                self.roots.remove(rev)
                self.roots.update(realparents(rev))
                return True

        return False

def walkchangerevs(repo, match, opts, prepare):
    '''Iterate over files and the revs in which they changed.

    Callers most commonly need to iterate backwards over the history
    in which they are interested. Doing so has awful (quadratic-looking)
    performance, so we use iterators in a "windowed" way.

    We walk a window of revisions in the desired order.  Within the
    window, we first walk forwards to gather data, then in the desired
    order (usually backwards) to display it.

    This function returns an iterator yielding contexts. Before
    yielding each context, the iterator will first call the prepare
    function on each context in the window in forward order.'''

    follow = opts.get('follow') or opts.get('follow_first')
    revs = _logrevs(repo, opts)
    if not revs:
        return []
    wanted = set()
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    fncache = {}
    change = repo.changectx

    # First step is to fill wanted, the set of revisions that we want to yield.
    # When it does not induce extra cost, we also fill fncache for revisions in
    # wanted: a cache of filenames that were changed (ctx.files()) and that
    # match the file filtering conditions.

    if match.always():
        # No files, no patterns.  Display all revs.
        wanted = revs
    elif not slowpath:
        # We only have to read through the filelog to find wanted revisions

        try:
            wanted = walkfilerevs(repo, match, follow, revs, fncache)
        except FileWalkError:
            slowpath = True

            # We decided to fall back to the slowpath because at least one
            # of the paths was not a file. Check to see if at least one of them
            # existed in history, otherwise simply return
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                return []

    if slowpath:
        # We have to read the changelog to match filenames against
        # changed files

        if follow:
            raise error.Abort(_('can only follow copies/renames for explicit '
                               'filenames'))

        # The slow path checks files modified in every changeset.
        # This is really slow on large repos, so compute the set lazily.
        class lazywantedset(object):
            def __init__(self):
                self.set = set()
                self.revs = set(revs)

            # No need to worry about locality here because it will be accessed
            # in the same order as the increasing window below.
            def __contains__(self, value):
                if value in self.set:
                    return True
                elif not value in self.revs:
                    return False
                else:
                    self.revs.discard(value)
                    ctx = change(value)
                    matches = filter(match, ctx.files())
                    if matches:
                        fncache[value] = matches
                        self.set.add(value)
                        return True
                    return False

            def discard(self, value):
                self.revs.discard(value)
                self.set.discard(value)

        wanted = lazywantedset()

    # it might be worthwhile to do this in the iterator if the rev range
    # is descending and the prune args are all within that range
    for rev in opts.get('prune', ()):
        rev = repo[rev].rev()
        ff = _followfilter(repo)
        stop = min(revs[0], revs[-1])
        for x in xrange(rev, stop - 1, -1):
            if ff.match(x):
                wanted = wanted - [x]

    # Now that wanted is correctly initialized, we can iterate over the
    # revision range, yielding only revisions in wanted.
    def iterate():
        if follow and match.always():
            ff = _followfilter(repo, onlyfirst=opts.get('follow_first'))
            def want(rev):
                return ff.match(rev) and rev in wanted
        else:
            def want(rev):
                return rev in wanted

        it = iter(revs)
        stopiteration = False
        for windowsize in increasingwindows():
            nrevs = []
            for i in xrange(windowsize):
                rev = next(it, None)
                if rev is None:
                    stopiteration = True
                    break
                elif want(rev):
                    nrevs.append(rev)
            # first pass over the window in forward order: let the caller
            # gather data via prepare()
            for rev in sorted(nrevs):
                fns = fncache.get(rev)
                ctx = change(rev)
                if not fns:
                    def fns_generator():
                        for f in ctx.files():
                            if match(f):
                                yield f
                    fns = fns_generator()
                prepare(ctx, fns)
            # second pass in the requested order: yield the contexts
            for rev in nrevs:
                yield change(rev)
            if stopiteration:
                break

    return iterate()

def _makefollowlogfilematcher(repo, files, followfirst):
    # When displaying a revision with --patch --follow FILE, we have
    # to know which file of the revision must be diffed. With
    # --follow, we want the names of the ancestors of FILE in the
    # revision, stored in "fcache". "fcache" is populated by
    # reproducing the graph traversal already done by --follow revset
    # and relating linkrevs to file names (which is not "correct" but
    # good enough).
    # linkrev -> set of file names touched by the followed history
    fcache = {}
    # one-element list so the nested closure can mutate the flag (py2 has
    # no 'nonlocal')
    fcacheready = [False]
    pctx = repo['.']

    def populate():
        for fn in files:
            for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
                for c in i:
                    fcache.setdefault(c.linkrev(), set()).add(c.path())

    def filematcher(rev):
        if not fcacheready[0]:
            # Lazy initialization
            fcacheready[0] = True
            populate()
        return scmutil.matchfiles(repo, fcache.get(rev, []))

    return filematcher

def _makenofollowlogfilematcher(repo, pats, opts):
    '''hook for extensions to override the filematcher for non-follow cases'''
    return None

def _makelogrevset(repo, pats, opts, revs):
    """Return (expr, filematcher) where expr is a revset string built
    from log options and file patterns or None. If --stat or --patch
    are not passed filematcher is None. Otherwise it is a callable
    taking a revision number and returning a match objects filtering
    the files to be detailed when displaying the revision.
    """
    # maps a log option to (revset template, join operator for list values)
    opt2revset = {
        'no_merges': ('not merge()', None),
        'only_merges': ('merge()', None),
        '_ancestors': ('ancestors(%(val)s)', None),
        '_fancestors': ('_firstancestors(%(val)s)', None),
        '_descendants': ('descendants(%(val)s)', None),
        '_fdescendants': ('_firstdescendants(%(val)s)', None),
        '_matchfiles': ('_matchfiles(%(val)s)', None),
        'date': ('date(%(val)r)', None),
        'branch': ('branch(%(val)r)', ' or '),
        '_patslog': ('filelog(%(val)r)', ' or '),
        '_patsfollow': ('follow(%(val)r)', ' or '),
        '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
        'keyword': ('keyword(%(val)r)', ' or '),
        'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
        'user': ('user(%(val)r)', ' or '),
    }

    opts = dict(opts)
    # follow or not follow?
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('follow_first'):
        followfirst = 1
    else:
        followfirst = 0
    # --follow with FILE behavior depends on revs...
    it = iter(revs)
    startrev = it.next()
    followdescendants = startrev < next(it, startrev)

    # branch and only_branch are really aliases and must be handled at
    # the same time
    opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
    opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
    # pats/include/exclude are passed to match.match() directly in
    # _matchfiles() revset but walkchangerevs() builds its matcher with
    # scmutil.match(). The difference is input pats are globbed on
    # platforms without shell expansion (windows).
    wctx = repo[None]
    match, pats = scmutil.matchandpats(wctx, pats, opts)
    slowpath = match.anypats() or ((match.isexact() or match.prefix()) and
                                   opts.get('removed'))
    if not slowpath:
        for f in match.files():
            if follow and f not in wctx:
                # If the file exists, it may be a directory, so let it
                # take the slow path.
                if os.path.exists(repo.wjoin(f)):
                    slowpath = True
                    continue
                else:
                    raise error.Abort(_('cannot follow file not in parent '
                                       'revision: "%s"') % f)
            filelog = repo.file(f)
            if not filelog:
                # A zero count may be a directory or deleted file, so
                # try to find matching entries on the slow path.
                if follow:
                    raise error.Abort(
                        _('cannot follow nonexistent file: "%s"') % f)
                slowpath = True

        # We decided to fall back to the slowpath because at least one
        # of the paths was not a file. Check to see if at least one of them
        # existed in history - in that case, we'll continue down the
        # slowpath; otherwise, we can turn off the slowpath
        if slowpath:
            for path in match.files():
                if path == '.' or path in repo.store:
                    break
            else:
                slowpath = False

    fpats = ('_patsfollow', '_patsfollowfirst')
    fnopats = (('_ancestors', '_fancestors'),
               ('_descendants', '_fdescendants'))
    if slowpath:
        # See walkchangerevs() slow path.
        #
        # pats/include/exclude cannot be represented as separate
        # revset expressions as their filtering logic applies at file
        # level. For instance "-I a -X a" matches a revision touching
        # "a" and "b" while "file(a) and not file(b)" does
        # not. Besides, filesets are evaluated against the working
        # directory.
        matchargs = ['r:', 'd:relpath']
        for p in pats:
            matchargs.append('p:' + p)
        for p in opts.get('include', []):
            matchargs.append('i:' + p)
        for p in opts.get('exclude', []):
            matchargs.append('x:' + p)
        matchargs = ','.join(('%r' % p) for p in matchargs)
        opts['_matchfiles'] = matchargs
        if follow:
            opts[fnopats[0][followfirst]] = '.'
    else:
        if follow:
            if pats:
                # follow() revset interprets its file argument as a
                # manifest entry, so use match.files(), not pats.
                opts[fpats[followfirst]] = list(match.files())
            else:
                op = fnopats[followdescendants][followfirst]
                opts[op] = 'rev(%d)' % startrev
        else:
            opts['_patslog'] = list(pats)

    filematcher = None
    if opts.get('patch') or opts.get('stat'):
        # When following files, track renames via a special matcher.
        # If we're forced to take the slowpath it means we're following
        # at least one pattern/directory, so don't bother with rename tracking.
        if follow and not match.always() and not slowpath:
            # _makefollowlogfilematcher expects its files argument to be
            # relative to the repo root, so use match.files(), not pats.
            filematcher = _makefollowlogfilematcher(repo, match.files(),
                                                    followfirst)
        else:
            filematcher = _makenofollowlogfilematcher(repo, pats, opts)
            if filematcher is None:
                filematcher = lambda rev: match

    # combine all accumulated option revsets with 'and'
    expr = []
    for op, val in sorted(opts.iteritems()):
        if not val:
            continue
        if op not in opt2revset:
            continue
        revop, andor = opt2revset[op]
        if '%(val)' not in revop:
            expr.append(revop)
        else:
            if not isinstance(val, list):
                e = revop % {'val': val}
            else:
                e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
            expr.append(e)

    if expr:
        expr = '(' + ' and '.join(expr) + ')'
    else:
        expr = None
    return expr, filematcher

def _logrevs(repo, opts):
    """Resolve the revisions a log-style command should visit.

    Default --rev value depends on --follow but --follow behavior
    depends on revisions resolved from --rev...
    """
    follow = opts.get('follow') or opts.get('follow_first')
    if opts.get('rev'):
        revs = scmutil.revrange(repo, opts['rev'])
    elif follow and repo.dirstate.p1() == nullid:
        # --follow on an empty working directory parent: nothing to show
        revs = revset.baseset()
    elif follow:
        revs = repo.revs('reverse(:.)')
    else:
        revs = revset.spanset(repo)
        revs.reverse()
    return revs

def getgraphlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return revset.baseset(), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if opts.get('rev'):
        # User-specified revs might be unsorted, but don't sort before
        # _makelogrevset because it might depend on the order of revs
        revs.sort(reverse=True)
    if expr:
        # Revset matchers often operate faster on revisions in changelog
        # order, because most filters deal with the changelog.
        revs.reverse()
        matcher = revset.match(repo.ui, expr)
        # Revset matches can reorder revisions. "A or B" typically returns
        # returns the revision matching A then the revision matching B. Sort
        # again to fix that.
        revs = matcher(repo, revs)
        revs.sort(reverse=True)
    if limit is not None:
        # apply --limit by truncating the (already sorted) result
        limitedrevs = []
        for idx, rev in enumerate(revs):
            if idx >= limit:
                break
            limitedrevs.append(rev)
        revs = revset.baseset(limitedrevs)

    return revs, expr, filematcher

def getlogrevs(repo, pats, opts):
    """Return (revs, expr, filematcher) where revs is an iterable of
    revision numbers, expr is a revset string built from log options
    and file patterns or None, and used to filter 'revs'. If --stat or
    --patch are not passed filematcher is None. Otherwise it is a
    callable taking a revision number and returning a match objects
    filtering the files to be detailed when displaying the revision.
    """
    limit = loglimit(opts)
    revs = _logrevs(repo, opts)
    if not revs:
        return revset.baseset([]), None, None
    expr, filematcher = _makelogrevset(repo, pats, opts, revs)
    if expr:
        # Revset matchers often operate faster on revisions in changelog
        # order, because most filters deal with the changelog.
        if not opts.get('rev'):
            revs.reverse()
        matcher = revset.match(repo.ui, expr)
        # Revset matches can reorder revisions. "A or B" typically returns
        # returns the revision matching A then the revision matching B. Sort
        # again to fix that.
        fixopts = ['branch', 'only_branch', 'keyword', 'user']
        oldrevs = revs
        revs = matcher(repo, revs)
        if not opts.get('rev'):
            revs.sort(reverse=True)
        elif len(pats) > 1 or any(len(opts.get(op, [])) > 1 for op in fixopts):
            # XXX "A or B" is known to change the order; fix it by filtering
            # matched set again (issue5100)
            revs = oldrevs & revs
    if limit is not None:
        limitedrevs = []
        for idx, r in enumerate(revs):
            if limit <= idx:
                break
            limitedrevs.append(r)
        revs = revset.baseset(limitedrevs)

    return revs, expr, filematcher

def _graphnodeformatter(ui, displayer):
    """Return a callable (repo, ctx) -> str rendering the graph node
    character, honoring the ui.graphnodetemplate config."""
    spec = ui.config('ui', 'graphnodetemplate')
    if not spec:
        return templatekw.showgraphnode  # fast path for "{graphnode}"

    templ = formatter.gettemplater(ui, 'graphnode', spec)
    cache = {}
    if isinstance(displayer, changeset_templater):
        cache = displayer.cache  # reuse cache of slow templates
    props = templatekw.keywords.copy()
    props['templ'] = templ
    props['cache'] = cache
    def formatnode(repo, ctx):
        props['ctx'] = ctx
        props['repo'] = repo
        props['revcache'] = {}
        return templater.stringify(templ('graphnode', **props))
    return formatnode

def displaygraph(ui, repo, dag, displayer, edgefn, getrenamed=None,
                 filematcher=None):
    formatnode = _graphnodeformatter(ui, displayer)
    seen, state = [], graphmod.asciistate()
    for rev, type, ctx, parents in dag:
        char = formatnode(repo,
                          ctx)
        copies = None
        if getrenamed and ctx.rev():
            copies = []
            for fn in ctx.files():
                rename = getrenamed(fn, ctx.rev())
                if rename:
                    copies.append((fn, rename[0]))
        revmatchfn = None
        if filematcher is not None:
            revmatchfn = filematcher(ctx.rev())
        displayer.show(ctx, copies=copies, matchfn=revmatchfn)
        lines = displayer.hunk.pop(rev).split('\n')
        if not lines[-1]:
            del lines[-1]
        displayer.flush(ctx)
        edges = edgefn(type, char, lines, seen, rev, parents)
        for type, char, lines, coldata in edges:
            graphmod.ascii(ui, state, type, char, lines, coldata)
    displayer.close()

def graphlog(ui, repo, *pats, **opts):
    # Parameters are identical to log command ones
    revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
    revdag = graphmod.dagwalker(repo, revs)

    getrenamed = None
    if opts.get('copies'):
        endrev = None
        if opts.get('rev'):
            endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1
        getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
    displayer = show_changeset(ui, repo, opts, buffered=True)
    displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges, getrenamed,
                 filematcher)

def checkunsupportedgraphflags(pats, opts):
    # -G/--graph draws in topological order; options that reorder output
    # cannot be honored
    for op in ["newest_first"]:
        if op in opts and opts[op]:
            raise error.Abort(_("-G/--graph option is incompatible with --%s")
                             % op.replace("_", "-"))

def graphrevs(repo, nodes, opts):
    limit = loglimit(opts)
    nodes.reverse()
    if limit is not None:
        nodes = nodes[:limit]
    return graphmod.nodes(repo, nodes)

def add(ui, repo, match, prefix, explicitonly, **opts):
    """Schedule matched unknown files for addition; returns the list of
    files that could not be added (also recurses into subrepos)."""
    join = lambda f: os.path.join(prefix, f)
    bad = []

    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    names = []
    wctx = repo[None]
    cca = None
    abort, warn = scmutil.checkportabilityalert(ui)
    if abort or warn:
        cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)

    badmatch = matchmod.badmatch(match, badfn)
    dirstate = repo.dirstate
    # We don't want to just call wctx.walk here, since it would return a lot of
    # clean files, which we aren't interested in and takes time.
    for f in sorted(dirstate.walk(badmatch, sorted(wctx.substate),
                                  True, False, full=False)):
        exact = match.exact(f)
        if exact or not explicitonly and f not in wctx and repo.wvfs.lexists(f):
            if cca:
                cca(f)
            names.append(f)
            if ui.verbose or not exact:
                ui.status(_('adding %s\n') % match.rel(f))

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.narrowmatcher(subpath, match)
            if opts.get('subrepos'):
                bad.extend(sub.add(ui, submatch, prefix, False, **opts))
            else:
                # without --subrepos only explicitly-named files are added
                bad.extend(sub.add(ui, submatch, prefix, True, **opts))
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                           % join(subpath))

    if not opts.get('dry_run'):
        rejected = wctx.add(names, prefix)
        bad.extend(f for f in rejected if f in match.files())
    return bad

def forget(ui, repo, match, prefix, explicitonly):
    """Unschedule matched files from tracking; returns (bad, forgot)."""
    join = lambda f: os.path.join(prefix, f)
    bad = []
    badfn = lambda x, y: bad.append(x) or match.bad(x, y)
    wctx = repo[None]
    forgot = []

    s = repo.status(match=matchmod.badmatch(match, badfn), clean=True)
    # modified + added + deleted + clean
    forget = sorted(s[0] + s[1] + s[3] + s[6])
    if explicitonly:
        forget = [f for f in forget if match.exact(f)]

    for subpath in sorted(wctx.substate):
        sub = wctx.sub(subpath)
        try:
            submatch = matchmod.narrowmatcher(subpath, match)
            subbad, subforgot = sub.forget(submatch, prefix)
            bad.extend([subpath + '/' + f for f in subbad])
            forgot.extend([subpath + '/' + f for f in subforgot])
        except error.LookupError:
            ui.status(_("skipping missing subrepository: %s\n")
                           % join(subpath))

    if not explicitonly:
        for f in match.files():
            if f not in repo.dirstate and not repo.wvfs.isdir(f):
                if f not in forgot:
                    if repo.wvfs.exists(f):
                        # Don't complain if the exact case match wasn't given.
                        # But don't do this until after checking 'forgot', so
                        # that subrepo files aren't normalized, and this op is
                        # purely from data cached by the status walk above.
                        if repo.dirstate.normalize(f) in repo.dirstate:
                            continue
                        ui.warn(_('not removing %s: '
                                  'file is already untracked\n')
                                % match.rel(f))
                    bad.append(f)

    for f in forget:
        if ui.verbose or not match.exact(f):
            ui.status(_('removing %s\n') % match.rel(f))

    rejected = wctx.forget(forget, prefix)
    bad.extend(f for f in rejected if f in match.files())
    forgot.extend(f for f in forget if f not in rejected)
    return bad, forgot

def files(ui, ctx, m, fm, fmt, subrepos):
    """Write the matched file names of ctx through formatter fm.

    Returns 0 if at least one file was listed, 1 otherwise."""
    rev = ctx.rev()
    ret = 1
    ds = ctx.repo().dirstate

    for f in ctx.matches(m):
        # skip files marked removed when listing the working directory
        if rev is None and ds[f] == 'r':
            continue
        fm.startitem()
        if ui.verbose:
            fc = ctx[f]
            fm.write('size flags', '% 10d % 1s ', fc.size(), fc.flags())
        fm.data(abspath=f)
        fm.write('path', fmt, m.rel(f))
        ret = 0

    for subpath in sorted(ctx.substate):
        def matchessubrepo(subpath):
            return (m.exact(subpath)
                    or any(f.startswith(subpath + '/') for f in m.files()))

        if subrepos or matchessubrepo(subpath):
            sub = ctx.sub(subpath)
            try:
                submatch = matchmod.narrowmatcher(subpath, m)
                recurse = m.exact(subpath) or subrepos
                if sub.printfiles(ui, submatch, fm, fmt, recurse) == 0:
                    ret = 0
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                               % m.abs(subpath))

    return ret

def remove(ui, repo, m, prefix, after, force, subrepos):
    """Schedule matched files for removal; returns 0 on success, 1 if any
    file was skipped with a warning ('after' keeps on-disk files)."""
    join = lambda f: os.path.join(prefix, f)
    ret = 0
    s = repo.status(match=m, clean=True)
    modified, added, deleted, clean = s[0], s[1], s[3], s[6]

    wctx = repo[None]

    for subpath in sorted(wctx.substate):
        def matchessubrepo(matcher, subpath):
            if matcher.exact(subpath):
                return True
            for f in matcher.files():
                if f.startswith(subpath):
                    return True
            return False

        if subrepos or matchessubrepo(m, subpath):
            sub = wctx.sub(subpath)
            try:
                submatch = matchmod.narrowmatcher(subpath, m)
                if sub.removefiles(submatch, prefix, after, force, subrepos):
                    ret = 1
            except error.LookupError:
                ui.status(_("skipping missing subrepository: %s\n")
                               % join(subpath))

    # warn about failure to delete explicit files/dirs
    deleteddirs = util.dirs(deleted)
    for f in
if len(matcher.files()) == 1 and not matcher.anypats(): file = matcher.files()[0] mf = repo.manifest mfnode = ctx.manifestnode() if mfnode and mf.find(mfnode, file)[0]: write(file) return 0 # Don't warn about "missing" files that are really in subrepos def badfn(path, msg): for subpath in ctx.substate: if path.startswith(subpath): return matcher.bad(path, msg) for abs in ctx.walk(matchmod.badmatch(matcher, badfn)): write(abs) err = 0 for subpath in sorted(ctx.substate): sub = ctx.sub(subpath) try: submatch = matchmod.narrowmatcher(subpath, matcher) if not sub.cat(submatch, os.path.join(prefix, sub._path), **opts): err = 0 except error.RepoLookupError: ui.status(_("skipping missing subrepository: %s\n") % os.path.join(prefix, subpath)) return err def commit(ui, repo, commitfunc, pats, opts): '''commit the specified files or all outstanding changes''' date = opts.get('date') if date: opts['date'] = util.parsedate(date) message = logmessage(ui, opts) matcher = scmutil.match(repo[None], pats, opts) # extract addremove carefully -- this function can be called from a command # that doesn't support addremove if opts.get('addremove'): if scmutil.addremove(repo, matcher, "", opts) != 0: raise error.Abort( _("failed to mark all new/missing files as added/removed")) return commitfunc(ui, repo, message, matcher, opts) def amend(ui, repo, commitfunc, old, extra, pats, opts): # avoid cycle context -> subrepo -> cmdutil import context # amend will reuse the existing user if not specified, but the obsolete # marker creation requires that the current user's name is specified. 
if obsolete.isenabled(repo, obsolete.createmarkersopt): ui.username() # raise exception if username not set ui.note(_('amending changeset %s\n') % old) base = old.p1() createmarkers = obsolete.isenabled(repo, obsolete.createmarkersopt) wlock = lock = newid = None try: wlock = repo.wlock() lock = repo.lock() with repo.transaction('amend') as tr: # See if we got a message from -m or -l, if not, open the editor # with the message of the changeset to amend message = logmessage(ui, opts) # ensure logfile does not conflict with later enforcement of the # message. potential logfile content has been processed by # `logmessage` anyway. opts.pop('logfile') # First, do a regular commit to record all changes in the working # directory (if there are any) ui.callhooks = False activebookmark = repo._bookmarks.active try: repo._bookmarks.active = None opts['message'] = 'temporary amend commit for %s' % old node = commit(ui, repo, commitfunc, pats, opts) finally: repo._bookmarks.active = activebookmark repo._bookmarks.recordchange(tr) ui.callhooks = True ctx = repo[node] # Participating changesets: # # node/ctx o - new (intermediate) commit that contains changes # | from working dir to go into amending commit # | (or a workingctx if there were no changes) # | # old o - changeset to amend # | # base o - parent of amending changeset # Update extra dict from amended commit (e.g. 
to preserve graft # source) extra.update(old.extra()) # Also update it from the intermediate commit or from the wctx extra.update(ctx.extra()) if len(old.parents()) > 1: # ctx.files() isn't reliable for merges, so fall back to the # slower repo.status() method files = set([fn for st in repo.status(base, old)[:3] for fn in st]) else: files = set(old.files()) # Second, we use either the commit we just did, or if there were no # changes the parent of the working directory as the version of the # files in the final amend commit if node: ui.note(_('copying changeset %s to %s\n') % (ctx, base)) user = ctx.user() date = ctx.date() # Recompute copies (avoid recording a -> b -> a) copied = copies.pathcopies(base, ctx) if old.p2: copied.update(copies.pathcopies(old.p2(), ctx)) # Prune files which were reverted by the updates: if old # introduced file X and our intermediate commit, node, # renamed that file, then those two files are the same and # we can discard X from our list of files. Likewise if X # was deleted, it's no longer relevant files.update(ctx.files()) def samefile(f): if f in ctx.manifest(): a = ctx.filectx(f) if f in base.manifest(): b = base.filectx(f) return (not a.cmp(b) and a.flags() == b.flags()) else: return False else: return f not in base.manifest() files = [f for f in files if not samefile(f)] def filectxfn(repo, ctx_, path): try: fctx = ctx[path] flags = fctx.flags() mctx = context.memfilectx(repo, fctx.path(), fctx.data(), islink='l' in flags, isexec='x' in flags, copied=copied.get(path)) return mctx except KeyError: return None else: ui.note(_('copying changeset %s to %s\n') % (old, base)) # Use version of files as in the old cset def filectxfn(repo, ctx_, path): try: return old.filectx(path) except KeyError: return None user = opts.get('user') or old.user() date = opts.get('date') or old.date() editform = mergeeditform(old, 'commit.amend') editor = getcommiteditor(editform=editform, **opts) if not message: editor = getcommiteditor(edit=True, 
editform=editform) message = old.description() pureextra = extra.copy() extra['amend_source'] = old.hex() new = context.memctx(repo, parents=[base.node(), old.p2().node()], text=message, files=files, filectxfn=filectxfn, user=user, date=date, extra=extra, editor=editor) newdesc = changelog.stripdesc(new.description()) if ((not node) and newdesc == old.description() and user == old.user() and date == old.date() and pureextra == old.extra()): # nothing changed. continuing here would create a new node # anyway because of the amend_source noise. # # This not what we expect from amend. return old.node() ph = repo.ui.config('phases', 'new-commit', phases.draft) try: if opts.get('secret'): commitphase = 'secret' else: commitphase = old.phase() repo.ui.setconfig('phases', 'new-commit', commitphase, 'amend') newid = repo.commitctx(new) finally: repo.ui.setconfig('phases', 'new-commit', ph, 'amend') if newid != old.node(): # Reroute the working copy parent to the new changeset repo.setparents(newid, nullid) # Move bookmarks from old parent to amend commit bms = repo.nodebookmarks(old.node()) if bms: marks = repo._bookmarks for bm in bms: ui.debug('moving bookmarks %r from %s to %s\n' % (marks, old.hex(), hex(newid))) marks[bm] = newid marks.recordchange(tr) #commit the whole amend process if createmarkers: # mark the new changeset as successor of the rewritten one new = repo[newid] obs = [(old, (new,))] if node: obs.append((ctx, ())) obsolete.createmarkers(repo, obs) if not createmarkers and newid != old.node(): # Strip the intermediate commit (if there was one) and the amended # commit if node: ui.note(_('stripping intermediate changeset %s\n') % ctx) ui.note(_('stripping amended changeset %s\n') % old) repair.strip(ui, repo, old.node(), topic='amend-backup') finally: lockmod.release(lock, wlock) return newid def commiteditor(repo, ctx, subs, editform=''): if ctx.description(): return ctx.description() return commitforceeditor(repo, ctx, subs, editform=editform, 
                             unchangedmessagedetection=True)

def commitforceeditor(repo, ctx, subs, finishdesc=None, extramsg=None,
                      editform='', unchangedmessagedetection=False):
    """Run the user's editor on a commit message for ctx and return the
    cleaned text (HG: lines stripped); aborts on an empty or -- when
    unchangedmessagedetection is set -- unchanged message."""
    if not extramsg:
        extramsg = _("Leave message empty to abort commit.")

    # look up the most specific committemplate matching the edit form,
    # falling back towards plain 'changeset'
    forms = [e for e in editform.split('.') if e]
    forms.insert(0, 'changeset')
    templatetext = None
    while forms:
        tmpl = repo.ui.config('committemplate', '.'.join(forms))
        if tmpl:
            templatetext = committext = buildcommittemplate(
                repo, ctx, subs, extramsg, tmpl)
            break
        forms.pop()
    else:
        committext = buildcommittext(repo, ctx, subs, extramsg)

    # run editor in the repository root
    olddir = os.getcwd()
    os.chdir(repo.root)

    # make in-memory changes visible to external process
    tr = repo.currenttransaction()
    repo.dirstate.write(tr)
    pending = tr and tr.writepending() and repo.root

    editortext = repo.ui.edit(committext, ctx.user(), ctx.extra(),
                              editform=editform, pending=pending)
    text = re.sub("(?m)^HG:.*(\n|$)", "", editortext)
    os.chdir(olddir)

    if finishdesc:
        text = finishdesc(text)
    if not text.strip():
        raise error.Abort(_("empty commit message"))
    if unchangedmessagedetection and editortext == templatetext:
        raise error.Abort(_("commit message unchanged"))

    return text

def buildcommittemplate(repo, ctx, subs, extramsg, tmpl):
    """Render the commit-message skeleton for ctx from template tmpl."""
    ui = repo.ui
    tmpl, mapfile = gettemplate(ui, tmpl, None)

    try:
        t = changeset_templater(ui, repo, None, {}, tmpl, mapfile, False)
    except SyntaxError as inst:
        raise error.Abort(inst.args[0])

    for k, v in repo.ui.configitems('committemplate'):
        if k != 'changeset':
            t.t.cache[k] = v

    if not extramsg:
        extramsg = '' # ensure that extramsg is string

    ui.pushbuffer()
    t.show(ctx, extramsg=extramsg)
    return ui.popbuffer()

def hgprefix(msg):
    # prefix every non-empty line of msg with "HG: "
    return "\n".join(["HG: %s" % a for a in msg.split("\n") if a])

def buildcommittext(repo, ctx, subs, extramsg):
    """Build the default (non-templated) commit-message skeleton."""
    edittext = []
    modified, added, removed = ctx.modified(), ctx.added(), ctx.removed()
    if ctx.description():
        edittext.append(ctx.description())
    edittext.append("")
    edittext.append("") # Empty line between message and comments.
    edittext.append(hgprefix(_("Enter commit message."
                      "  Lines beginning with 'HG:' are removed.")))
    edittext.append(hgprefix(extramsg))
    edittext.append("HG: --")
    edittext.append(hgprefix(_("user: %s") % ctx.user()))
    if ctx.p2():
        edittext.append(hgprefix(_("branch merge")))
    if ctx.branch():
        edittext.append(hgprefix(_("branch '%s'") % ctx.branch()))
    if bookmarks.isactivewdirparent(repo):
        edittext.append(hgprefix(_("bookmark '%s'") % repo._activebookmark))
    edittext.extend([hgprefix(_("subrepo %s") % s) for s in subs])
    edittext.extend([hgprefix(_("added %s") % f) for f in added])
    edittext.extend([hgprefix(_("changed %s") % f) for f in modified])
    edittext.extend([hgprefix(_("removed %s") % f) for f in removed])
    if not added and not modified and not removed:
        edittext.append(hgprefix(_("no files changed")))
    edittext.append("")

    return "\n".join(edittext)

def commitstatus(repo, node, branch, bheads=None, opts=None):
    """Print post-commit status messages ("created new head", etc.)."""
    if opts is None:
        opts = {}
    ctx = repo[node]
    parents = ctx.parents()

    if (not opts.get('amend') and bheads and node not in bheads and not
        [x for x in parents if x.node() in bheads and x.branch() == branch]):
        repo.ui.status(_('created new head\n'))
        # The message is not printed for initial roots. For the other
        # changesets, it is printed in the following situations:
        #
        # Par column: for the 2 parents with ...
        #   N: null or no parent
        #   B: parent is on another named branch
        #   C: parent is a regular non head changeset
        #   H: parent was a branch head of the current branch
        # Msg column: whether we print "created new head" message
        # In the following, it is assumed that there already exists some
        # initial branch heads of the current branch, otherwise nothing is
        # printed anyway.
        #
        #  Par Msg Comment
        #  N N  y  additional topo root
        #
        #  B N  y  additional branch root
        #  C N  y  additional topo head
        #  H N  n  usual case
        #
        #  B B  y  weird additional branch root
        #  C B  y  branch merge
        #  H B  n  merge with named branch
        #
        #  C C  y  additional head from merge
        #  C H  n  merge with a head
        #
        #  H H  n  head merge: head count decreases

    if not opts.get('close_branch'):
        for r in parents:
            if r.closesbranch() and r.branch() == branch:
                repo.ui.status(_('reopening closed branch head %d\n') % r)

    if repo.ui.debugflag:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex()))
    elif repo.ui.verbose:
        repo.ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx))

def postcommitstatus(repo, pats, opts):
    # status of the working directory after the commit, for callers that
    # want to report remaining changes
    return repo.status(match=scmutil.match(repo[None], pats, opts))

def revert(ui, repo, ctx, parents, *pats, **opts):
    """Restore matched files to their state in ctx.

    parents is the (p1, p2) pair of the working directory; the many sets
    computed below classify every affected file so the dispatch table at
    the end can pick one action (revert/add/remove/drop/forget/undelete)
    and a backup policy per file."""
    parent, p2 = parents
    node = ctx.node()

    mf = ctx.manifest()
    if node == p2:
        parent = p2

    # need all matching names in dirstate and manifest of target rev,
    # so have to walk both. do not print errors if files exist in one
    # but not other. in both cases, filesets should be evaluated against
    # workingctx to get consistent result (issue4497). this means 'set:**'
    # cannot be used to select missing files from target rev.

    # `names` is a mapping for all elements in working copy and target revision
    # The mapping is in the form:
    #   <abs path in repo> -> (<path from CWD>, <exactly specified file>)
    names = {}

    with repo.wlock():
        ## filling of the `names` mapping
        # walk dirstate to fill `names`

        interactive = opts.get('interactive', False)
        wctx = repo[None]
        m = scmutil.match(wctx, pats, opts)

        # we'll need this later
        targetsubs = sorted(s for s in wctx.substate if m(s))

        if not m.always():
            for abs in repo.walk(matchmod.badmatch(m, lambda x, y: False)):
                names[abs] = m.rel(abs), m.exact(abs)

            # walk target manifest to fill `names`

            def badfn(path, msg):
                # suppress errors for paths already seen, subrepos, or
                # directories covered by already-collected files
                if path in names:
                    return
                if path in ctx.substate:
                    return
                path_ = path + '/'
                for f in names:
                    if f.startswith(path_):
                        return
                ui.warn("%s: %s\n" % (m.rel(path), msg))

            for abs in ctx.walk(matchmod.badmatch(m, badfn)):
                if abs not in names:
                    names[abs] = m.rel(abs), m.exact(abs)

            # Find status of all file in `names`.
            m = scmutil.matchfiles(repo, names)

            changes = repo.status(node1=node, match=m,
                                  unknown=True, ignored=True, clean=True)
        else:
            changes = repo.status(node1=node, match=m)
            for kind in changes:
                for abs in kind:
                    names[abs] = m.rel(abs), m.exact(abs)

            m = scmutil.matchfiles(repo, names)

        modified = set(changes.modified)
        added = set(changes.added)
        removed = set(changes.removed)
        _deleted = set(changes.deleted)
        unknown = set(changes.unknown)
        unknown.update(changes.ignored)
        clean = set(changes.clean)
        modadded = set()

        # split between files known in target manifest and the others
        smf = set(mf)

        # determine the exact nature of the deleted changesets
        deladded = _deleted - smf
        deleted = _deleted - deladded

        # We need to account for the state of the file in the dirstate,
        # even when we revert against something else than parent. This will
        # slightly alter the behavior of revert (doing back up or not, delete
        # or just forget etc).
        if parent == node:
            dsmodified = modified
            dsadded = added
            dsremoved = removed
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded
            modified, added, removed = set(), set(), set()
        else:
            changes = repo.status(node1=parent, match=m)
            dsmodified = set(changes.modified)
            dsadded = set(changes.added)
            dsremoved = set(changes.removed)
            # store all local modifications, useful later for rename detection
            localchanges = dsmodified | dsadded

            # only take into account for removes between wc and target
            clean |= dsremoved - removed
            dsremoved &= removed
            # distinct between dirstate remove and other
            removed -= dsremoved

            modadded = added & dsmodified
            added -= modadded

            # tell newly modified apart.
            dsmodified &= modified
            dsmodified |= modified & dsadded # dirstate added may needs backup
            modified -= dsmodified

            # We need to wait for some post-processing to update this set
            # before making the distinction. The dirstate will be used for
            # that purpose.
            dsadded = added

        # in case of merge, files that are actually added can be reported as
        # modified, we need to post process the result
        if p2 != nullid:
            mergeadd = dsmodified - smf
            dsadded |= mergeadd
            dsmodified -= mergeadd

        # if f is a rename, update `names` to also revert the source
        cwd = repo.getcwd()
        for f in localchanges:
            src = repo.dirstate.copied(f)
            # XXX should we check for rename down to target node?
            if src and src not in names and repo.dirstate[src] == 'r':
                dsremoved.add(src)
                names[src] = (repo.pathto(src, cwd), True)

        # distinguish between file to forget and the other
        added = set()
        for abs in dsadded:
            if repo.dirstate[abs] != 'a':
                added.add(abs)
        dsadded -= added

        for abs in deladded:
            if repo.dirstate[abs] == 'a':
                dsadded.add(abs)
        deladded -= dsadded

        # For files marked as removed, we check if an unknown file is present at
        # the same path. If a such file exists it may need to be backed up.
        # Making the distinction at this stage helps have simpler backup
        # logic.
        removunk = set()
        for abs in removed:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                removunk.add(abs)
        removed -= removunk

        dsremovunk = set()
        for abs in dsremoved:
            target = repo.wjoin(abs)
            if os.path.lexists(target):
                dsremovunk.add(abs)
        dsremoved -= dsremovunk

        # action to be actually performed by revert
        # (<list of file>, message>) tuple
        actions = {'revert': ([], _('reverting %s\n')),
                   'add': ([], _('adding %s\n')),
                   'remove': ([], _('removing %s\n')),
                   'drop': ([], _('removing %s\n')),
                   'forget': ([], _('forgetting %s\n')),
                   'undelete': ([], _('undeleting %s\n')),
                   'noop': (None, _('no changes needed to %s\n')),
                   'unknown': (None, _('file not managed: %s\n')),
                  }

        # "constant" that convey the backup strategy.
        # All set to `discard` if `no-backup` is set do avoid checking
        # no_backup lower in the code.
        # These values are ordered for comparison purposes
        backup = 2  # unconditionally do backup
        check = 1   # check if the existing file differs from target
        discard = 0 # never do backup
        if opts.get('no_backup'):
            backup = check = discard

        backupanddel = actions['remove']
        if not opts.get('no_backup'):
            backupanddel = actions['drop']

        disptable = (
            # dispatch table:
            #   file state
            #   action
            #   make backup

            ## Sets that results that will change file on disk
            # Modified compared to target, no local change
            (modified,      actions['revert'],   discard),
            # Modified compared to target, but local file is deleted
            (deleted,       actions['revert'],   discard),
            # Modified compared to target, local change
            (dsmodified,    actions['revert'],   backup),
            # Added since target
            (added,         actions['remove'],   discard),
            # Added in working directory
            (dsadded,       actions['forget'],   discard),
            # Added since target, have local modification
            (modadded,      backupanddel,        backup),
            # Added since target but file is missing in working directory
            (deladded,      actions['drop'],     discard),
            # Removed since  target, before working copy parent
            (removed,       actions['add'],      discard),
            # Same as `removed` but an unknown file exists at the same path
            (removunk,      actions['add'],      check),
# Removed since targe, marked as such in working copy parent (dsremoved, actions['undelete'], discard), # Same as `dsremoved` but an unknown file exists at the same path (dsremovunk, actions['undelete'], check), ## the following sets does not result in any file changes # File with no modification (clean, actions['noop'], discard), # Existing file, not tracked anywhere (unknown, actions['unknown'], discard), ) for abs, (rel, exact) in sorted(names.items()): # target file to be touch on disk (relative to cwd) target = repo.wjoin(abs) # search the entry in the dispatch table. # if the file is in any of these sets, it was touched in the working # directory parent and we are sure it needs to be reverted. for table, (xlist, msg), dobackup in disptable: if abs not in table: continue if xlist is not None: xlist.append(abs) if dobackup and (backup <= dobackup or wctx[abs].cmp(ctx[abs])): bakname = scmutil.origpath(ui, repo, rel) ui.note(_('saving current version of %s as %s\n') % (rel, bakname)) if not opts.get('dry_run'): if interactive: util.copyfile(target, bakname) else: util.rename(target, bakname) if ui.verbose or not exact: if not isinstance(msg, basestring): msg = msg(abs) ui.status(msg % rel) elif exact: ui.warn(msg % rel) break if not opts.get('dry_run'): needdata = ('revert', 'add', 'undelete') _revertprefetch(repo, ctx, *[actions[name][0] for name in needdata]) _performrevert(repo, parents, ctx, actions, interactive) if targetsubs: # Revert the subrepos on the revert list for sub in targetsubs: try: wctx.sub(sub).revert(ctx.substate[sub], *pats, **opts) except KeyError: raise error.Abort("subrepository '%s' does not exist in %s!" 
% (sub, short(ctx.node()))) def _revertprefetch(repo, ctx, *files): """Let extension changing the storage layer prefetch content""" pass def _performrevert(repo, parents, ctx, actions, interactive=False): """function that actually perform all the actions computed for revert This is an independent function to let extension to plug in and react to the imminent revert. Make sure you have the working directory locked when calling this function. """ parent, p2 = parents node = ctx.node() def checkout(f): fc = ctx[f] repo.wwrite(f, fc.data(), fc.flags()) audit_path = pathutil.pathauditor(repo.root) for f in actions['forget'][0]: repo.dirstate.drop(f) for f in actions['remove'][0]: audit_path(f) try: util.unlinkpath(repo.wjoin(f)) except OSError: pass repo.dirstate.remove(f) for f in actions['drop'][0]: audit_path(f) repo.dirstate.remove(f) normal = None if node == parent: # We're reverting to our parent. If possible, we'd like status # to report the file as clean. We have to use normallookup for # merges to avoid losing information about merged/dirty files. 
if p2 != nullid: normal = repo.dirstate.normallookup else: normal = repo.dirstate.normal newlyaddedandmodifiedfiles = set() if interactive: # Prompt the user for changes to revert torevert = [repo.wjoin(f) for f in actions['revert'][0]] m = scmutil.match(ctx, torevert, {}) diffopts = patch.difffeatureopts(repo.ui, whitespace=True) diffopts.nodates = True diffopts.git = True reversehunks = repo.ui.configbool('experimental', 'revertalternateinteractivemode', True) if reversehunks: diff = patch.diff(repo, ctx.node(), None, m, opts=diffopts) else: diff = patch.diff(repo, None, ctx.node(), m, opts=diffopts) originalchunks = patch.parsepatch(diff) try: chunks, opts = recordfilter(repo.ui, originalchunks) if reversehunks: chunks = patch.reversehunks(chunks) except patch.PatchError as err: raise error.Abort(_('error parsing patch: %s') % err) newlyaddedandmodifiedfiles = newandmodified(chunks, originalchunks) # Apply changes fp = cStringIO.StringIO() for c in chunks: c.write(fp) dopatch = fp.tell() fp.seek(0) if dopatch: try: patch.internalpatch(repo.ui, repo, fp, 1, eolmode=None) except patch.PatchError as err: raise error.Abort(str(err)) del fp else: for f in actions['revert'][0]: checkout(f) if normal: normal(f) for f in actions['add'][0]: # Don't checkout modified files, they are already created by the diff if f not in newlyaddedandmodifiedfiles: checkout(f) repo.dirstate.add(f) normal = repo.dirstate.normallookup if node == parent and p2 == nullid: normal = repo.dirstate.normal for f in actions['undelete'][0]: checkout(f) normal(f) copied = copies.pathcopies(repo[parent], ctx) for f in actions['add'][0] + actions['undelete'][0] + actions['revert'][0]: if f in copied: repo.dirstate.copy(copied[f], f) def command(table): """Returns a function object to be used as a decorator for making commands. This function receives a command table as its argument. The table should be a dict. The returned function can be used as a decorator for adding commands to that command table. 
This function accepts multiple arguments to define a command. The first argument is the command name. The options argument is an iterable of tuples defining command arguments. See ``mercurial.fancyopts.fancyopts()`` for the format of each tuple. The synopsis argument defines a short, one line summary of how to use the command. This shows up in the help output. The norepo argument defines whether the command does not require a local repository. Most commands operate against a repository, thus the default is False. The optionalrepo argument defines whether the command optionally requires a local repository. The inferrepo argument defines whether to try to find a repository from the command line arguments. If True, arguments will be examined for potential repository locations. See ``findrepo()``. If a repository is found, it will be used. """ def cmd(name, options=(), synopsis=None, norepo=False, optionalrepo=False, inferrepo=False): def decorator(func): if synopsis: table[name] = func, list(options), synopsis else: table[name] = func, list(options) if norepo: # Avoid import cycle. import commands commands.norepo += ' %s' % ' '.join(parsealiases(name)) if optionalrepo: import commands commands.optionalrepo += ' %s' % ' '.join(parsealiases(name)) if inferrepo: import commands commands.inferrepo += ' %s' % ' '.join(parsealiases(name)) return func return decorator return cmd # a list of (ui, repo, otherpeer, opts, missing) functions called by # commands.outgoing. "missing" is "missing" of the result of # "findcommonoutgoing()" outgoinghooks = util.hooks() # a list of (ui, repo) functions called by commands.summary summaryhooks = util.hooks() # a list of (ui, repo, opts, changes) functions called by commands.summary. 
# # functions should return tuple of booleans below, if 'changes' is None: # (whether-incomings-are-needed, whether-outgoings-are-needed) # # otherwise, 'changes' is a tuple of tuples below: # - (sourceurl, sourcebranch, sourcepeer, incoming) # - (desturl, destbranch, destpeer, outgoing) summaryremotehooks = util.hooks() # A list of state files kept by multistep operations like graft. # Since graft cannot be aborted, it is considered 'clearable' by update. # note: bisect is intentionally excluded # (state file, clearable, allowcommit, error, hint) unfinishedstates = [ ('graftstate', True, False, _('graft in progress'), _("use 'hg graft --continue' or 'hg update' to abort")), ('updatestate', True, False, _('last update was interrupted'), _("use 'hg update' to get a consistent checkout")) ] def checkunfinished(repo, commit=False): '''Look for an unfinished multistep operation, like graft, and abort if found. It's probably good to check this right before bailifchanged(). ''' for f, clearable, allowcommit, msg, hint in unfinishedstates: if commit and allowcommit: continue if repo.vfs.exists(f): raise error.Abort(msg, hint=hint) def clearunfinished(repo): '''Check for unfinished operations (as above), and clear the ones that are clearable. ''' for f, clearable, allowcommit, msg, hint in unfinishedstates: if not clearable and repo.vfs.exists(f): raise error.Abort(msg, hint=hint) for f, clearable, allowcommit, msg, hint in unfinishedstates: if clearable and repo.vfs.exists(f): util.unlink(repo.join(f)) afterresolvedstates = [ ('graftstate', _('hg graft --continue')), ] def checkafterresolved(repo): contmsg = _("continue: %s\n") for f, msg in afterresolvedstates: if repo.vfs.exists(f): repo.ui.warn(contmsg % msg) return repo.ui.note(contmsg % _("hg commit")) class dirstateguard(object): '''Restore dirstate at unexpected failure. 
At the construction, this class does: - write current ``repo.dirstate`` out, and - save ``.hg/dirstate`` into the backup file This restores ``.hg/dirstate`` from backup file, if ``release()`` is invoked before ``close()``. This just removes the backup file at ``close()`` before ``release()``. ''' def __init__(self, repo, name): self._repo = repo self._suffix = '.backup.%s.%d' % (name, id(self)) repo.dirstate._savebackup(repo.currenttransaction(), self._suffix) self._active = True self._closed = False def __del__(self): if self._active: # still active # this may occur, even if this class is used correctly: # for example, releasing other resources like transaction # may raise exception before ``dirstateguard.release`` in # ``release(tr, ....)``. self._abort() def close(self): if not self._active: # already inactivated msg = (_("can't close already inactivated backup: dirstate%s") % self._suffix) raise error.Abort(msg) self._repo.dirstate._clearbackup(self._repo.currenttransaction(), self._suffix) self._active = False self._closed = True def _abort(self): self._repo.dirstate._restorebackup(self._repo.currenttransaction(), self._suffix) self._active = False def release(self): if not self._closed: if not self._active: # already inactivated msg = (_("can't release already inactivated backup:" " dirstate%s") % self._suffix) raise error.Abort(msg) self._abort() mercurial-3.7.3/mercurial/win32.py0000644000175000017500000004200312676531525016425 0ustar mpmmpm00000000000000# win32.py - utility functions that use win32 API # # Copyright 2005-2009 Matt Mackall and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
from __future__ import absolute_import import ctypes import errno import msvcrt import os import random import subprocess _kernel32 = ctypes.windll.kernel32 _advapi32 = ctypes.windll.advapi32 _user32 = ctypes.windll.user32 _BOOL = ctypes.c_long _WORD = ctypes.c_ushort _DWORD = ctypes.c_ulong _UINT = ctypes.c_uint _LONG = ctypes.c_long _LPCSTR = _LPSTR = ctypes.c_char_p _HANDLE = ctypes.c_void_p _HWND = _HANDLE _INVALID_HANDLE_VALUE = _HANDLE(-1).value # GetLastError _ERROR_SUCCESS = 0 _ERROR_NO_MORE_FILES = 18 _ERROR_INVALID_PARAMETER = 87 _ERROR_BROKEN_PIPE = 109 _ERROR_INSUFFICIENT_BUFFER = 122 # WPARAM is defined as UINT_PTR (unsigned type) # LPARAM is defined as LONG_PTR (signed type) if ctypes.sizeof(ctypes.c_long) == ctypes.sizeof(ctypes.c_void_p): _WPARAM = ctypes.c_ulong _LPARAM = ctypes.c_long elif ctypes.sizeof(ctypes.c_longlong) == ctypes.sizeof(ctypes.c_void_p): _WPARAM = ctypes.c_ulonglong _LPARAM = ctypes.c_longlong class _FILETIME(ctypes.Structure): _fields_ = [('dwLowDateTime', _DWORD), ('dwHighDateTime', _DWORD)] class _BY_HANDLE_FILE_INFORMATION(ctypes.Structure): _fields_ = [('dwFileAttributes', _DWORD), ('ftCreationTime', _FILETIME), ('ftLastAccessTime', _FILETIME), ('ftLastWriteTime', _FILETIME), ('dwVolumeSerialNumber', _DWORD), ('nFileSizeHigh', _DWORD), ('nFileSizeLow', _DWORD), ('nNumberOfLinks', _DWORD), ('nFileIndexHigh', _DWORD), ('nFileIndexLow', _DWORD)] # CreateFile _FILE_SHARE_READ = 0x00000001 _FILE_SHARE_WRITE = 0x00000002 _FILE_SHARE_DELETE = 0x00000004 _OPEN_EXISTING = 3 _FILE_FLAG_BACKUP_SEMANTICS = 0x02000000 # SetFileAttributes _FILE_ATTRIBUTE_NORMAL = 0x80 _FILE_ATTRIBUTE_NOT_CONTENT_INDEXED = 0x2000 # Process Security and Access Rights _PROCESS_QUERY_INFORMATION = 0x0400 # GetExitCodeProcess _STILL_ACTIVE = 259 class _STARTUPINFO(ctypes.Structure): _fields_ = [('cb', _DWORD), ('lpReserved', _LPSTR), ('lpDesktop', _LPSTR), ('lpTitle', _LPSTR), ('dwX', _DWORD), ('dwY', _DWORD), ('dwXSize', _DWORD), ('dwYSize', _DWORD), 
('dwXCountChars', _DWORD), ('dwYCountChars', _DWORD), ('dwFillAttribute', _DWORD), ('dwFlags', _DWORD), ('wShowWindow', _WORD), ('cbReserved2', _WORD), ('lpReserved2', ctypes.c_char_p), ('hStdInput', _HANDLE), ('hStdOutput', _HANDLE), ('hStdError', _HANDLE)] class _PROCESS_INFORMATION(ctypes.Structure): _fields_ = [('hProcess', _HANDLE), ('hThread', _HANDLE), ('dwProcessId', _DWORD), ('dwThreadId', _DWORD)] _CREATE_NO_WINDOW = 0x08000000 _SW_HIDE = 0 class _COORD(ctypes.Structure): _fields_ = [('X', ctypes.c_short), ('Y', ctypes.c_short)] class _SMALL_RECT(ctypes.Structure): _fields_ = [('Left', ctypes.c_short), ('Top', ctypes.c_short), ('Right', ctypes.c_short), ('Bottom', ctypes.c_short)] class _CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure): _fields_ = [('dwSize', _COORD), ('dwCursorPosition', _COORD), ('wAttributes', _WORD), ('srWindow', _SMALL_RECT), ('dwMaximumWindowSize', _COORD)] _STD_ERROR_HANDLE = _DWORD(-12).value # CreateToolhelp32Snapshot, Process32First, Process32Next _TH32CS_SNAPPROCESS = 0x00000002 _MAX_PATH = 260 class _tagPROCESSENTRY32(ctypes.Structure): _fields_ = [('dwsize', _DWORD), ('cntUsage', _DWORD), ('th32ProcessID', _DWORD), ('th32DefaultHeapID', ctypes.c_void_p), ('th32ModuleID', _DWORD), ('cntThreads', _DWORD), ('th32ParentProcessID', _DWORD), ('pcPriClassBase', _LONG), ('dwFlags', _DWORD), ('szExeFile', ctypes.c_char * _MAX_PATH)] def __init__(self): super(_tagPROCESSENTRY32, self).__init__() self.dwsize = ctypes.sizeof(self) # types of parameters of C functions used (required by pypy) _kernel32.CreateFileA.argtypes = [_LPCSTR, _DWORD, _DWORD, ctypes.c_void_p, _DWORD, _DWORD, _HANDLE] _kernel32.CreateFileA.restype = _HANDLE _kernel32.GetFileInformationByHandle.argtypes = [_HANDLE, ctypes.c_void_p] _kernel32.GetFileInformationByHandle.restype = _BOOL _kernel32.CloseHandle.argtypes = [_HANDLE] _kernel32.CloseHandle.restype = _BOOL try: _kernel32.CreateHardLinkA.argtypes = [_LPCSTR, _LPCSTR, ctypes.c_void_p] 
_kernel32.CreateHardLinkA.restype = _BOOL except AttributeError: pass _kernel32.SetFileAttributesA.argtypes = [_LPCSTR, _DWORD] _kernel32.SetFileAttributesA.restype = _BOOL _kernel32.OpenProcess.argtypes = [_DWORD, _BOOL, _DWORD] _kernel32.OpenProcess.restype = _HANDLE _kernel32.GetExitCodeProcess.argtypes = [_HANDLE, ctypes.c_void_p] _kernel32.GetExitCodeProcess.restype = _BOOL _kernel32.GetLastError.argtypes = [] _kernel32.GetLastError.restype = _DWORD _kernel32.GetModuleFileNameA.argtypes = [_HANDLE, ctypes.c_void_p, _DWORD] _kernel32.GetModuleFileNameA.restype = _DWORD _kernel32.CreateProcessA.argtypes = [_LPCSTR, _LPCSTR, ctypes.c_void_p, ctypes.c_void_p, _BOOL, _DWORD, ctypes.c_void_p, _LPCSTR, ctypes.c_void_p, ctypes.c_void_p] _kernel32.CreateProcessA.restype = _BOOL _kernel32.ExitProcess.argtypes = [_UINT] _kernel32.ExitProcess.restype = None _kernel32.GetCurrentProcessId.argtypes = [] _kernel32.GetCurrentProcessId.restype = _DWORD _SIGNAL_HANDLER = ctypes.WINFUNCTYPE(_BOOL, _DWORD) _kernel32.SetConsoleCtrlHandler.argtypes = [_SIGNAL_HANDLER, _BOOL] _kernel32.SetConsoleCtrlHandler.restype = _BOOL _kernel32.GetStdHandle.argtypes = [_DWORD] _kernel32.GetStdHandle.restype = _HANDLE _kernel32.GetConsoleScreenBufferInfo.argtypes = [_HANDLE, ctypes.c_void_p] _kernel32.GetConsoleScreenBufferInfo.restype = _BOOL _advapi32.GetUserNameA.argtypes = [ctypes.c_void_p, ctypes.c_void_p] _advapi32.GetUserNameA.restype = _BOOL _user32.GetWindowThreadProcessId.argtypes = [_HANDLE, ctypes.c_void_p] _user32.GetWindowThreadProcessId.restype = _DWORD _user32.ShowWindow.argtypes = [_HANDLE, ctypes.c_int] _user32.ShowWindow.restype = _BOOL _WNDENUMPROC = ctypes.WINFUNCTYPE(_BOOL, _HWND, _LPARAM) _user32.EnumWindows.argtypes = [_WNDENUMPROC, _LPARAM] _user32.EnumWindows.restype = _BOOL _kernel32.CreateToolhelp32Snapshot.argtypes = [_DWORD, _DWORD] _kernel32.CreateToolhelp32Snapshot.restype = _BOOL _kernel32.PeekNamedPipe.argtypes = [_HANDLE, ctypes.c_void_p, _DWORD, 
ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] _kernel32.PeekNamedPipe.restype = _BOOL _kernel32.Process32First.argtypes = [_HANDLE, ctypes.c_void_p] _kernel32.Process32First.restype = _BOOL _kernel32.Process32Next.argtypes = [_HANDLE, ctypes.c_void_p] _kernel32.Process32Next.restype = _BOOL def _raiseoserror(name): err = ctypes.WinError() raise OSError(err.errno, '%s: %s' % (name, err.strerror)) def _getfileinfo(name): fh = _kernel32.CreateFileA(name, 0, _FILE_SHARE_READ | _FILE_SHARE_WRITE | _FILE_SHARE_DELETE, None, _OPEN_EXISTING, _FILE_FLAG_BACKUP_SEMANTICS, None) if fh == _INVALID_HANDLE_VALUE: _raiseoserror(name) try: fi = _BY_HANDLE_FILE_INFORMATION() if not _kernel32.GetFileInformationByHandle(fh, ctypes.byref(fi)): _raiseoserror(name) return fi finally: _kernel32.CloseHandle(fh) def oslink(src, dst): try: if not _kernel32.CreateHardLinkA(dst, src, None): _raiseoserror(src) except AttributeError: # Wine doesn't support this function _raiseoserror(src) def nlinks(name): '''return number of hardlinks for the given file''' return _getfileinfo(name).nNumberOfLinks def samefile(path1, path2): '''Returns whether path1 and path2 refer to the same file or directory.''' res1 = _getfileinfo(path1) res2 = _getfileinfo(path2) return (res1.dwVolumeSerialNumber == res2.dwVolumeSerialNumber and res1.nFileIndexHigh == res2.nFileIndexHigh and res1.nFileIndexLow == res2.nFileIndexLow) def samedevice(path1, path2): '''Returns whether path1 and path2 are on the same device.''' res1 = _getfileinfo(path1) res2 = _getfileinfo(path2) return res1.dwVolumeSerialNumber == res2.dwVolumeSerialNumber def peekpipe(pipe): handle = msvcrt.get_osfhandle(pipe.fileno()) avail = _DWORD() if not _kernel32.PeekNamedPipe(handle, None, 0, None, ctypes.byref(avail), None): err = _kernel32.GetLastError() if err == _ERROR_BROKEN_PIPE: return 0 raise ctypes.WinError(err) return avail.value def testpid(pid): '''return True if pid is still running or unable to determine, False otherwise''' h = 
_kernel32.OpenProcess(_PROCESS_QUERY_INFORMATION, False, pid) if h: try: status = _DWORD() if _kernel32.GetExitCodeProcess(h, ctypes.byref(status)): return status.value == _STILL_ACTIVE finally: _kernel32.CloseHandle(h) return _kernel32.GetLastError() != _ERROR_INVALID_PARAMETER def executablepath(): '''return full path of hg.exe''' size = 600 buf = ctypes.create_string_buffer(size + 1) len = _kernel32.GetModuleFileNameA(None, ctypes.byref(buf), size) if len == 0: raise ctypes.WinError() # Note: WinError is a function elif len == size: raise ctypes.WinError(_ERROR_INSUFFICIENT_BUFFER) return buf.value def getuser(): '''return name of current user''' size = _DWORD(300) buf = ctypes.create_string_buffer(size.value + 1) if not _advapi32.GetUserNameA(ctypes.byref(buf), ctypes.byref(size)): raise ctypes.WinError() return buf.value _signalhandler = [] def setsignalhandler(): '''Register a termination handler for console events including CTRL+C. python signal handlers do not work well with socket operations. ''' def handler(event): _kernel32.ExitProcess(1) if _signalhandler: return # already registered h = _SIGNAL_HANDLER(handler) _signalhandler.append(h) # needed to prevent garbage collection if not _kernel32.SetConsoleCtrlHandler(h, True): raise ctypes.WinError() def hidewindow(): def callback(hwnd, pid): wpid = _DWORD() _user32.GetWindowThreadProcessId(hwnd, ctypes.byref(wpid)) if pid == wpid.value: _user32.ShowWindow(hwnd, _SW_HIDE) return False # stop enumerating windows return True pid = _kernel32.GetCurrentProcessId() _user32.EnumWindows(_WNDENUMPROC(callback), pid) def termwidth(): # cmd.exe does not handle CR like a unix console, the CR is # counted in the line length. On 80 columns consoles, if 80 # characters are written, the following CR won't apply on the # current line but on the new one. Keep room for it. 
width = 79 # Query stderr to avoid problems with redirections screenbuf = _kernel32.GetStdHandle( _STD_ERROR_HANDLE) # don't close the handle returned if screenbuf is None or screenbuf == _INVALID_HANDLE_VALUE: return width csbi = _CONSOLE_SCREEN_BUFFER_INFO() if not _kernel32.GetConsoleScreenBufferInfo( screenbuf, ctypes.byref(csbi)): return width width = csbi.srWindow.Right - csbi.srWindow.Left return width def _1stchild(pid): '''return the 1st found child of the given pid None is returned when no child is found''' pe = _tagPROCESSENTRY32() # create handle to list all processes ph = _kernel32.CreateToolhelp32Snapshot(_TH32CS_SNAPPROCESS, 0) if ph == _INVALID_HANDLE_VALUE: raise ctypes.WinError() try: r = _kernel32.Process32First(ph, ctypes.byref(pe)) # loop over all processes while r: if pe.th32ParentProcessID == pid: # return first child found return pe.th32ProcessID r = _kernel32.Process32Next(ph, ctypes.byref(pe)) finally: _kernel32.CloseHandle(ph) if _kernel32.GetLastError() != _ERROR_NO_MORE_FILES: raise ctypes.WinError() return None # no child found class _tochildpid(int): # pid is _DWORD, which always matches in an int '''helper for spawndetached, returns the child pid on conversion to string Does not resolve the child pid immediately because the child may not yet be started. ''' def childpid(self): '''returns the child pid of the first found child of the process with this pid''' return _1stchild(self) def __str__(self): # run when the pid is written to the file ppid = self.childpid() if ppid is None: # race, child has exited since check # fall back to this pid. Its process will also have disappeared, # raising the same error type later as when the child pid would # be returned. return " %d" % self return str(ppid) def spawndetached(args): # No standard library function really spawns a fully detached # process under win32 because they allocate pipes or other objects # to handle standard streams communications. 
Passing these objects # to the child process requires handle inheritance to be enabled # which makes really detached processes impossible. si = _STARTUPINFO() si.cb = ctypes.sizeof(_STARTUPINFO) pi = _PROCESS_INFORMATION() env = '' for k in os.environ: env += "%s=%s\0" % (k, os.environ[k]) if not env: env = '\0' env += '\0' args = subprocess.list2cmdline(args) # Not running the command in shell mode makes Python 2.6 hang when # writing to hgweb output socket. comspec = os.environ.get("COMSPEC", "cmd.exe") args = comspec + " /c " + args res = _kernel32.CreateProcessA( None, args, None, None, False, _CREATE_NO_WINDOW, env, os.getcwd(), ctypes.byref(si), ctypes.byref(pi)) if not res: raise ctypes.WinError() # _tochildpid because the process is the child of COMSPEC return _tochildpid(pi.dwProcessId) def unlink(f): '''try to implement POSIX' unlink semantics on Windows''' if os.path.isdir(f): # use EPERM because it is POSIX prescribed value, even though # unlink(2) on directories returns EISDIR on Linux raise IOError(errno.EPERM, "Unlinking directory not permitted: '%s'" % f) # POSIX allows to unlink and rename open files. Windows has serious # problems with doing that: # - Calling os.unlink (or os.rename) on a file f fails if f or any # hardlinked copy of f has been opened with Python's open(). There is no # way such a file can be deleted or renamed on Windows (other than # scheduling the delete or rename for the next reboot). # - Calling os.unlink on a file that has been opened with Mercurial's # posixfile (or comparable methods) will delay the actual deletion of # the file for as long as the file is held open. The filename is blocked # during that time and cannot be used for recreating a new file under # that same name ("zombie file"). Directories containing such zombie files # cannot be removed or moved. # A file that has been opened with posixfile can be renamed, so we rename # f to a random temporary name before calling os.unlink on it. 
This allows # callers to recreate f immediately while having other readers do their # implicit zombie filename blocking on a temporary name. for tries in xrange(10): temp = '%s-%08x' % (f, random.randint(0, 0xffffffff)) try: os.rename(f, temp) # raises OSError EEXIST if temp exists break except OSError as e: if e.errno != errno.EEXIST: raise else: raise IOError(errno.EEXIST, "No usable temporary filename found") try: os.unlink(temp) except OSError: # The unlink might have failed because the READONLY attribute may heave # been set on the original file. Rename works fine with READONLY set, # but not os.unlink. Reset all attributes and try again. _kernel32.SetFileAttributesA(temp, _FILE_ATTRIBUTE_NORMAL) try: os.unlink(temp) except OSError: # The unlink might have failed due to some very rude AV-Scanners. # Leaking a tempfile is the lesser evil than aborting here and # leaving some potentially serious inconsistencies. pass def makedir(path, notindexed): os.mkdir(path) if notindexed: _kernel32.SetFileAttributesA(path, _FILE_ATTRIBUTE_NOT_CONTENT_INDEXED) mercurial-3.7.3/mercurial/merge.py0000644000175000017500000016465312676531525016602 0ustar mpmmpm00000000000000# merge.py - directory-level update/merge handling for Mercurial # # Copyright 2006, 2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import errno import os import shutil import struct from .i18n import _ from .node import ( bin, hex, nullhex, nullid, nullrev, ) from . import ( copies, destutil, error, filemerge, obsolete, scmutil, subrepo, util, worker, ) _pack = struct.pack _unpack = struct.unpack def _droponode(data): # used for compatibility for v1 bits = data.split('\0') bits = bits[:-2] + bits[-1:] return '\0'.join(bits) class mergestate(object): '''track 3-way merge state of individual files The merge state is stored on disk when needed. 
Two files are used: one with an old format (version 1), and one with a new format (version 2). Version 2 stores a superset of the data in version 1, including new kinds of records in the future. For more about the new format, see the documentation for `_readrecordsv2`. Each record can contain arbitrary content, and has an associated type. This `type` should be a letter. If `type` is uppercase, the record is mandatory: versions of Mercurial that don't support it should abort. If `type` is lowercase, the record can be safely ignored. Currently known records: L: the node of the "local" part of the merge (hexified version) O: the node of the "other" part of the merge (hexified version) F: a file to be merged entry C: a change/delete or delete/change conflict D: a file that the external merge driver will merge internally (experimental) m: the external merge driver defined for this merge plus its run state (experimental) X: unsupported mandatory record type (used in tests) x: unsupported advisory record type (used in tests) Merge driver run states (experimental): u: driver-resolved files unmarked -- needs to be run next time we're about to resolve or commit m: driver-resolved files marked -- only needs to be run before commit s: success/skipped -- does not need to be run any more ''' statepathv1 = 'merge/state' statepathv2 = 'merge/state2' @staticmethod def clean(repo, node=None, other=None): """Initialize a brand new merge state, removing any existing state on disk.""" ms = mergestate(repo) ms.reset(node, other) return ms @staticmethod def read(repo): """Initialize the merge state, reading it from disk.""" ms = mergestate(repo) ms._read() return ms def __init__(self, repo): """Initialize the merge state. Do not use this directly! 
Instead call read() or clean().""" self._repo = repo self._dirty = False def reset(self, node=None, other=None): self._state = {} self._local = None self._other = None for var in ('localctx', 'otherctx'): if var in vars(self): delattr(self, var) if node: self._local = node self._other = other self._readmergedriver = None if self.mergedriver: self._mdstate = 's' else: self._mdstate = 'u' shutil.rmtree(self._repo.join('merge'), True) self._results = {} self._dirty = False def _read(self): """Analyse each record content to restore a serialized state from disk This function process "record" entry produced by the de-serialization of on disk file. """ self._state = {} self._local = None self._other = None for var in ('localctx', 'otherctx'): if var in vars(self): delattr(self, var) self._readmergedriver = None self._mdstate = 's' unsupported = set() records = self._readrecords() for rtype, record in records: if rtype == 'L': self._local = bin(record) elif rtype == 'O': self._other = bin(record) elif rtype == 'm': bits = record.split('\0', 1) mdstate = bits[1] if len(mdstate) != 1 or mdstate not in 'ums': # the merge driver should be idempotent, so just rerun it mdstate = 'u' self._readmergedriver = bits[0] self._mdstate = mdstate elif rtype in 'FDC': bits = record.split('\0') self._state[bits[0]] = bits[1:] elif not rtype.islower(): unsupported.add(rtype) self._results = {} self._dirty = False if unsupported: raise error.UnsupportedMergeRecords(unsupported) def _readrecords(self): """Read merge state from disk and return a list of record (TYPE, data) We read data from both v1 and v2 files and decide which one to use. V1 has been used by version prior to 2.9.1 and contains less data than v2. We read both versions and check if no data in v2 contradicts v1. If there is not contradiction we can safely assume that both v1 and v2 were written at the same time and use the extract data in v2. 
If there is contradiction we ignore v2 content as we assume an old version of Mercurial has overwritten the mergestate file and left an old v2 file around. returns list of record [(TYPE, data), ...]""" v1records = self._readrecordsv1() v2records = self._readrecordsv2() if self._v1v2match(v1records, v2records): return v2records else: # v1 file is newer than v2 file, use it # we have to infer the "other" changeset of the merge # we cannot do better than that with v1 of the format mctx = self._repo[None].parents()[-1] v1records.append(('O', mctx.hex())) # add place holder "other" file node information # nobody is using it yet so we do no need to fetch the data # if mctx was wrong `mctx[bits[-2]]` may fails. for idx, r in enumerate(v1records): if r[0] == 'F': bits = r[1].split('\0') bits.insert(-2, '') v1records[idx] = (r[0], '\0'.join(bits)) return v1records def _v1v2match(self, v1records, v2records): oldv2 = set() # old format version of v2 record for rec in v2records: if rec[0] == 'L': oldv2.add(rec) elif rec[0] == 'F': # drop the onode data (not contained in v1) oldv2.add(('F', _droponode(rec[1]))) for rec in v1records: if rec not in oldv2: return False else: return True def _readrecordsv1(self): """read on disk merge state for version 1 file returns list of record [(TYPE, data), ...] Note: the "F" data from this file are one entry short (no "other file node" entry) """ records = [] try: f = self._repo.vfs(self.statepathv1) for i, l in enumerate(f): if i == 0: records.append(('L', l[:-1])) else: records.append(('F', l[:-1])) f.close() except IOError as err: if err.errno != errno.ENOENT: raise return records def _readrecordsv2(self): """read on disk merge state for version 2 file This format is a list of arbitrary records of the form: [type][length][content] `type` is a single character, `length` is a 4 byte integer, and `content` is an arbitrary byte sequence of length `length`. 
Mercurial versions prior to 3.7 have a bug where if there are unsupported mandatory merge records, attempting to clear out the merge state with hg update --clean or similar aborts. The 't' record type works around that by writing out what those versions treat as an advisory record, but later versions interpret as special: the first character is the 'real' record type and everything onwards is the data. Returns list of records [(TYPE, data), ...].""" records = [] try: f = self._repo.vfs(self.statepathv2) data = f.read() off = 0 end = len(data) while off < end: rtype = data[off] off += 1 length = _unpack('>I', data[off:(off + 4)])[0] off += 4 record = data[off:(off + length)] off += length if rtype == 't': rtype, record = record[0], record[1:] records.append((rtype, record)) f.close() except IOError as err: if err.errno != errno.ENOENT: raise return records @util.propertycache def mergedriver(self): # protect against the following: # - A configures a malicious merge driver in their hgrc, then # pauses the merge # - A edits their hgrc to remove references to the merge driver # - A gives a copy of their entire repo, including .hg, to B # - B inspects .hgrc and finds it to be clean # - B then continues the merge and the malicious merge driver # gets invoked configmergedriver = self._repo.ui.config('experimental', 'mergedriver') if (self._readmergedriver is not None and self._readmergedriver != configmergedriver): raise error.ConfigError( _("merge driver changed since merge started"), hint=_("revert merge driver change or abort merge")) return configmergedriver @util.propertycache def localctx(self): if self._local is None: raise RuntimeError("localctx accessed but self._local isn't set") return self._repo[self._local] @util.propertycache def otherctx(self): if self._other is None: raise RuntimeError("localctx accessed but self._local isn't set") return self._repo[self._other] def active(self): """Whether mergestate is active. 
Returns True if there appears to be mergestate. This is a rough proxy for "is a merge in progress." """ # Check local variables before looking at filesystem for performance # reasons. return bool(self._local) or bool(self._state) or \ self._repo.vfs.exists(self.statepathv1) or \ self._repo.vfs.exists(self.statepathv2) def commit(self): """Write current state on disk (if necessary)""" if self._dirty: records = self._makerecords() self._writerecords(records) self._dirty = False def _makerecords(self): records = [] records.append(('L', hex(self._local))) records.append(('O', hex(self._other))) if self.mergedriver: records.append(('m', '\0'.join([ self.mergedriver, self._mdstate]))) for d, v in self._state.iteritems(): if v[0] == 'd': records.append(('D', '\0'.join([d] + v))) # v[1] == local ('cd'), v[6] == other ('dc') -- not supported by # older versions of Mercurial elif v[1] == nullhex or v[6] == nullhex: records.append(('C', '\0'.join([d] + v))) else: records.append(('F', '\0'.join([d] + v))) return records def _writerecords(self, records): """Write current state on disk (both v1 and v2)""" self._writerecordsv1(records) self._writerecordsv2(records) def _writerecordsv1(self, records): """Write current state on disk in a version 1 file""" f = self._repo.vfs(self.statepathv1, 'w') irecords = iter(records) lrecords = irecords.next() assert lrecords[0] == 'L' f.write(hex(self._local) + '\n') for rtype, data in irecords: if rtype == 'F': f.write('%s\n' % _droponode(data)) f.close() def _writerecordsv2(self, records): """Write current state on disk in a version 2 file See the docstring for _readrecordsv2 for why we use 't'.""" # these are the records that all version 2 clients can read whitelist = 'LOF' f = self._repo.vfs(self.statepathv2, 'w') for key, data in records: assert len(key) == 1 if key not in whitelist: key, data = 't', '%s%s' % (key, data) format = '>sI%is' % len(data) f.write(_pack(format, key, len(data), data)) f.close() def add(self, fcl, fco, fca, fd): 
"""add a new (potentially?) conflicting file the merge state fcl: file context for local, fco: file context for remote, fca: file context for ancestors, fd: file path of the resulting merge. note: also write the local version to the `.hg/merge` directory. """ if fcl.isabsent(): hash = nullhex else: hash = util.sha1(fcl.path()).hexdigest() self._repo.vfs.write('merge/' + hash, fcl.data()) self._state[fd] = ['u', hash, fcl.path(), fca.path(), hex(fca.filenode()), fco.path(), hex(fco.filenode()), fcl.flags()] self._dirty = True def __contains__(self, dfile): return dfile in self._state def __getitem__(self, dfile): return self._state[dfile][0] def __iter__(self): return iter(sorted(self._state)) def files(self): return self._state.keys() def mark(self, dfile, state): self._state[dfile][0] = state self._dirty = True def mdstate(self): return self._mdstate def unresolved(self): """Obtain the paths of unresolved files.""" for f, entry in self._state.items(): if entry[0] == 'u': yield f def driverresolved(self): """Obtain the paths of driver-resolved files.""" for f, entry in self._state.items(): if entry[0] == 'd': yield f def _resolve(self, preresolve, dfile, wctx, labels=None): """rerun merge process for file path `dfile`""" if self[dfile] in 'rd': return True, 0 stateentry = self._state[dfile] state, hash, lfile, afile, anode, ofile, onode, flags = stateentry octx = self._repo[self._other] fcd = self._filectxorabsent(hash, wctx, dfile) fco = self._filectxorabsent(onode, octx, ofile) # TODO: move this to filectxorabsent fca = self._repo.filectx(afile, fileid=anode) # "premerge" x flags flo = fco.flags() fla = fca.flags() if 'x' in flags + flo + fla and 'l' not in flags + flo + fla: if fca.node() == nullid: if preresolve: self._repo.ui.warn( _('warning: cannot merge flags for %s\n') % afile) elif flags == fla: flags = flo if preresolve: # restore local if hash != nullhex: f = self._repo.vfs('merge/' + hash) self._repo.wwrite(dfile, f.read(), flags) f.close() else: 
self._repo.wvfs.unlinkpath(dfile, ignoremissing=True) complete, r, deleted = filemerge.premerge(self._repo, self._local, lfile, fcd, fco, fca, labels=labels) else: complete, r, deleted = filemerge.filemerge(self._repo, self._local, lfile, fcd, fco, fca, labels=labels) if r is None: # no real conflict del self._state[dfile] self._dirty = True elif not r: self.mark(dfile, 'r') if complete: action = None if deleted: if fcd.isabsent(): # dc: local picked. Need to drop if present, which may # happen on re-resolves. action = 'f' else: # cd: remote picked (or otherwise deleted) action = 'r' else: if fcd.isabsent(): # dc: remote picked action = 'g' elif fco.isabsent(): # cd: local picked if dfile in self.localctx: action = 'am' else: action = 'a' # else: regular merges (no action necessary) self._results[dfile] = r, action return complete, r def _filectxorabsent(self, hexnode, ctx, f): if hexnode == nullhex: return filemerge.absentfilectx(ctx, f) else: return ctx[f] def preresolve(self, dfile, wctx, labels=None): """run premerge process for dfile Returns whether the merge is complete, and the exit code.""" return self._resolve(True, dfile, wctx, labels=labels) def resolve(self, dfile, wctx, labels=None): """run merge process (assuming premerge was run) for dfile Returns the exit code of the merge.""" return self._resolve(False, dfile, wctx, labels=labels)[1] def counts(self): """return counts for updated, merged and removed files in this session""" updated, merged, removed = 0, 0, 0 for r, action in self._results.itervalues(): if r is None: updated += 1 elif r == 0: if action == 'r': removed += 1 else: merged += 1 return updated, merged, removed def unresolvedcount(self): """get unresolved count for this merge (persistent)""" return len([True for f, entry in self._state.iteritems() if entry[0] == 'u']) def actions(self): """return lists of actions to perform on the dirstate""" actions = {'r': [], 'f': [], 'a': [], 'am': [], 'g': []} for f, (r, action) in 
self._results.iteritems(): if action is not None: actions[action].append((f, None, "merge result")) return actions def recordactions(self): """record remove/add/get actions in the dirstate""" branchmerge = self._repo.dirstate.p2() != nullid recordupdates(self._repo, self.actions(), branchmerge) def queueremove(self, f): """queues a file to be removed from the dirstate Meant for use by custom merge drivers.""" self._results[f] = 0, 'r' def queueadd(self, f): """queues a file to be added to the dirstate Meant for use by custom merge drivers.""" self._results[f] = 0, 'a' def queueget(self, f): """queues a file to be marked modified in the dirstate Meant for use by custom merge drivers.""" self._results[f] = 0, 'g' def _getcheckunknownconfig(repo, section, name): config = repo.ui.config(section, name, default='abort') valid = ['abort', 'ignore', 'warn'] if config not in valid: validstr = ', '.join(["'" + v + "'" for v in valid]) raise error.ConfigError(_("%s.%s not valid " "('%s' is none of %s)") % (section, name, config, validstr)) return config def _checkunknownfile(repo, wctx, mctx, f, f2=None): if f2 is None: f2 = f return (repo.wvfs.isfileorlink(f) and repo.wvfs.audit.check(f) and repo.dirstate.normalize(f) not in repo.dirstate and mctx[f2].cmp(wctx[f])) def _checkunknownfiles(repo, wctx, mctx, force, actions): """ Considers any actions that care about the presence of conflicting unknown files. For some actions, the result is to abort; for others, it is to choose a different action. 
""" conflicts = set() if not force: abortconflicts = set() warnconflicts = set() def collectconflicts(conflicts, config): if config == 'abort': abortconflicts.update(conflicts) elif config == 'warn': warnconflicts.update(conflicts) unknownconfig = _getcheckunknownconfig(repo, 'merge', 'checkunknown') ignoredconfig = _getcheckunknownconfig(repo, 'merge', 'checkignored') for f, (m, args, msg) in actions.iteritems(): if m in ('c', 'dc'): if _checkunknownfile(repo, wctx, mctx, f): conflicts.add(f) elif m == 'dg': if _checkunknownfile(repo, wctx, mctx, f, args[0]): conflicts.add(f) ignoredconflicts = set([c for c in conflicts if repo.dirstate._ignore(c)]) unknownconflicts = conflicts - ignoredconflicts collectconflicts(ignoredconflicts, ignoredconfig) collectconflicts(unknownconflicts, unknownconfig) for f in sorted(abortconflicts): repo.ui.warn(_("%s: untracked file differs\n") % f) if abortconflicts: raise error.Abort(_("untracked files in working directory " "differ from files in requested revision")) for f in sorted(warnconflicts): repo.ui.warn(_("%s: replacing untracked file\n") % f) for f, (m, args, msg) in actions.iteritems(): backup = f in conflicts if m == 'c': flags, = args actions[f] = ('g', (flags, backup), msg) elif m == 'cm': fl2, anc = args different = _checkunknownfile(repo, wctx, mctx, f) if different: actions[f] = ('m', (f, f, None, False, anc), "remote differs from untracked local") else: actions[f] = ('g', (fl2, backup), "remote created") def _forgetremoved(wctx, mctx, branchmerge): """ Forget removed files If we're jumping between revisions (as opposed to merging), and if neither the working directory nor the target rev has the file, then we need to remove it from the dirstate, to prevent the dirstate from listing the file when it is no longer in the manifest. If we're merging, and the other revision has removed a file that is not present in the working directory, we need to mark it as removed. 
""" actions = {} m = 'f' if branchmerge: m = 'r' for f in wctx.deleted(): if f not in mctx: actions[f] = m, None, "forget deleted" if not branchmerge: for f in wctx.removed(): if f not in mctx: actions[f] = 'f', None, "forget removed" return actions def _checkcollision(repo, wmf, actions): # build provisional merged manifest up pmmf = set(wmf) if actions: # k, dr, e and rd are no-op for m in 'a', 'am', 'f', 'g', 'cd', 'dc': for f, args, msg in actions[m]: pmmf.add(f) for f, args, msg in actions['r']: pmmf.discard(f) for f, args, msg in actions['dm']: f2, flags = args pmmf.discard(f2) pmmf.add(f) for f, args, msg in actions['dg']: pmmf.add(f) for f, args, msg in actions['m']: f1, f2, fa, move, anc = args if move: pmmf.discard(f1) pmmf.add(f) # check case-folding collision in provisional merged manifest foldmap = {} for f in sorted(pmmf): fold = util.normcase(f) if fold in foldmap: raise error.Abort(_("case-folding collision between %s and %s") % (f, foldmap[fold])) foldmap[fold] = f # check case-folding of directories foldprefix = unfoldprefix = lastfull = '' for fold, f in sorted(foldmap.items()): if fold.startswith(foldprefix) and not f.startswith(unfoldprefix): # the folded prefix matches but actual casing is different raise error.Abort(_("case-folding collision between " "%s and directory of %s") % (lastfull, f)) foldprefix = fold + '/' unfoldprefix = f + '/' lastfull = f def driverpreprocess(repo, ms, wctx, labels=None): """run the preprocess step of the merge driver, if any This is currently not implemented -- it's an extension point.""" return True def driverconclude(repo, ms, wctx, labels=None): """run the conclude step of the merge driver, if any This is currently not implemented -- it's an extension point.""" return True def manifestmerge(repo, wctx, p2, pa, branchmerge, force, matcher, acceptremote, followcopies): """ Merge p1 and p2 with ancestor pa and generate merge action list branchmerge and force are as passed in to update matcher = matcher to 
filter file lists acceptremote = accept the incoming changes without prompting """ if matcher is not None and matcher.always(): matcher = None copy, movewithdir, diverge, renamedelete = {}, {}, {}, {} # manifests fetched in order are going to be faster, so prime the caches [x.manifest() for x in sorted(wctx.parents() + [p2, pa], key=lambda x: x.rev())] if followcopies: ret = copies.mergecopies(repo, wctx, p2, pa) copy, movewithdir, diverge, renamedelete = ret repo.ui.note(_("resolving manifests\n")) repo.ui.debug(" branchmerge: %s, force: %s, partial: %s\n" % (bool(branchmerge), bool(force), bool(matcher))) repo.ui.debug(" ancestor: %s, local: %s, remote: %s\n" % (pa, wctx, p2)) m1, m2, ma = wctx.manifest(), p2.manifest(), pa.manifest() copied = set(copy.values()) copied.update(movewithdir.values()) if '.hgsubstate' in m1: # check whether sub state is modified for s in sorted(wctx.substate): if wctx.sub(s).dirty(): m1['.hgsubstate'] += '+' break # Compare manifests if matcher is not None: m1 = m1.matches(matcher) m2 = m2.matches(matcher) diff = m1.diff(m2) actions = {} for f, ((n1, fl1), (n2, fl2)) in diff.iteritems(): if n1 and n2: # file exists on both local and remote side if f not in ma: fa = copy.get(f, None) if fa is not None: actions[f] = ('m', (f, f, fa, False, pa.node()), "both renamed from " + fa) else: actions[f] = ('m', (f, f, None, False, pa.node()), "both created") else: a = ma[f] fla = ma.flags(f) nol = 'l' not in fl1 + fl2 + fla if n2 == a and fl2 == fla: actions[f] = ('k' , (), "remote unchanged") elif n1 == a and fl1 == fla: # local unchanged - use remote if n1 == n2: # optimization: keep local content actions[f] = ('e', (fl2,), "update permissions") else: actions[f] = ('g', (fl2, False), "remote is newer") elif nol and n2 == a: # remote only changed 'x' actions[f] = ('e', (fl2,), "update permissions") elif nol and n1 == a: # local only changed 'x' actions[f] = ('g', (fl1, False), "remote is newer") else: # both changed something actions[f] = 
('m', (f, f, f, False, pa.node()), "versions differ") elif n1: # file exists only on local side if f in copied: pass # we'll deal with it on m2 side elif f in movewithdir: # directory rename, move local f2 = movewithdir[f] if f2 in m2: actions[f2] = ('m', (f, f2, None, True, pa.node()), "remote directory rename, both created") else: actions[f2] = ('dm', (f, fl1), "remote directory rename - move from " + f) elif f in copy: f2 = copy[f] actions[f] = ('m', (f, f2, f2, False, pa.node()), "local copied/moved from " + f2) elif f in ma: # clean, a different, no remote if n1 != ma[f]: if acceptremote: actions[f] = ('r', None, "remote delete") else: actions[f] = ('cd', (f, None, f, False, pa.node()), "prompt changed/deleted") elif n1[20:] == 'a': # This extra 'a' is added by working copy manifest to mark # the file as locally added. We should forget it instead of # deleting it. actions[f] = ('f', None, "remote deleted") else: actions[f] = ('r', None, "other deleted") elif n2: # file exists only on remote side if f in copied: pass # we'll deal with it on m1 side elif f in movewithdir: f2 = movewithdir[f] if f2 in m1: actions[f2] = ('m', (f2, f, None, False, pa.node()), "local directory rename, both created") else: actions[f2] = ('dg', (f, fl2), "local directory rename - get from " + f) elif f in copy: f2 = copy[f] if f2 in m2: actions[f] = ('m', (f2, f, f2, False, pa.node()), "remote copied from " + f2) else: actions[f] = ('m', (f2, f, f2, True, pa.node()), "remote moved from " + f2) elif f not in ma: # local unknown, remote created: the logic is described by the # following table: # # force branchmerge different | action # n * * | create # y n * | create # y y n | create # y y y | merge # # Checking whether the files are different is expensive, so we # don't do that when we can avoid it. 
if not force: actions[f] = ('c', (fl2,), "remote created") elif not branchmerge: actions[f] = ('c', (fl2,), "remote created") else: actions[f] = ('cm', (fl2, pa.node()), "remote created, get or merge") elif n2 != ma[f]: if acceptremote: actions[f] = ('c', (fl2,), "remote recreating") else: actions[f] = ('dc', (None, f, f, False, pa.node()), "prompt deleted/changed") return actions, diverge, renamedelete def _resolvetrivial(repo, wctx, mctx, ancestor, actions): """Resolves false conflicts where the nodeid changed but the content remained the same.""" for f, (m, args, msg) in actions.items(): if m == 'cd' and f in ancestor and not wctx[f].cmp(ancestor[f]): # local did change but ended up with same content actions[f] = 'r', None, "prompt same" elif m == 'dc' and f in ancestor and not mctx[f].cmp(ancestor[f]): # remote did change but ended up with same content del actions[f] # don't get = keep local deleted def calculateupdates(repo, wctx, mctx, ancestors, branchmerge, force, acceptremote, followcopies, matcher=None): "Calculate the actions needed to merge mctx into wctx using ancestors" if len(ancestors) == 1: # default actions, diverge, renamedelete = manifestmerge( repo, wctx, mctx, ancestors[0], branchmerge, force, matcher, acceptremote, followcopies) _checkunknownfiles(repo, wctx, mctx, force, actions) else: # only when merge.preferancestor=* - the default repo.ui.note( _("note: merging %s and %s using bids from ancestors %s\n") % (wctx, mctx, _(' and ').join(str(anc) for anc in ancestors))) # Call for bids fbids = {} # mapping filename to bids (action method to list af actions) diverge, renamedelete = None, None for ancestor in ancestors: repo.ui.note(_('\ncalculating bids for ancestor %s\n') % ancestor) actions, diverge1, renamedelete1 = manifestmerge( repo, wctx, mctx, ancestor, branchmerge, force, matcher, acceptremote, followcopies) _checkunknownfiles(repo, wctx, mctx, force, actions) # Track the shortest set of warning on the theory that bid # merge will 
correctly incorporate more information if diverge is None or len(diverge1) < len(diverge): diverge = diverge1 if renamedelete is None or len(renamedelete) < len(renamedelete1): renamedelete = renamedelete1 for f, a in sorted(actions.iteritems()): m, args, msg = a repo.ui.debug(' %s: %s -> %s\n' % (f, msg, m)) if f in fbids: d = fbids[f] if m in d: d[m].append(a) else: d[m] = [a] else: fbids[f] = {m: [a]} # Pick the best bid for each file repo.ui.note(_('\nauction for merging merge bids\n')) actions = {} for f, bids in sorted(fbids.items()): # bids is a mapping from action method to list af actions # Consensus? if len(bids) == 1: # all bids are the same kind of method m, l = bids.items()[0] if all(a == l[0] for a in l[1:]): # len(bids) is > 1 repo.ui.note(" %s: consensus for %s\n" % (f, m)) actions[f] = l[0] continue # If keep is an option, just do it. if 'k' in bids: repo.ui.note(" %s: picking 'keep' action\n" % f) actions[f] = bids['k'][0] continue # If there are gets and they all agree [how could they not?], do it. if 'g' in bids: ga0 = bids['g'][0] if all(a == ga0 for a in bids['g'][1:]): repo.ui.note(" %s: picking 'get' action\n" % f) actions[f] = ga0 continue # TODO: Consider other simple actions such as mode changes # Handle inefficient democrazy. repo.ui.note(_(' %s: multiple bids for merge action:\n') % f) for m, l in sorted(bids.items()): for _f, args, msg in l: repo.ui.note(' %s -> %s\n' % (msg, m)) # Pick random action. 
TODO: Instead, prompt user when resolving m, l = bids.items()[0] repo.ui.warn(_(' %s: ambiguous merge - picked %s action\n') % (f, m)) actions[f] = l[0] continue repo.ui.note(_('end of auction\n\n')) _resolvetrivial(repo, wctx, mctx, ancestors[0], actions) if wctx.rev() is None: fractions = _forgetremoved(wctx, mctx, branchmerge) actions.update(fractions) return actions, diverge, renamedelete def batchremove(repo, actions): """apply removes to the working directory yields tuples for progress updates """ verbose = repo.ui.verbose unlink = util.unlinkpath wjoin = repo.wjoin audit = repo.wvfs.audit i = 0 for f, args, msg in actions: repo.ui.debug(" %s: %s -> r\n" % (f, msg)) if verbose: repo.ui.note(_("removing %s\n") % f) audit(f) try: unlink(wjoin(f), ignoremissing=True) except OSError as inst: repo.ui.warn(_("update failed to remove %s: %s!\n") % (f, inst.strerror)) if i == 100: yield i, f i = 0 i += 1 if i > 0: yield i, f def batchget(repo, mctx, actions): """apply gets to the working directory mctx is the context to get from yields tuples for progress updates """ verbose = repo.ui.verbose fctx = mctx.filectx wwrite = repo.wwrite ui = repo.ui i = 0 for f, (flags, backup), msg in actions: repo.ui.debug(" %s: %s -> g\n" % (f, msg)) if verbose: repo.ui.note(_("getting %s\n") % f) if backup: absf = repo.wjoin(f) orig = scmutil.origpath(ui, repo, absf) try: # TODO Mercurial has always aborted if an untracked directory # is replaced by a tracked file, or generally with # file/directory merges. This needs to be sorted out. 
if repo.wvfs.isfileorlink(f): util.rename(absf, orig) except OSError as e: if e.errno != errno.ENOENT: raise wwrite(f, fctx(f).data(), flags) if i == 100: yield i, f i = 0 i += 1 if i > 0: yield i, f def applyupdates(repo, actions, wctx, mctx, overwrite, labels=None): """apply the merge action list to the working directory wctx is the working copy context mctx is the context to be merged into the working copy Return a tuple of counts (updated, merged, removed, unresolved) that describes how many files were affected by the update. """ updated, merged, removed = 0, 0, 0 ms = mergestate.clean(repo, wctx.p1().node(), mctx.node()) moves = [] for m, l in actions.items(): l.sort() # 'cd' and 'dc' actions are treated like other merge conflicts mergeactions = sorted(actions['cd']) mergeactions.extend(sorted(actions['dc'])) mergeactions.extend(actions['m']) for f, args, msg in mergeactions: f1, f2, fa, move, anc = args if f == '.hgsubstate': # merged internally continue if f1 is None: fcl = filemerge.absentfilectx(wctx, fa) else: repo.ui.debug(" preserving %s for resolve of %s\n" % (f1, f)) fcl = wctx[f1] if f2 is None: fco = filemerge.absentfilectx(mctx, fa) else: fco = mctx[f2] actx = repo[anc] if fa in actx: fca = actx[fa] else: # TODO: move to absentfilectx fca = repo.filectx(f1, fileid=nullrev) ms.add(fcl, fco, fca, f) if f1 != f and move: moves.append(f1) audit = repo.wvfs.audit _updating = _('updating') _files = _('files') progress = repo.ui.progress # remove renamed files after safely stored for f in moves: if os.path.lexists(repo.wjoin(f)): repo.ui.debug("removing %s\n" % f) audit(f) util.unlinkpath(repo.wjoin(f)) numupdates = sum(len(l) for m, l in actions.items() if m != 'k') if [a for a in actions['r'] if a[0] == '.hgsubstate']: subrepo.submerge(repo, wctx, mctx, wctx, overwrite) # remove in parallel (must come first) z = 0 prog = worker.worker(repo.ui, 0.001, batchremove, (repo,), actions['r']) for i, item in prog: z += i progress(_updating, z, item=item, 
total=numupdates, unit=_files) removed = len(actions['r']) # get in parallel prog = worker.worker(repo.ui, 0.001, batchget, (repo, mctx), actions['g']) for i, item in prog: z += i progress(_updating, z, item=item, total=numupdates, unit=_files) updated = len(actions['g']) if [a for a in actions['g'] if a[0] == '.hgsubstate']: subrepo.submerge(repo, wctx, mctx, wctx, overwrite) # forget (manifest only, just log it) (must come first) for f, args, msg in actions['f']: repo.ui.debug(" %s: %s -> f\n" % (f, msg)) z += 1 progress(_updating, z, item=f, total=numupdates, unit=_files) # re-add (manifest only, just log it) for f, args, msg in actions['a']: repo.ui.debug(" %s: %s -> a\n" % (f, msg)) z += 1 progress(_updating, z, item=f, total=numupdates, unit=_files) # re-add/mark as modified (manifest only, just log it) for f, args, msg in actions['am']: repo.ui.debug(" %s: %s -> am\n" % (f, msg)) z += 1 progress(_updating, z, item=f, total=numupdates, unit=_files) # keep (noop, just log it) for f, args, msg in actions['k']: repo.ui.debug(" %s: %s -> k\n" % (f, msg)) # no progress # directory rename, move local for f, args, msg in actions['dm']: repo.ui.debug(" %s: %s -> dm\n" % (f, msg)) z += 1 progress(_updating, z, item=f, total=numupdates, unit=_files) f0, flags = args repo.ui.note(_("moving %s to %s\n") % (f0, f)) audit(f) repo.wwrite(f, wctx.filectx(f0).data(), flags) util.unlinkpath(repo.wjoin(f0)) updated += 1 # local directory rename, get for f, args, msg in actions['dg']: repo.ui.debug(" %s: %s -> dg\n" % (f, msg)) z += 1 progress(_updating, z, item=f, total=numupdates, unit=_files) f0, flags = args repo.ui.note(_("getting %s to %s\n") % (f0, f)) repo.wwrite(f, mctx.filectx(f0).data(), flags) updated += 1 # exec for f, args, msg in actions['e']: repo.ui.debug(" %s: %s -> e\n" % (f, msg)) z += 1 progress(_updating, z, item=f, total=numupdates, unit=_files) flags, = args audit(f) util.setflags(repo.wjoin(f), 'l' in flags, 'x' in flags) updated += 1 # the ordering is 
important here -- ms.mergedriver will raise if the merge # driver has changed, and we want to be able to bypass it when overwrite is # True usemergedriver = not overwrite and mergeactions and ms.mergedriver if usemergedriver: ms.commit() proceed = driverpreprocess(repo, ms, wctx, labels=labels) # the driver might leave some files unresolved unresolvedf = set(ms.unresolved()) if not proceed: # XXX setting unresolved to at least 1 is a hack to make sure we # error out return updated, merged, removed, max(len(unresolvedf), 1) newactions = [] for f, args, msg in mergeactions: if f in unresolvedf: newactions.append((f, args, msg)) mergeactions = newactions # premerge tocomplete = [] for f, args, msg in mergeactions: repo.ui.debug(" %s: %s -> m (premerge)\n" % (f, msg)) z += 1 progress(_updating, z, item=f, total=numupdates, unit=_files) if f == '.hgsubstate': # subrepo states need updating subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx), overwrite) continue audit(f) complete, r = ms.preresolve(f, wctx, labels=labels) if not complete: numupdates += 1 tocomplete.append((f, args, msg)) # merge for f, args, msg in tocomplete: repo.ui.debug(" %s: %s -> m (merge)\n" % (f, msg)) z += 1 progress(_updating, z, item=f, total=numupdates, unit=_files) ms.resolve(f, wctx, labels=labels) ms.commit() unresolved = ms.unresolvedcount() if usemergedriver and not unresolved and ms.mdstate() != 's': if not driverconclude(repo, ms, wctx, labels=labels): # XXX setting unresolved to at least 1 is a hack to make sure we # error out unresolved = max(unresolved, 1) ms.commit() msupdated, msmerged, msremoved = ms.counts() updated += msupdated merged += msmerged removed += msremoved extraactions = ms.actions() for k, acts in extraactions.iteritems(): actions[k].extend(acts) progress(_updating, None, total=numupdates, unit=_files) return updated, merged, removed, unresolved def recordupdates(repo, actions, branchmerge): "record merge actions to the dirstate" # remove (must come first) for f, 
args, msg in actions.get('r', []): if branchmerge: repo.dirstate.remove(f) else: repo.dirstate.drop(f) # forget (must come first) for f, args, msg in actions.get('f', []): repo.dirstate.drop(f) # re-add for f, args, msg in actions.get('a', []): repo.dirstate.add(f) # re-add/mark as modified for f, args, msg in actions.get('am', []): if branchmerge: repo.dirstate.normallookup(f) else: repo.dirstate.add(f) # exec change for f, args, msg in actions.get('e', []): repo.dirstate.normallookup(f) # keep for f, args, msg in actions.get('k', []): pass # get for f, args, msg in actions.get('g', []): if branchmerge: repo.dirstate.otherparent(f) else: repo.dirstate.normal(f) # merge for f, args, msg in actions.get('m', []): f1, f2, fa, move, anc = args if branchmerge: # We've done a branch merge, mark this file as merged # so that we properly record the merger later repo.dirstate.merge(f) if f1 != f2: # copy/rename if move: repo.dirstate.remove(f1) if f1 != f: repo.dirstate.copy(f1, f) else: repo.dirstate.copy(f2, f) else: # We've update-merged a locally modified file, so # we set the dirstate to emulate a normal checkout # of that file some time in the past. Thus our # merge will appear as a normal local file # modification. 
if f2 == f: # file not locally copied/moved repo.dirstate.normallookup(f) if move: repo.dirstate.drop(f1) # directory rename, move local for f, args, msg in actions.get('dm', []): f0, flag = args if branchmerge: repo.dirstate.add(f) repo.dirstate.remove(f0) repo.dirstate.copy(f0, f) else: repo.dirstate.normal(f) repo.dirstate.drop(f0) # directory rename, get for f, args, msg in actions.get('dg', []): f0, flag = args if branchmerge: repo.dirstate.add(f) repo.dirstate.copy(f0, f) else: repo.dirstate.normal(f) def update(repo, node, branchmerge, force, ancestor=None, mergeancestor=False, labels=None, matcher=None): """ Perform a merge between the working directory and the given node node = the node to update to, or None if unspecified branchmerge = whether to merge between branches force = whether to force branch merging or file overwriting matcher = a matcher to filter file lists (dirstate not updated) mergeancestor = whether it is merging with an ancestor. If true, we should accept the incoming changes for any prompts that occur. If false, merging with an ancestor (fast-forward) is only allowed between different named branches. This flag is used by rebase extension as a temporary fix and should be avoided in general. The table below shows all the behaviors of the update command given the -c and -C or no options, whether the working directory is dirty, whether a revision is specified, and the relationship of the parent rev to the target rev (linear, on the same named branch, or on another named branch). This logic is tested by test-update-branches.t. 
-c -C dirty rev | linear same cross n n n n | ok (1) x n n n y | ok ok ok n n y n | merge (2) (2) n n y y | merge (3) (3) n y * * | discard discard discard y n y * | (4) (4) (4) y n n * | ok ok ok y y * * | (5) (5) (5) x = can't happen * = don't-care 1 = abort: not a linear update (merge or update --check to force update) 2 = abort: uncommitted changes (commit and merge, or update --clean to discard changes) 3 = abort: uncommitted changes (commit or update --clean to discard changes) 4 = abort: uncommitted changes (checked in commands.py) 5 = incompatible options (checked in commands.py) Return the same tuple as applyupdates(). """ onode = node # If we're doing a partial update, we need to skip updating # the dirstate, so make a note of any partial-ness to the # update here. if matcher is None or matcher.always(): partial = False else: partial = True with repo.wlock(): wc = repo[None] pl = wc.parents() p1 = pl[0] pas = [None] if ancestor is not None: pas = [repo[ancestor]] if node is None: if (repo.ui.configbool('devel', 'all-warnings') or repo.ui.configbool('devel', 'oldapi')): repo.ui.develwarn('update with no target') rev, _mark, _act = destutil.destupdate(repo) node = repo[rev].node() overwrite = force and not branchmerge p2 = repo[node] if pas[0] is None: if repo.ui.configlist('merge', 'preferancestor', ['*']) == ['*']: cahs = repo.changelog.commonancestorsheads(p1.node(), p2.node()) pas = [repo[anc] for anc in (sorted(cahs) or [nullid])] else: pas = [p1.ancestor(p2, warn=branchmerge)] fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2) ### check phase if not overwrite: if len(pl) > 1: raise error.Abort(_("outstanding uncommitted merge")) ms = mergestate.read(repo) if list(ms.unresolved()): raise error.Abort(_("outstanding merge conflicts")) if branchmerge: if pas == [p2]: raise error.Abort(_("merging with a working directory ancestor" " has no effect")) elif pas == [p1]: if not mergeancestor and p1.branch() == p2.branch(): raise error.Abort(_("nothing 
to merge"), hint=_("use 'hg update' " "or check 'hg heads'")) if not force and (wc.files() or wc.deleted()): raise error.Abort(_("uncommitted changes"), hint=_("use 'hg status' to list changes")) for s in sorted(wc.substate): wc.sub(s).bailifchanged() elif not overwrite: if p1 == p2: # no-op update # call the hooks and exit early repo.hook('preupdate', throw=True, parent1=xp2, parent2='') repo.hook('update', parent1=xp2, parent2='', error=0) return 0, 0, 0, 0 if pas not in ([p1], [p2]): # nonlinear dirty = wc.dirty(missing=True) if dirty or onode is None: # Branching is a bit strange to ensure we do the minimal # amount of call to obsolete.background. foreground = obsolete.foreground(repo, [p1.node()]) # note: the variable contains a random identifier if repo[node].node() in foreground: pas = [p1] # allow updating to successors elif dirty: msg = _("uncommitted changes") if onode is None: hint = _("commit and merge, or update --clean to" " discard changes") else: hint = _("commit or update --clean to discard" " changes") raise error.Abort(msg, hint=hint) else: # node is none msg = _("not a linear update") hint = _("merge or update --check to force update") raise error.Abort(msg, hint=hint) else: # Allow jumping branches if clean and specific rev given pas = [p1] # deprecated config: merge.followcopies followcopies = False if overwrite: pas = [wc] elif pas == [p2]: # backwards pas = [wc.p1()] elif not branchmerge and not wc.dirty(missing=True): pass elif pas[0] and repo.ui.configbool('merge', 'followcopies', True): followcopies = True ### calculate phase actionbyfile, diverge, renamedelete = calculateupdates( repo, wc, p2, pas, branchmerge, force, mergeancestor, followcopies, matcher=matcher) # Prompt and create actions. Most of this is in the resolve phase # already, but we can't handle .hgsubstate in filemerge or # subrepo.submerge yet so we have to keep prompting for it. 
if '.hgsubstate' in actionbyfile: f = '.hgsubstate' m, args, msg = actionbyfile[f] if m == 'cd': if repo.ui.promptchoice( _("local changed %s which remote deleted\n" "use (c)hanged version or (d)elete?" "$$ &Changed $$ &Delete") % f, 0): actionbyfile[f] = ('r', None, "prompt delete") elif f in p1: actionbyfile[f] = ('am', None, "prompt keep") else: actionbyfile[f] = ('a', None, "prompt keep") elif m == 'dc': f1, f2, fa, move, anc = args flags = p2[f2].flags() if repo.ui.promptchoice( _("remote changed %s which local deleted\n" "use (c)hanged version or leave (d)eleted?" "$$ &Changed $$ &Deleted") % f, 0) == 0: actionbyfile[f] = ('g', (flags, False), "prompt recreating") else: del actionbyfile[f] # Convert to dictionary-of-lists format actions = dict((m, []) for m in 'a am f g cd dc r dm dg m e k'.split()) for f, (m, args, msg) in actionbyfile.iteritems(): if m not in actions: actions[m] = [] actions[m].append((f, args, msg)) if not util.checkcase(repo.path): # check collision between files only in p2 for clean update if (not branchmerge and (force or not wc.dirty(missing=True, branch=False))): _checkcollision(repo, p2.manifest(), None) else: _checkcollision(repo, wc.manifest(), actions) # divergent renames for f, fl in sorted(diverge.iteritems()): repo.ui.warn(_("note: possible conflict - %s was renamed " "multiple times to:\n") % f) for nf in fl: repo.ui.warn(" %s\n" % nf) # rename and delete for f, fl in sorted(renamedelete.iteritems()): repo.ui.warn(_("note: possible conflict - %s was deleted " "and renamed to:\n") % f) for nf in fl: repo.ui.warn(" %s\n" % nf) ### apply phase if not branchmerge: # just jump to the new rev fp1, fp2, xp1, xp2 = fp2, nullid, xp2, '' if not partial: repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2) # note that we're in the middle of an update repo.vfs.write('updatestate', p2.hex()) stats = applyupdates(repo, actions, wc, p2, overwrite, labels=labels) if not partial: repo.dirstate.beginparentchange() repo.setparents(fp1, 
fp2) recordupdates(repo, actions, branchmerge) # update completed, clear state util.unlink(repo.join('updatestate')) if not branchmerge: repo.dirstate.setbranch(p2.branch()) repo.dirstate.endparentchange() if not partial: repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3]) return stats def graft(repo, ctx, pctx, labels, keepparent=False): """Do a graft-like merge. This is a merge where the merge ancestor is chosen such that one or more changesets are grafted onto the current changeset. In addition to the merge, this fixes up the dirstate to include only a single parent (if keepparent is False) and tries to duplicate any renames/copies appropriately. ctx - changeset to rebase pctx - merge base, usually ctx.p1() labels - merge labels eg ['local', 'graft'] keepparent - keep second parent if any """ # If we're grafting a descendant onto an ancestor, be sure to pass # mergeancestor=True to update. This does two things: 1) allows the merge if # the destination is the same as the parent of the ctx (so we can use graft # to copy commits), and 2) informs update that the incoming changes are # newer than the destination so it doesn't prompt about "remote changed foo # which local deleted". 
mergeancestor = repo.changelog.isancestor(repo['.'].node(), ctx.node()) stats = update(repo, ctx.node(), True, True, pctx.node(), mergeancestor=mergeancestor, labels=labels) pother = nullid parents = ctx.parents() if keepparent and len(parents) == 2 and pctx in parents: parents.remove(pctx) pother = parents[0].node() repo.dirstate.beginparentchange() repo.setparents(repo['.'].node(), pother) repo.dirstate.write(repo.currenttransaction()) # fix up dirstate for copies and renames copies.duplicatecopies(repo, ctx.rev(), pctx.rev()) repo.dirstate.endparentchange() return stats mercurial-3.7.3/mercurial/url.py0000644000175000017500000004377512676531525016306 0ustar mpmmpm00000000000000# url.py - HTTP handling for mercurial # # Copyright 2005, 2006, 2007, 2008 Matt Mackall # Copyright 2006, 2007 Alexis S. L. Carvalho # Copyright 2006 Vadim Gelfer # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import base64 import cStringIO import httplib import os import socket import urllib import urllib2 from .i18n import _ from . 
import ( error, httpconnection as httpconnectionmod, keepalive, sslutil, util, ) class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm): def __init__(self, ui): urllib2.HTTPPasswordMgrWithDefaultRealm.__init__(self) self.ui = ui def find_user_password(self, realm, authuri): authinfo = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password( self, realm, authuri) user, passwd = authinfo if user and passwd: self._writedebug(user, passwd) return (user, passwd) if not user or not passwd: res = httpconnectionmod.readauthforuri(self.ui, authuri, user) if res: group, auth = res user, passwd = auth.get('username'), auth.get('password') self.ui.debug("using auth.%s.* for authentication\n" % group) if not user or not passwd: u = util.url(authuri) u.query = None if not self.ui.interactive(): raise error.Abort(_('http authorization required for %s') % util.hidepassword(str(u))) self.ui.write(_("http authorization required for %s\n") % util.hidepassword(str(u))) self.ui.write(_("realm: %s\n") % realm) if user: self.ui.write(_("user: %s\n") % user) else: user = self.ui.prompt(_("user:"), default=None) if not passwd: passwd = self.ui.getpass() self.add_password(realm, authuri, user, passwd) self._writedebug(user, passwd) return (user, passwd) def _writedebug(self, user, passwd): msg = _('http auth: user %s, password %s\n') self.ui.debug(msg % (user, passwd and '*' * len(passwd) or 'not set')) def find_stored_password(self, authuri): return urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password( self, None, authuri) class proxyhandler(urllib2.ProxyHandler): def __init__(self, ui): proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy') # XXX proxyauthinfo = None if proxyurl: # proxy can be proper url or host[:port] if not (proxyurl.startswith('http:') or proxyurl.startswith('https:')): proxyurl = 'http://' + proxyurl + '/' proxy = util.url(proxyurl) if not proxy.user: proxy.user = ui.config("http_proxy", "user") proxy.passwd = ui.config("http_proxy", 
"passwd") # see if we should use a proxy for this url no_list = ["localhost", "127.0.0.1"] no_list.extend([p.lower() for p in ui.configlist("http_proxy", "no")]) no_list.extend([p.strip().lower() for p in os.getenv("no_proxy", '').split(',') if p.strip()]) # "http_proxy.always" config is for running tests on localhost if ui.configbool("http_proxy", "always"): self.no_list = [] else: self.no_list = no_list proxyurl = str(proxy) proxies = {'http': proxyurl, 'https': proxyurl} ui.debug('proxying through http://%s:%s\n' % (proxy.host, proxy.port)) else: proxies = {} # urllib2 takes proxy values from the environment and those # will take precedence if found. So, if there's a config entry # defining a proxy, drop the environment ones if ui.config("http_proxy", "host"): for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]: try: if env in os.environ: del os.environ[env] except OSError: pass urllib2.ProxyHandler.__init__(self, proxies) self.ui = ui def proxy_open(self, req, proxy, type_): host = req.get_host().split(':')[0] for e in self.no_list: if host == e: return None if e.startswith('*.') and host.endswith(e[2:]): return None if e.startswith('.') and host.endswith(e[1:]): return None return urllib2.ProxyHandler.proxy_open(self, req, proxy, type_) def _gen_sendfile(orgsend): def _sendfile(self, data): # send a file if isinstance(data, httpconnectionmod.httpsendfile): # if auth required, some data sent twice, so rewind here data.seek(0) for chunk in util.filechunkiter(data): orgsend(self, chunk) else: orgsend(self, data) return _sendfile has_https = util.safehasattr(urllib2, 'HTTPSHandler') if has_https: try: _create_connection = socket.create_connection except AttributeError: _GLOBAL_DEFAULT_TIMEOUT = object() def _create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT, source_address=None): # lifted from Python 2.6 msg = "getaddrinfo returns an empty list" host, port = address for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM): af, socktype, proto, 
canonname, sa = res sock = None try: sock = socket.socket(af, socktype, proto) if timeout is not _GLOBAL_DEFAULT_TIMEOUT: sock.settimeout(timeout) if source_address: sock.bind(source_address) sock.connect(sa) return sock except socket.error as msg: if sock is not None: sock.close() raise socket.error(msg) class httpconnection(keepalive.HTTPConnection): # must be able to send big bundle as stream. send = _gen_sendfile(keepalive.HTTPConnection.send) def connect(self): if has_https and self.realhostport: # use CONNECT proxy self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.sock.connect((self.host, self.port)) if _generic_proxytunnel(self): # we do not support client X.509 certificates self.sock = sslutil.wrapsocket(self.sock, None, None, None, serverhostname=self.host) else: keepalive.HTTPConnection.connect(self) def getresponse(self): proxyres = getattr(self, 'proxyres', None) if proxyres: if proxyres.will_close: self.close() self.proxyres = None return proxyres return keepalive.HTTPConnection.getresponse(self) # general transaction handler to support different ways to handle # HTTPS proxying before and after Python 2.6.3. 
def _generic_start_transaction(handler, h, req): tunnel_host = getattr(req, '_tunnel_host', None) if tunnel_host: if tunnel_host[:7] not in ['http://', 'https:/']: tunnel_host = 'https://' + tunnel_host new_tunnel = True else: tunnel_host = req.get_selector() new_tunnel = False if new_tunnel or tunnel_host == req.get_full_url(): # has proxy u = util.url(tunnel_host) if new_tunnel or u.scheme == 'https': # only use CONNECT for HTTPS h.realhostport = ':'.join([u.host, (u.port or '443')]) h.headers = req.headers.copy() h.headers.update(handler.parent.addheaders) return h.realhostport = None h.headers = None def _generic_proxytunnel(self): proxyheaders = dict( [(x, self.headers[x]) for x in self.headers if x.lower().startswith('proxy-')]) self.send('CONNECT %s HTTP/1.0\r\n' % self.realhostport) for header in proxyheaders.iteritems(): self.send('%s: %s\r\n' % header) self.send('\r\n') # majority of the following code is duplicated from # httplib.HTTPConnection as there are no adequate places to # override functions to provide the needed functionality res = self.response_class(self.sock, strict=self.strict, method=self._method) while True: version, status, reason = res._read_status() if status != httplib.CONTINUE: break while True: skip = res.fp.readline().strip() if not skip: break res.status = status res.reason = reason.strip() if res.status == 200: while True: line = res.fp.readline() if line == '\r\n': break return True if version == 'HTTP/1.0': res.version = 10 elif version.startswith('HTTP/1.'): res.version = 11 elif version == 'HTTP/0.9': res.version = 9 else: raise httplib.UnknownProtocol(version) if res.version == 9: res.length = None res.chunked = 0 res.will_close = 1 res.msg = httplib.HTTPMessage(cStringIO.StringIO()) return False res.msg = httplib.HTTPMessage(res.fp) res.msg.fp = None # are we using the chunked-style of transfer encoding? 
trenc = res.msg.getheader('transfer-encoding') if trenc and trenc.lower() == "chunked": res.chunked = 1 res.chunk_left = None else: res.chunked = 0 # will the connection close at the end of the response? res.will_close = res._check_close() # do we have a Content-Length? # NOTE: RFC 2616, section 4.4, #3 says we ignore this if # transfer-encoding is "chunked" length = res.msg.getheader('content-length') if length and not res.chunked: try: res.length = int(length) except ValueError: res.length = None else: if res.length < 0: # ignore nonsensical negative lengths res.length = None else: res.length = None # does the body have a fixed length? (of zero) if (status == httplib.NO_CONTENT or status == httplib.NOT_MODIFIED or 100 <= status < 200 or # 1xx codes res._method == 'HEAD'): res.length = 0 # if the connection remains open, and we aren't using chunked, and # a content-length was not provided, then assume that the connection # WILL close. if (not res.will_close and not res.chunked and res.length is None): res.will_close = 1 self.proxyres = res return False class httphandler(keepalive.HTTPHandler): def http_open(self, req): return self.do_open(httpconnection, req) def _start_transaction(self, h, req): _generic_start_transaction(self, h, req) return keepalive.HTTPHandler._start_transaction(self, h, req) if has_https: class httpsconnection(httplib.HTTPConnection): response_class = keepalive.HTTPResponse default_port = httplib.HTTPS_PORT # must be able to send big bundle as stream. 
send = _gen_sendfile(keepalive.safesend) getresponse = keepalive.wrapgetresponse(httplib.HTTPConnection) def __init__(self, host, port=None, key_file=None, cert_file=None, *args, **kwargs): httplib.HTTPConnection.__init__(self, host, port, *args, **kwargs) self.key_file = key_file self.cert_file = cert_file def connect(self): self.sock = _create_connection((self.host, self.port)) host = self.host if self.realhostport: # use CONNECT proxy _generic_proxytunnel(self) host = self.realhostport.rsplit(':', 1)[0] self.sock = sslutil.wrapsocket( self.sock, self.key_file, self.cert_file, serverhostname=host, **sslutil.sslkwargs(self.ui, host)) sslutil.validator(self.ui, host)(self.sock) class httpshandler(keepalive.KeepAliveHandler, urllib2.HTTPSHandler): def __init__(self, ui): keepalive.KeepAliveHandler.__init__(self) urllib2.HTTPSHandler.__init__(self) self.ui = ui self.pwmgr = passwordmgr(self.ui) def _start_transaction(self, h, req): _generic_start_transaction(self, h, req) return keepalive.KeepAliveHandler._start_transaction(self, h, req) def https_open(self, req): # req.get_full_url() does not contain credentials and we may # need them to match the certificates. 
url = req.get_full_url() user, password = self.pwmgr.find_stored_password(url) res = httpconnectionmod.readauthforuri(self.ui, url, user) if res: group, auth = res self.auth = auth self.ui.debug("using auth.%s.* for authentication\n" % group) else: self.auth = None return self.do_open(self._makeconnection, req) def _makeconnection(self, host, port=None, *args, **kwargs): keyfile = None certfile = None if len(args) >= 1: # key_file keyfile = args[0] if len(args) >= 2: # cert_file certfile = args[1] args = args[2:] # if the user has specified different key/cert files in # hgrc, we prefer these if self.auth and 'key' in self.auth and 'cert' in self.auth: keyfile = self.auth['key'] certfile = self.auth['cert'] conn = httpsconnection(host, port, keyfile, certfile, *args, **kwargs) conn.ui = self.ui return conn class httpdigestauthhandler(urllib2.HTTPDigestAuthHandler): def __init__(self, *args, **kwargs): urllib2.HTTPDigestAuthHandler.__init__(self, *args, **kwargs) self.retried_req = None def reset_retry_count(self): # Python 2.6.5 will call this on 401 or 407 errors and thus loop # forever. We disable reset_retry_count completely and reset in # http_error_auth_reqed instead. pass def http_error_auth_reqed(self, auth_header, host, req, headers): # Reset the retry counter once for each request. 
if req is not self.retried_req: self.retried_req = req self.retried = 0 return urllib2.HTTPDigestAuthHandler.http_error_auth_reqed( self, auth_header, host, req, headers) class httpbasicauthhandler(urllib2.HTTPBasicAuthHandler): def __init__(self, *args, **kwargs): self.auth = None urllib2.HTTPBasicAuthHandler.__init__(self, *args, **kwargs) self.retried_req = None def http_request(self, request): if self.auth: request.add_unredirected_header(self.auth_header, self.auth) return request def https_request(self, request): if self.auth: request.add_unredirected_header(self.auth_header, self.auth) return request def reset_retry_count(self): # Python 2.6.5 will call this on 401 or 407 errors and thus loop # forever. We disable reset_retry_count completely and reset in # http_error_auth_reqed instead. pass def http_error_auth_reqed(self, auth_header, host, req, headers): # Reset the retry counter once for each request. if req is not self.retried_req: self.retried_req = req self.retried = 0 return urllib2.HTTPBasicAuthHandler.http_error_auth_reqed( self, auth_header, host, req, headers) def retry_http_basic_auth(self, host, req, realm): user, pw = self.passwd.find_user_password(realm, req.get_full_url()) if pw is not None: raw = "%s:%s" % (user, pw) auth = 'Basic %s' % base64.b64encode(raw).strip() if req.headers.get(self.auth_header, None) == auth: return None self.auth = auth req.add_unredirected_header(self.auth_header, auth) return self.parent.open(req) else: return None handlerfuncs = [] def opener(ui, authinfo=None): ''' construct an opener suitable for urllib2 authinfo will be added to the password manager ''' # experimental config: ui.usehttp2 if ui.configbool('ui', 'usehttp2', False): handlers = [httpconnectionmod.http2handler(ui, passwordmgr(ui))] else: handlers = [httphandler()] if has_https: handlers.append(httpshandler(ui)) handlers.append(proxyhandler(ui)) passmgr = passwordmgr(ui) if authinfo is not None: passmgr.add_password(*authinfo) user, passwd = 
authinfo[2:4] ui.debug('http auth: user %s, password %s\n' % (user, passwd and '*' * len(passwd) or 'not set')) handlers.extend((httpbasicauthhandler(passmgr), httpdigestauthhandler(passmgr))) handlers.extend([h(ui, passmgr) for h in handlerfuncs]) opener = urllib2.build_opener(*handlers) # 1.0 here is the _protocol_ version opener.addheaders = [('User-agent', 'mercurial/proto-1.0')] opener.addheaders.append(('Accept', 'application/mercurial-0.1')) return opener def open(ui, url_, data=None): u = util.url(url_) if u.scheme: u.scheme = u.scheme.lower() url_, authinfo = u.authinfo() else: path = util.normpath(os.path.abspath(url_)) url_ = 'file://' + urllib.pathname2url(path) authinfo = None return opener(ui, authinfo).open(url_, data) mercurial-3.7.3/mercurial/extensions.py0000644000175000017500000003437612676531525017700 0ustar mpmmpm00000000000000# extensions.py - extension handling for mercurial # # Copyright 2005-2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import imp import os from .i18n import ( _, gettext, ) from . import ( cmdutil, error, util, ) _extensions = {} _aftercallbacks = {} _order = [] _builtin = set(['hbisect', 'bookmarks', 'parentrevspec', 'progress', 'interhg', 'inotify']) def extensions(ui=None): if ui: def enabled(name): for format in ['%s', 'hgext.%s']: conf = ui.config('extensions', format % name) if conf is not None and not conf.startswith('!'): return True else: enabled = lambda name: True for name in _order: module = _extensions[name] if module and enabled(name): yield name, module def find(name): '''return module with given extension name''' mod = None try: mod = _extensions[name] except KeyError: for k, v in _extensions.iteritems(): if k.endswith('.' 
+ name) or k.endswith('/' + name): mod = v break if not mod: raise KeyError(name) return mod def loadpath(path, module_name): module_name = module_name.replace('.', '_') path = util.normpath(util.expandpath(path)) if os.path.isdir(path): # module/__init__.py style d, f = os.path.split(path) fd, fpath, desc = imp.find_module(f, [d]) return imp.load_module(module_name, fd, fpath, desc) else: try: return imp.load_source(module_name, path) except IOError as exc: if not exc.filename: exc.filename = path # python does not fill this raise def load(ui, name, path): if name.startswith('hgext.') or name.startswith('hgext/'): shortname = name[6:] else: shortname = name if shortname in _builtin: return None if shortname in _extensions: return _extensions[shortname] _extensions[shortname] = None if path: # the module will be loaded in sys.modules # choose an unique name so that it doesn't # conflicts with other modules mod = loadpath(path, 'hgext.%s' % name) else: def importh(name): mod = __import__(name) components = name.split('.') for comp in components[1:]: mod = getattr(mod, comp) return mod try: mod = importh("hgext.%s" % name) except ImportError as err: ui.debug('could not import hgext.%s (%s): trying %s\n' % (name, err, name)) if ui.debugflag: ui.traceback() mod = importh(name) # Before we do anything with the extension, check against minimum stated # compatibility. This gives extension authors a mechanism to have their # extensions short circuit when loaded with a known incompatible version # of Mercurial. 
minver = getattr(mod, 'minimumhgversion', None) if minver and util.versiontuple(minver, 2) > util.versiontuple(n=2): ui.warn(_('(third party extension %s requires version %s or newer ' 'of Mercurial; disabling)\n') % (shortname, minver)) return _extensions[shortname] = mod _order.append(shortname) for fn in _aftercallbacks.get(shortname, []): fn(loaded=True) return mod def loadall(ui): result = ui.configitems("extensions") newindex = len(_order) for (name, path) in result: if path: if path[0] == '!': continue try: load(ui, name, path) except KeyboardInterrupt: raise except Exception as inst: if path: ui.warn(_("*** failed to import extension %s from %s: %s\n") % (name, path, inst)) else: ui.warn(_("*** failed to import extension %s: %s\n") % (name, inst)) ui.traceback() for name in _order[newindex:]: uisetup = getattr(_extensions[name], 'uisetup', None) if uisetup: uisetup(ui) for name in _order[newindex:]: extsetup = getattr(_extensions[name], 'extsetup', None) if extsetup: try: extsetup(ui) except TypeError: if extsetup.func_code.co_argcount != 0: raise extsetup() # old extsetup with no ui argument # Call aftercallbacks that were never met. for shortname in _aftercallbacks: if shortname in _extensions: continue for fn in _aftercallbacks[shortname]: fn(loaded=False) # loadall() is called multiple times and lingering _aftercallbacks # entries could result in double execution. See issue4646. _aftercallbacks.clear() def afterloaded(extension, callback): '''Run the specified function after a named extension is loaded. If the named extension is already loaded, the callback will be called immediately. If the named extension never loads, the callback will be called after all extensions have been loaded. The callback receives the named argument ``loaded``, which is a boolean indicating whether the dependent extension actually loaded. 
''' if extension in _extensions: callback(loaded=True) else: _aftercallbacks.setdefault(extension, []).append(callback) def bind(func, *args): '''Partial function application Returns a new function that is the partial application of args and kwargs to func. For example, f(1, 2, bar=3) === bind(f, 1)(2, bar=3)''' assert callable(func) def closure(*a, **kw): return func(*(args + a), **kw) return closure def wrapcommand(table, command, wrapper, synopsis=None, docstring=None): '''Wrap the command named `command' in table Replace command in the command table with wrapper. The wrapped command will be inserted into the command table specified by the table argument. The wrapper will be called like wrapper(orig, *args, **kwargs) where orig is the original (wrapped) function, and *args, **kwargs are the arguments passed to it. Optionally append to the command synopsis and docstring, used for help. For example, if your extension wraps the ``bookmarks`` command to add the flags ``--remote`` and ``--all`` you might call this function like so: synopsis = ' [-a] [--remote]' docstring = """ The ``remotenames`` extension adds the ``--remote`` and ``--all`` (``-a``) flags to the bookmarks command. Either flag will show the remote bookmarks known to the repository; ``--remote`` will also suppress the output of the local bookmarks. 
""" extensions.wrapcommand(commands.table, 'bookmarks', exbookmarks, synopsis, docstring) ''' assert callable(wrapper) aliases, entry = cmdutil.findcmd(command, table) for alias, e in table.iteritems(): if e is entry: key = alias break origfn = entry[0] wrap = bind(util.checksignature(wrapper), util.checksignature(origfn)) wrap.__module__ = getattr(origfn, '__module__') doc = getattr(origfn, '__doc__') if docstring is not None: doc += docstring wrap.__doc__ = doc newentry = list(entry) newentry[0] = wrap if synopsis is not None: newentry[2] += synopsis table[key] = tuple(newentry) return entry def wrapfunction(container, funcname, wrapper): '''Wrap the function named funcname in container Replace the funcname member in the given container with the specified wrapper. The container is typically a module, class, or instance. The wrapper will be called like wrapper(orig, *args, **kwargs) where orig is the original (wrapped) function, and *args, **kwargs are the arguments passed to it. Wrapping methods of the repository object is not recommended since it conflicts with extensions that extend the repository by subclassing. All extensions that need to extend methods of localrepository should use this subclassing trick: namely, reposetup() should look like def reposetup(ui, repo): class myrepo(repo.__class__): def whatever(self, *args, **kwargs): [...extension stuff...] super(myrepo, self).whatever(*args, **kwargs) [...extension stuff...] repo.__class__ = myrepo In general, combining wrapfunction() with subclassing does not work. Since you cannot control what other extensions are loaded by your end users, you should play nicely with others by using the subclass trick. ''' assert callable(wrapper) origfn = getattr(container, funcname) assert callable(origfn) setattr(container, funcname, bind(wrapper, origfn)) return origfn def _disabledpaths(strip_init=False): '''find paths of disabled extensions. 
returns a dict of {name: path} removes /__init__.py from packages if strip_init is True''' import hgext extpath = os.path.dirname(os.path.abspath(hgext.__file__)) try: # might not be a filesystem path files = os.listdir(extpath) except OSError: return {} exts = {} for e in files: if e.endswith('.py'): name = e.rsplit('.', 1)[0] path = os.path.join(extpath, e) else: name = e path = os.path.join(extpath, e, '__init__.py') if not os.path.exists(path): continue if strip_init: path = os.path.dirname(path) if name in exts or name in _order or name == '__init__': continue exts[name] = path return exts def _moduledoc(file): '''return the top-level python documentation for the given file Loosely inspired by pydoc.source_synopsis(), but rewritten to handle triple quotes and to return the whole text instead of just the synopsis''' result = [] line = file.readline() while line[:1] == '#' or not line.strip(): line = file.readline() if not line: break start = line[:3] if start == '"""' or start == "'''": line = line[3:] while line: if line.rstrip().endswith(start): line = line.split(start)[0] if line: result.append(line) break elif not line: return None # unmatched delimiter result.append(line) line = file.readline() else: return None return ''.join(result) def _disabledhelp(path): '''retrieve help synopsis of a disabled extension (without importing)''' try: file = open(path) except IOError: return else: doc = _moduledoc(file) file.close() if doc: # extracting localized synopsis return gettext(doc).splitlines()[0] else: return _('(no help text available)') def disabled(): '''find disabled extensions from hgext. 
returns a dict of {name: desc}''' try: from hgext import __index__ return dict((name, gettext(desc)) for name, desc in __index__.docs.iteritems() if name not in _order) except (ImportError, AttributeError): pass paths = _disabledpaths() if not paths: return {} exts = {} for name, path in paths.iteritems(): doc = _disabledhelp(path) if doc: exts[name] = doc return exts def disabledext(name): '''find a specific disabled extension from hgext. returns desc''' try: from hgext import __index__ if name in _order: # enabled return else: return gettext(__index__.docs.get(name)) except (ImportError, AttributeError): pass paths = _disabledpaths() if name in paths: return _disabledhelp(paths[name]) def disabledcmd(ui, cmd, strict=False): '''import disabled extensions until cmd is found. returns (cmdname, extname, module)''' paths = _disabledpaths(strip_init=True) if not paths: raise error.UnknownCommand(cmd) def findcmd(cmd, name, path): try: mod = loadpath(path, 'hgext.%s' % name) except Exception: return try: aliases, entry = cmdutil.findcmd(cmd, getattr(mod, 'cmdtable', {}), strict) except (error.AmbiguousCommand, error.UnknownCommand): return except Exception: ui.warn(_('warning: error finding commands in %s\n') % path) ui.traceback() return for c in aliases: if c.startswith(cmd): cmd = c break else: cmd = aliases[0] return (cmd, name, mod) ext = None # first, search for an extension with the same name as the command path = paths.pop(cmd, None) if path: ext = findcmd(cmd, cmd, path) if not ext: # otherwise, interrogate each extension until there's a match for name, path in paths.iteritems(): ext = findcmd(cmd, name, path) if ext: break if ext and 'DEPRECATED' not in ext.__doc__: return ext raise error.UnknownCommand(cmd) def enabled(shortname=True): '''return a dict of {name: desc} of extensions''' exts = {} for ename, ext in extensions(): doc = (gettext(ext.__doc__) or _('(no help text available)')) if shortname: ename = ename.split('.')[-1] exts[ename] = 
doc.splitlines()[0].strip() return exts def moduleversion(module): '''return version information from given module as a string''' if (util.safehasattr(module, 'getversion') and callable(module.getversion)): version = module.getversion() elif util.safehasattr(module, '__version__'): version = module.__version__ else: version = '' if isinstance(version, (list, tuple)): version = '.'.join(str(o) for o in version) return version mercurial-3.7.3/mercurial/help/0000755000175000017500000000000012676531544016043 5ustar mpmmpm00000000000000mercurial-3.7.3/mercurial/help/hg.1.txt0000644000175000017500000000613012676531524017337 0ustar mpmmpm00000000000000==== hg ==== --------------------------------------- Mercurial source code management system --------------------------------------- :Author: Matt Mackall :Organization: Mercurial :Manual section: 1 :Manual group: Mercurial Manual .. contents:: :backlinks: top :class: htmlonly :depth: 1 Synopsis """""""" **hg** *command* [*option*]... [*argument*]... Description """"""""""" The **hg** command provides a command line interface to the Mercurial system. Command Elements """""""""""""""" files... indicates one or more filename or relative path filenames; see `File Name Patterns`_ for information on pattern matching path indicates a path on the local machine revision indicates a changeset which can be specified as a changeset revision number, a tag, or a unique substring of the changeset hash value repository path either the pathname of a local repository or the URI of a remote repository. .. include:: hg.1.gendoc.txt Files """"" ``/etc/mercurial/hgrc``, ``$HOME/.hgrc``, ``.hg/hgrc`` This file contains defaults and configuration. Values in ``.hg/hgrc`` override those in ``$HOME/.hgrc``, and these override settings made in the global ``/etc/mercurial/hgrc`` configuration. See |hgrc(5)|_ for details of the contents and format of these files. 
``.hgignore`` This file contains regular expressions (one per line) that describe file names that should be ignored by **hg**. For details, see |hgignore(5)|_. ``.hgsub`` This file defines the locations of all subrepositories, and tells where the subrepository checkouts came from. For details, see :hg:`help subrepos`. ``.hgsubstate`` This file is where Mercurial stores all nested repository states. *NB: This file should not be edited manually.* ``.hgtags`` This file contains changeset hash values and text tag names (one of each separated by spaces) that correspond to tagged versions of the repository contents. The file content is encoded using UTF-8. ``.hg/last-message.txt`` This file is used by :hg:`commit` to store a backup of the commit message in case the commit fails. ``.hg/localtags`` This file can be used to define local tags which are not shared among repositories. The file format is the same as for ``.hgtags``, but it is encoded using the local system encoding. Some commands (e.g. revert) produce backup files ending in ``.orig``, if the ``.orig`` file already exists and is not tracked by Mercurial, it will be overwritten. Bugs """" Probably lots, please post them to the mailing list (see Resources_ below) when you find them. See Also """""""" |hgignore(5)|_, |hgrc(5)|_ Author """""" Written by Matt Mackall Resources """"""""" Main Web Site: https://mercurial-scm.org/ Source code repository: http://selenic.com/hg Mailing list: http://selenic.com/mailman/listinfo/mercurial Copying """"""" Copyright (C) 2005-2016 Matt Mackall. Free use of this software is granted under the terms of the GNU General Public License version 2 or any later version. .. include:: common.txt mercurial-3.7.3/mercurial/help/filesets.txt0000644000175000017500000000353512676531525020427 0ustar mpmmpm00000000000000Mercurial supports a functional language for selecting a set of files. Like other file patterns, this pattern type is indicated by a prefix, 'set:'. 
The language supports a number of predicates which are joined by infix operators. Parenthesis can be used for grouping. Identifiers such as filenames or patterns must be quoted with single or double quotes if they contain characters outside of ``[.*{}[]?/\_a-zA-Z0-9\x80-\xff]`` or if they match one of the predefined predicates. This generally applies to file patterns other than globs and arguments for predicates. Special characters can be used in quoted identifiers by escaping them, e.g., ``\n`` is interpreted as a newline. To prevent them from being interpreted, strings can be prefixed with ``r``, e.g. ``r'...'``. There is a single prefix operator: ``not x`` Files not in x. Short form is ``! x``. These are the supported infix operators: ``x and y`` The intersection of files in x and y. Short form is ``x & y``. ``x or y`` The union of files in x and y. There are two alternative short forms: ``x | y`` and ``x + y``. ``x - y`` Files in x but not in y. The following predicates are supported: .. predicatesmarker Some sample queries: - Show status of files that appear to be binary in the working directory:: hg status -A "set:binary()" - Forget files that are in .hgignore but are already tracked:: hg forget "set:hgignore() and not ignored()" - Find text files that contain a string:: hg files "set:grep(magic) and not binary()" - Find C files in a non-standard encoding:: hg files "set:**.c and not encoding('UTF-8')" - Revert copies of large binary files:: hg revert "set:copied() and binary() and size('>1M')" - Remove files listed in foo.lst that contain the letter a or b:: hg remove "set: 'listfile:foo.lst' and (**a* or **b*)" See also :hg:`help patterns`. 
mercurial-3.7.3/mercurial/help/internals/0000755000175000017500000000000012676531544020042 5ustar mpmmpm00000000000000mercurial-3.7.3/mercurial/help/internals/changegroups.txt0000644000175000017500000001417412676531525023276 0ustar mpmmpm00000000000000Changegroups ============ Changegroups are representations of repository revlog data, specifically the changelog, manifest, and filelogs. There are 3 versions of changegroups: ``1``, ``2``, and ``3``. From a high-level, versions ``1`` and ``2`` are almost exactly the same, with the only difference being a header on entries in the changeset segment. Version ``3`` adds support for exchanging treemanifests and includes revlog flags in the delta header. Changegroups consists of 3 logical segments:: +---------------------------------+ | | | | | changeset | manifest | filelogs | | | | | +---------------------------------+ The principle building block of each segment is a *chunk*. A *chunk* is a framed piece of data:: +---------------------------------------+ | | | | length | data | | (32 bits) | bytes | | | | +---------------------------------------+ Each chunk starts with a 32-bit big-endian signed integer indicating the length of the raw data that follows. There is a special case chunk that has 0 length (``0x00000000``). We call this an *empty chunk*. Delta Groups ------------ A *delta group* expresses the content of a revlog as a series of deltas, or patches against previous revisions. 
Delta groups consist of 0 or more *chunks* followed by the *empty chunk* to signal the end of the delta group:: +------------------------------------------------------------------------+ | | | | | | | chunk0 length | chunk0 data | chunk1 length | chunk1 data | 0x0 | | (32 bits) | (various) | (32 bits) | (various) | (32 bits) | | | | | | | +------------------------------------------------------------+-----------+ Each *chunk*'s data consists of the following:: +-----------------------------------------+ | | | | | delta header | mdiff header | delta | | (various) | (12 bytes) | (various) | | | | | +-----------------------------------------+ The *length* field is the byte length of the remaining 3 logical pieces of data. The *delta* is a diff from an existing entry in the changelog. The *delta header* is different between versions ``1``, ``2``, and ``3`` of the changegroup format. Version 1:: +------------------------------------------------------+ | | | | | | node | p1 node | p2 node | link node | | (20 bytes) | (20 bytes) | (20 bytes) | (20 bytes) | | | | | | +------------------------------------------------------+ Version 2:: +------------------------------------------------------------------+ | | | | | | | node | p1 node | p2 node | base node | link node | | (20 bytes) | (20 bytes) | (20 bytes) | (20 bytes) | (20 bytes) | | | | | | | +------------------------------------------------------------------+ Version 3:: +------------------------------------------------------------------------------+ | | | | | | | | node | p1 node | p2 node | base node | link node | flags | | (20 bytes) | (20 bytes) | (20 bytes) | (20 bytes) | (20 bytes) | (2 bytes) | | | | | | | | +------------------------------------------------------------------------------+ The *mdiff header* consists of 3 32-bit big-endian signed integers describing offsets at which to apply the following delta content:: +-------------------------------------+ | | | | | offset | old length | new length | | (32 bits) 
| (32 bits) | (32 bits) | | | | | +-------------------------------------+ In version 1, the delta is always applied against the previous node from the changegroup or the first parent if this is the first entry in the changegroup. In version 2, the delta base node is encoded in the entry in the changegroup. This allows the delta to be expressed against any parent, which can result in smaller deltas and more efficient encoding of data. Changeset Segment ----------------- The *changeset segment* consists of a single *delta group* holding changelog data. It is followed by an *empty chunk* to denote the boundary to the *manifests segment*. Manifest Segment ---------------- The *manifest segment* consists of a single *delta group* holding manifest data. It is followed by an *empty chunk* to denote the boundary to the *filelogs segment*. Filelogs Segment ---------------- The *filelogs* segment consists of multiple sub-segments, each corresponding to an individual file whose data is being described:: +--------------------------------------+ | | | | | | filelog0 | filelog1 | filelog2 | ... | | | | | | +--------------------------------------+ In version ``3`` of the changegroup format, filelogs may include directory logs when treemanifests are in use. directory logs are identified by having a trailing '/' on their filename (see below). The final filelog sub-segment is followed by an *empty chunk* to denote the end of the segment and the overall changegroup. Each filelog sub-segment consists of the following:: +------------------------------------------+ | | | | | filename size | filename | delta group | | (32 bits) | (various) | (various) | | | | | +------------------------------------------+ That is, a *chunk* consisting of the filename (not terminated or padded) followed by N chunks constituting the *delta group* for this file. 
mercurial-3.7.3/mercurial/help/internals/revlogs.txt0000644000175000017500000001613612676531525022272 0ustar mpmmpm00000000000000Revisions Logs ============== Revision logs - or *revlogs* - are an append only data structure for storing discrete entries, or *revisions*. They are the primary storage mechanism of repository data. Revlogs effectively model a directed acyclic graph (DAG). Each node has edges to 1 or 2 *parent* nodes. Each node contains metadata and the raw value for that node. Revlogs consist of entries which have metadata and revision data. Metadata includes the hash of the revision's content, sizes, and links to its *parent* entries. The collective metadata is referred to as the *index* and the revision data is the *data*. Revision data is stored as a series of compressed deltas against previous revisions. Revlogs are written in an append-only fashion. We never need to rewrite a file to insert nor do we need to remove data. Rolling back in-progress writes can be performed by truncating files. Read locks can be avoided using simple techniques. This means that references to other data in the same revlog *always* refer to a previous entry. Revlogs can be modeled as 0-indexed arrays. The first revision is revision #0 and the second is revision #1. The revision -1 is typically used to mean *does not exist* or *not defined*. File Format ----------- A revlog begins with a 32-bit big endian integer holding version info and feature flags. This integer is logically divided into 2 16-bit shorts. The least significant half of the integer is the format/version short. The other short holds feature flags that dictate behavior of the revlog. Only 1 bit of the format/version short is currently used. Remaining bits are reserved for future use. The following values for the format/version short are defined: 0 The original revlog version. 1 RevlogNG (*next generation*). It replaced version 0 when it was implemented in 2006. The feature flags short consists of bit flags. 
Where 0 is the least significant bit, the following bit offsets define flags: 0 Store revision data inline. 1 Generaldelta encoding. 2-15 Reserved for future use. The following header values are common: 00 00 00 01 RevlogNG 00 01 00 01 RevlogNG + inline 00 02 00 01 RevlogNG + generaldelta 00 03 00 01 RevlogNG + inline + generaldelta Following the 32-bit header is *index* data. Inlined revision data is possibly located between index entries. More on this layout is described below. RevlogNG Format --------------- RevlogNG (version 1) begins with an index describing the revisions in the revlog. If the ``inline`` flag is set, revision data is stored inline, or between index entries (as opposed to in a separate container). Each index entry is 64 bytes. The byte layout of each entry is as follows, with byte 0 being the first byte (all data stored as big endian): 0-5 (6 bytes) Absolute offset of revision data from beginning of revlog. 6-7 (2 bytes) Bit flags impacting revision behavior. 8-11 (4 bytes) Compressed length of revision data / chunk as stored in revlog. 12-15 (4 bytes) Uncompressed length of revision data / chunk. 16-19 (4 bytes) Base or previous revision this revision's delta was produced against. -1 means this revision holds full text (as opposed to a delta). For generaldelta repos, this is the previous revision in the delta chain. For non-generaldelta repos, this is the base or first revision in the delta chain. 20-23 (4 bytes) A revision this revision is *linked* to. This allows a revision in one revlog to be forever associated with a revision in another revlog. For example, a file's revlog may point to the changelog revision that introduced it. 24-27 (4 bytes) Revision of 1st parent. -1 indicates no parent. 28-31 (4 bytes) Revision of 2nd parent. -1 indicates no 2nd parent. 32-63 (32 bytes) Hash of revision's full text. Currently, SHA-1 is used and only the first 20 bytes of this field are used. The rest of the bytes are ignored and should be stored as \0. 
If inline revision data is being stored, the compressed revision data (of length from bytes offset 8-11 from the index entry) immediately follows the index entry. There is no header on the revision data. There is no padding between it and the index entries before and after. If revision data is not inline, then raw revision data is stored in a separate byte container. The offsets from bytes 0-5 and the compressed length from bytes 8-11 define how to access this data. Delta Chains ------------ Revision data is encoded as a chain of *chunks*. Each chain begins with the compressed original full text for that revision. Each subsequent *chunk* is a *delta* against the previous revision. We therefore call these chains of chunks/deltas *delta chains*. The full text for a revision is reconstructed by loading the original full text for the base revision of a *delta chain* and then applying *deltas* until the target revision is reconstructed. *Delta chains* are limited in length so lookup time is bound. They are limited to ~2x the length of the revision's data. The linear distance between the base chunk and the final chunk is also limited so the amount of read I/O to load all chunks in the delta chain is bound. Deltas and delta chains are either computed against the previous revision in the revlog or another revision (almost certainly one of the parents of the revision). Historically, deltas were computed against the previous revision. The *generaldelta* revlog feature flag (enabled by default in Mercurial 3.7) activates the mode where deltas are computed against an arbitrary revision (almost certainly a parent revision). File Storage ------------ Revlogs logically consist of an index (metadata of entries) and revision data. This data may be stored together in a single file or in separate files. The mechanism used is indicated by the ``inline`` feature flag on the revlog. 
Mercurial's behavior is to use inline storage until a revlog reaches a certain size, at which point it will be converted to non-inline. The reason there is a size limit on inline storage is to establish an upper bound on how much data must be read to load the index. It would be a waste to read tens or hundreds of extra megabytes of data just to access the index data. The actual layout of revlog files on disk is governed by the repository's *store format*. Typically, a ``.i`` file represents the index revlog (possibly containing inline data) and a ``.d`` file holds the revision data. Revision Entries ---------------- Revision entries consist of an optional 1 byte header followed by an encoding of the revision data. The headers are as follows: \0 (0x00) Revision data is the entirety of the entry, including this header. u (0x75) Raw revision data follows. x (0x78) zlib (RFC 1950) data. The 0x78 value is actually the first byte of the zlib header (CMF byte). Hash Computation ---------------- The hash of the revision is stored in the index and is used both as a primary key and for data integrity verification. Currently, SHA-1 is the only supported hashing algorithm. To obtain the SHA-1 hash of a revision: 1. Hash the parent nodes 2. Hash the fulltext of the revision The 20 byte node ids of the parents are fed into the hasher in ascending order.mercurial-3.7.3/mercurial/help/internals/bundles.txt0000644000175000017500000000640212676531525022240 0ustar mpmmpm00000000000000Bundles ======= A bundle is a container for repository data. Bundles are used as standalone files as well as the interchange format over the wire protocol used when two Mercurial peers communicate with each other. Headers ------- Bundles produced since Mercurial 0.7 (September 2005) have a 4 byte header identifying the major bundle type. The header always begins with ``HG`` and the follow 2 bytes indicate the bundle type/version. Some bundle types have additional data after this 4 byte header. 
The following sections describe each bundle header/type. HG10 ---- ``HG10`` headers indicate a *changegroup bundle*. This is the original bundle format, so it is sometimes referred to as *bundle1*. It has been present since version 0.7 (released September 2005). This header is followed by 2 bytes indicating the compression algorithm used for data that follows. All subsequent data following this compression identifier is compressed according to the algorithm/method specified. Supported algorithms include the following. ``BZ`` *bzip2* compression. Bzip2 compressors emit a leading ``BZ`` header. Mercurial uses this leading ``BZ`` as part of the bundle header. Therefore consumers of bzip2 bundles need to *seed* the bzip2 decompressor with ``BZ`` or seek the input stream back to the beginning of the algorithm component of the bundle header so that decompressor input is valid. This behavior is unique among supported compression algorithms. Supported since version 0.7 (released December 2006). ``GZ`` *zlib* compression. Supported since version 0.9.2 (released December 2006). ``UN`` *Uncompressed* or no compression. Unmodified changegroup data follows. Supported since version 0.9.2 (released December 2006). 3rd party extensions may implement their own compression. However, no authority reserves values for their compression algorithm identifiers. HG2X ---- ``HG2X`` headers (where ``X`` is any value) denote a *bundle2* bundle. Bundle2 bundles are a container format for various kinds of repository data and capabilities, beyond changegroup data (which was the only data supported by ``HG10`` bundles. ``HG20`` is currently the only defined bundle2 version. The ``HG20`` format is not yet documented here. See the inline comments in ``mercurial/exchange.py`` for now. Initial ``HG20`` support was added in Mercurial 3.0 (released May 2014). 
However, bundle2 bundles were hidden behind an experimental flag until version 3.5 (released August 2015), when they were enabled in the wire protocol. Various commands (including ``hg bundle``) did not support generating bundle2 files until Mercurial 3.6 (released November 2015). HGS1 ---- *Experimental* A ``HGS1`` header indicates a *streaming clone bundle*. This is a bundle that contains raw revlog data from a repository store. (Typically revlog data is exchanged in the form of changegroups.) The purpose of *streaming clone bundles* are to *clone* repository data very efficiently. The ``HGS1`` header is always followed by 2 bytes indicating a compression algorithm of the data that follows. Only ``UN`` (uncompressed data) is currently allowed. ``HGS1UN`` support was added as an experimental feature in version 3.6 (released November 2015) as part of the initial offering of the *clone bundles* feature. mercurial-3.7.3/mercurial/help/templates.txt0000644000175000017500000000732712676531524020611 0ustar mpmmpm00000000000000Mercurial allows you to customize output of commands through templates. You can either pass in a template or select an existing template-style from the command line, via the --template option. You can customize output for any "log-like" command: log, outgoing, incoming, tip, parents, and heads. Some built-in styles are packaged with Mercurial. These can be listed with :hg:`log --template list`. Example usage:: $ hg log -r1.0::1.1 --template changelog A template is a piece of text, with markup to invoke variable expansion:: $ hg log -r1 --template "{node}\n" b56ce7b07c52de7d5fd79fb89701ea538af65746 Strings in curly braces are called keywords. The availability of keywords depends on the exact context of the templater. These keywords are usually available for templating a log-like command: .. keywordsmarker The "date" keyword does not produce human-readable output. If you want to use a date in your output, you can use a filter to process it. 
Filters are functions which return a string based on the input variable. Be sure to use the stringify filter first when you're applying a string-input filter to a list-like input variable. You can also use a chain of filters to get the desired output:: $ hg tip --template "{date|isodate}\n" 2008-08-21 18:22 +0000 List of filters: .. filtersmarker Note that a filter is nothing more than a function call, i.e. ``expr|filter`` is equivalent to ``filter(expr)``. In addition to filters, there are some basic built-in functions: .. functionsmarker Also, for any expression that returns a list, there is a list operator:: expr % "{template}" As seen in the above example, ``{template}`` is interpreted as a template. To prevent it from being interpreted, you can use an escape character ``\{`` or a raw string prefix, ``r'...'``. Some sample command line templates: - Format lists, e.g. files:: $ hg log -r 0 --template "files:\n{files % ' {file}\n'}" - Join the list of files with a ", ":: $ hg log -r 0 --template "files: {join(files, ', ')}\n" - Modify each line of a commit description:: $ hg log --template "{splitlines(desc) % '**** {line}\n'}" - Format date:: $ hg log -r 0 --template "{date(date, '%Y')}\n" - Display date in UTC:: $ hg log -r 0 --template "{localdate(date, 'UTC')|date}\n" - Output the description set to a fill-width of 30:: $ hg log -r 0 --template "{fill(desc, 30)}" - Use a conditional to test for the default branch:: $ hg log -r 0 --template "{ifeq(branch, 'default', 'on the main branch', 'on branch {branch}')}\n" - Append a newline if not empty:: $ hg tip --template "{if(author, '{author}\n')}" - Label the output for use with the color extension:: $ hg log -r 0 --template "{label('changeset.{phase}', node|short)}\n" - Invert the firstline filter, i.e. 
everything but the first line:: $ hg log -r 0 --template "{sub(r'^.*\n?\n?', '', desc)}\n" - Display the contents of the 'extra' field, one per line:: $ hg log -r 0 --template "{join(extras, '\n')}\n" - Mark the active bookmark with '*':: $ hg log --template "{bookmarks % '{bookmark}{ifeq(bookmark, active, '*')} '}\n" - Find the previous release candidate tag, the distance and changes since the tag:: $ hg log -r . --template "{latesttag('re:^.*-rc$') % '{tag}, {changes}, {distance}'}\n" - Mark the working copy parent with '@':: $ hg log --template "{ifcontains(rev, revset('.'), '@')}\n" - Show details of parent revisions:: $ hg log --template "{revset('parents(%d)', rev) % '{desc|firstline}\n'}" - Show only commit descriptions that start with "template":: $ hg log --template "{startswith('template', firstline(desc))}\n" - Print the first word of each line of a commit message:: $ hg log --template "{word(0, desc)}\n" mercurial-3.7.3/mercurial/help/diffs.txt0000644000175000017500000000251612676531525017702 0ustar mpmmpm00000000000000Mercurial's default format for showing changes between two versions of a file is compatible with the unified format of GNU diff, which can be used by GNU patch and many other standard tools. While this standard format is often enough, it does not encode the following information: - executable status and other permission bits - copy or rename information - changes in binary files - creation or deletion of empty files Mercurial also supports the extended diff format from the git VCS which addresses these limitations. The git diff format is not produced by default because a few widespread tools still do not understand this format. This means that when generating diffs from a Mercurial repository (e.g. with :hg:`export`), you should be careful about things like file copies and renames or other things mentioned above, because when applying a standard diff to a different repository, this extra information is lost. 
Mercurial's internal operations (like push and pull) are not affected by this, because they use an internal binary format for communicating changes. To make Mercurial produce the git extended diff format, use the --git option available for many commands, or set 'git = True' in the [diff] section of your configuration file. You do not need to set this option when importing diffs in this format or using them in the mq extension. mercurial-3.7.3/mercurial/help/scripting.txt0000644000175000017500000001523312676531525020611 0ustar mpmmpm00000000000000It is common for machines (as opposed to humans) to consume Mercurial. This help topic describes some of the considerations for interfacing machines with Mercurial. Choosing an Interface ===================== Machines have a choice of several methods to interface with Mercurial. These include: - Executing the ``hg`` process - Querying a HTTP server - Calling out to a command server Executing ``hg`` processes is very similar to how humans interact with Mercurial in the shell. It should already be familiar to you. :hg:`serve` can be used to start a server. By default, this will start a "hgweb" HTTP server. This HTTP server has support for machine-readable output, such as JSON. For more, see :hg:`help hgweb`. :hg:`serve` can also start a "command server." Clients can connect to this server and issue Mercurial commands over a special protocol. For more details on the command server, including links to client libraries, see https://mercurial.selenic.com/wiki/CommandServer. :hg:`serve` based interfaces (the hgweb and command servers) have the advantage over simple ``hg`` process invocations in that they are likely more efficient. This is because there is significant overhead to spawn new Python processes. .. tip:: If you need to invoke several ``hg`` processes in short order and/or performance is important to you, use of a server-based interface is highly recommended. 
Environment Variables ===================== As documented in :hg:`help environment`, various environment variables influence the operation of Mercurial. The following are particularly relevant for machines consuming Mercurial: HGPLAIN If not set, Mercurial's output could be influenced by configuration settings that impact its encoding, verbose mode, localization, etc. It is highly recommended for machines to set this variable when invoking ``hg`` processes. HGENCODING If not set, the locale used by Mercurial will be detected from the environment. If the determined locale does not support display of certain characters, Mercurial may render these character sequences incorrectly (often by using "?" as a placeholder for invalid characters in the current locale). Explicitly setting this environment variable is a good practice to guarantee consistent results. "utf-8" is a good choice on UNIX-like environments. HGRCPATH If not set, Mercurial will inherit config options from config files using the process described in :hg:`help config`. This includes inheriting user or system-wide config files. When utmost control over the Mercurial configuration is desired, the value of ``HGRCPATH`` can be set to an explicit file with known good configs. In rare cases, the value can be set to an empty file or the null device (often ``/dev/null``) to bypass loading of any user or system config files. Note that these approaches can have unintended consequences, as the user and system config files often define things like the username and extensions that may be required to interface with a repository. Consuming Command Output ======================== It is common for machines to need to parse the output of Mercurial commands for relevant data. This section describes the various techniques for doing so. 
Parsing Raw Command Output -------------------------- Likely the simplest and most effective solution for consuming command output is to simply invoke ``hg`` commands as you would as a user and parse their output. The output of many commands can easily be parsed with tools like ``grep``, ``sed``, and ``awk``. A potential downside with parsing command output is that the output of commands can change when Mercurial is upgraded. While Mercurial does generally strive for strong backwards compatibility, command output does occasionally change. Having tests for your automated interactions with ``hg`` commands is generally recommended, but is even more important when raw command output parsing is involved. Using Templates to Control Output --------------------------------- Many ``hg`` commands support templatized output via the ``-T/--template`` argument. For more, see :hg:`help templates`. Templates are useful for explicitly controlling output so that you get exactly the data you want formatted how you want it. For example, ``log -T {node}\n`` can be used to print a newline delimited list of changeset nodes instead of a human-tailored output containing authors, dates, descriptions, etc. .. tip:: If parsing raw command output is too complicated, consider using templates to make your life easier. The ``-T/--template`` argument allows specifying pre-defined styles. Mercurial ships with the machine-readable styles ``json`` and ``xml``, which provide JSON and XML output, respectively. These are useful for producing output that is machine readable as-is. .. important:: The ``json`` and ``xml`` styles are considered experimental. While they may be attractive to use for easily obtaining machine-readable output, their behavior may change in subsequent versions. These styles may also exhibit unexpected results when dealing with certain encodings. 
Mercurial treats things like filenames as a series of bytes and normalizing certain byte sequences to JSON or XML with certain encoding settings can lead to surprises. Command Server Output --------------------- If using the command server to interact with Mercurial, you are likely using an existing library/API that abstracts implementation details of the command server. If so, this interface layer may perform parsing for you, saving you the work of implementing it yourself. Output Verbosity ---------------- Commands often have varying output verbosity, even when machine readable styles are being used (e.g. ``-T json``). Adding ``-v/--verbose`` and ``--debug`` to the command's arguments can increase the amount of data exposed by Mercurial. An alternate way to get the data you need is by explicitly specifying a template. Other Topics ============ revsets Revisions sets is a functional query language for selecting a set of revisions. Think of it as SQL for Mercurial repositories. Revsets are useful for querying repositories for specific data. See :hg:`help revsets` for more. share extension The ``share`` extension provides functionality for sharing repository data across several working copies. It can even automatically "pool" storage for logically related repositories when cloning. Configuring the ``share`` extension can lead to significant resource utilization reduction, particularly around disk space and the network. This is especially true for continuous integration (CI) environments. See :hg:`help -e share` for more. mercurial-3.7.3/mercurial/help/extensions.txt0000644000175000017500000000234312676531525021004 0ustar mpmmpm00000000000000Mercurial has the ability to add new features through the use of extensions. Extensions may add new commands, add options to existing commands, change the default behavior of commands, or implement hooks. 
To enable the "foo" extension, either shipped with Mercurial or in the Python search path, create an entry for it in your configuration file, like this:: [extensions] foo = You may also specify the full path to an extension:: [extensions] myfeature = ~/.hgext/myfeature.py See :hg:`help config` for more information on configuration files. Extensions are not loaded by default for a variety of reasons: they can increase startup overhead; they may be meant for advanced usage only; they may provide potentially dangerous abilities (such as letting you destroy or modify history); they might not be ready for prime time; or they may alter some usual behaviors of stock Mercurial. It is thus up to the user to activate extensions as needed. To explicitly disable an extension enabled in a configuration file of broader scope, prepend its path with !:: [extensions] # disabling extension bar residing in /path/to/extension/bar.py bar = !/path/to/extension/bar.py # ditto, but no path was supplied for extension baz baz = ! mercurial-3.7.3/mercurial/help/revisions.txt0000644000175000017500000000251212676531524020623 0ustar mpmmpm00000000000000Mercurial supports several ways to specify individual revisions. A plain integer is treated as a revision number. Negative integers are treated as sequential offsets from the tip, with -1 denoting the tip, -2 denoting the revision prior to the tip, and so forth. A 40-digit hexadecimal string is treated as a unique revision identifier. A hexadecimal string less than 40 characters long is treated as a unique revision identifier and is referred to as a short-form identifier. A short-form identifier is only valid if it is the prefix of exactly one full-length identifier. Any other string is treated as a bookmark, tag, or branch name. A bookmark is a movable pointer to a revision. A tag is a permanent name associated with a revision. 
A branch name denotes the tipmost open branch head of that branch - or if they are all closed, the tipmost closed head of the branch. Bookmark, tag, and branch names must not contain the ":" character. The reserved name "tip" always identifies the most recent revision. The reserved name "null" indicates the null revision. This is the revision of an empty repository, and the parent of revision 0. The reserved name "." indicates the working directory parent. If no working directory is checked out, it is equivalent to null. If an uncommitted merge is in progress, "." is the revision of the first parent. mercurial-3.7.3/mercurial/help/dates.txt0000644000175000017500000000237112676531525017706 0ustar mpmmpm00000000000000Some commands allow the user to specify a date, e.g.: - backout, commit, import, tag: Specify the commit date. - log, revert, update: Select revision(s) by date. Many date formats are valid. Here are some examples: - ``Wed Dec 6 13:18:29 2006`` (local timezone assumed) - ``Dec 6 13:18 -0600`` (year assumed, time offset provided) - ``Dec 6 13:18 UTC`` (UTC and GMT are aliases for +0000) - ``Dec 6`` (midnight) - ``13:18`` (today assumed) - ``3:39`` (3:39AM assumed) - ``3:39pm`` (15:39) - ``2006-12-06 13:18:29`` (ISO 8601 format) - ``2006-12-6 13:18`` - ``2006-12-6`` - ``12-6`` - ``12/6`` - ``12/6/6`` (Dec 6 2006) - ``today`` (midnight) - ``yesterday`` (midnight) - ``now`` - right now Lastly, there is Mercurial's internal format: - ``1165411109 0`` (Wed Dec 6 13:18:29 2006 UTC) This is the internal representation format for dates. The first number is the number of seconds since the epoch (1970-01-01 00:00 UTC). The second is the offset of the local timezone, in seconds west of UTC (negative if the timezone is east of UTC). 
The log command also accepts date ranges: - ``DATE`` - on or after a given date/time - ``DATE to DATE`` - a date range, inclusive - ``-DAYS`` - within a given number of days of today mercurial-3.7.3/mercurial/help/glossary.txt0000644000175000017500000003555612676531525020464 0ustar mpmmpm00000000000000Ancestor Any changeset that can be reached by an unbroken chain of parent changesets from a given changeset. More precisely, the ancestors of a changeset can be defined by two properties: a parent of a changeset is an ancestor, and a parent of an ancestor is an ancestor. See also: 'Descendant'. Bookmark Bookmarks are pointers to certain commits that move when committing. They are similar to tags in that it is possible to use bookmark names in all places where Mercurial expects a changeset ID, e.g., with :hg:`update`. Unlike tags, bookmarks move along when you make a commit. Bookmarks can be renamed, copied and deleted. Bookmarks are local, unless they are explicitly pushed or pulled between repositories. Pushing and pulling bookmarks allow you to collaborate with others on a branch without creating a named branch. Branch (Noun) A child changeset that has been created from a parent that is not a head. These are known as topological branches, see 'Branch, topological'. If a topological branch is named, it becomes a named branch. If a topological branch is not named, it becomes an anonymous branch. See 'Branch, anonymous' and 'Branch, named'. Branches may be created when changes are pulled from or pushed to a remote repository, since new heads may be created by these operations. Note that the term branch can also be used informally to describe a development process in which certain development is done independently of other development. This is sometimes done explicitly with a named branch, but it can also be done locally, using bookmarks or clones and anonymous branches. Example: "The experimental branch." 
(Verb) The action of creating a child changeset which results in its parent having more than one child. Example: "I'm going to branch at X." Branch, anonymous Every time a new child changeset is created from a parent that is not a head and the name of the branch is not changed, a new anonymous branch is created. Branch, closed A named branch whose branch heads have all been closed. Branch, default The branch assigned to a changeset when no name has previously been assigned. Branch head See 'Head, branch'. Branch, inactive If a named branch has no topological heads, it is considered to be inactive. As an example, a feature branch becomes inactive when it is merged into the default branch. The :hg:`branches` command shows inactive branches by default, though they can be hidden with :hg:`branches --active`. NOTE: this concept is deprecated because it is too implicit. Branches should now be explicitly closed using :hg:`commit --close-branch` when they are no longer needed. Branch, named A collection of changesets which have the same branch name. By default, children of a changeset in a named branch belong to the same named branch. A child can be explicitly assigned to a different branch. See :hg:`help branch`, :hg:`help branches` and :hg:`commit --close-branch` for more information on managing branches. Named branches can be thought of as a kind of namespace, dividing the collection of changesets that comprise the repository into a collection of disjoint subsets. A named branch is not necessarily a topological branch. If a new named branch is created from the head of another named branch, or the default branch, but no further changesets are added to that previous branch, then that previous branch will be a branch in name only. Branch tip See 'Tip, branch'. Branch, topological Every time a new child changeset is created from a parent that is not a head, a new topological branch is created. If a topological branch is named, it becomes a named branch. 
If a topological branch is not named, it becomes an anonymous branch of the current, possibly default, branch. Changelog A record of the changesets in the order in which they were added to the repository. This includes details such as changeset id, author, commit message, date, and list of changed files. Changeset A snapshot of the state of the repository used to record a change. Changeset, child The converse of parent changeset: if P is a parent of C, then C is a child of P. There is no limit to the number of children that a changeset may have. Changeset id A SHA-1 hash that uniquely identifies a changeset. It may be represented as either a "long" 40 hexadecimal digit string, or a "short" 12 hexadecimal digit string. Changeset, merge A changeset with two parents. This occurs when a merge is committed. Changeset, parent A revision upon which a child changeset is based. Specifically, a parent changeset of a changeset C is a changeset whose node immediately precedes C in the DAG. Changesets have at most two parents. Checkout (Noun) The working directory being updated to a specific revision. This use should probably be avoided where possible, as changeset is much more appropriate than checkout in this context. Example: "I'm using checkout X." (Verb) Updating the working directory to a specific changeset. See :hg:`help update`. Example: "I'm going to check out changeset X." Child changeset See 'Changeset, child'. Close changeset See 'Head, closed branch'. Closed branch See 'Branch, closed'. Clone (Noun) An entire or partial copy of a repository. The partial clone must be in the form of a revision and its ancestors. Example: "Is your clone up to date?" (Verb) The process of creating a clone, using :hg:`clone`. Example: "I'm going to clone the repository." Closed branch head See 'Head, closed branch'. Commit (Noun) A synonym for changeset. Example: "Is the bug fixed in your recent commit?" (Verb) The act of recording changes to a repository. 
When files are committed in a working directory, Mercurial finds the differences between the committed files and their parent changeset, creating a new changeset in the repository. Example: "You should commit those changes now." Cset A common abbreviation of the term changeset. DAG The repository of changesets of a distributed version control system (DVCS) can be described as a directed acyclic graph (DAG), consisting of nodes and edges, where nodes correspond to changesets and edges imply a parent -> child relation. This graph can be visualized by graphical tools such as :hg:`log --graph`. In Mercurial, the DAG is limited by the requirement for children to have at most two parents. Deprecated Feature removed from documentation, but not scheduled for removal. Default branch See 'Branch, default'. Descendant Any changeset that can be reached by a chain of child changesets from a given changeset. More precisely, the descendants of a changeset can be defined by two properties: the child of a changeset is a descendant, and the child of a descendant is a descendant. See also: 'Ancestor'. Diff (Noun) The difference between the contents and attributes of files in two changesets or a changeset and the current working directory. The difference is usually represented in a standard form called a "diff" or "patch". The "git diff" format is used when the changes include copies, renames, or changes to file attributes, none of which can be represented/handled by classic "diff" and "patch". Example: "Did you see my correction in the diff?" (Verb) Diffing two changesets is the action of creating a diff or patch. Example: "If you diff with changeset X, you will see what I mean." Directory, working The working directory represents the state of the files tracked by Mercurial, that will be recorded in the next commit. The working directory initially corresponds to the snapshot at an existing changeset, known as the parent of the working directory. See 'Parent, working directory'. 
The state may be modified by changes to the files introduced manually or by a merge. The repository metadata exists in the .hg directory inside the working directory. Draft Changesets in the draft phase have not been shared with publishing repositories and may thus be safely changed by history-modifying extensions. See :hg:`help phases`. Experimental Feature that may change or be removed at a later date. Graph See DAG and :hg:`log --graph`. Head The term 'head' may be used to refer to both a branch head or a repository head, depending on the context. See 'Head, branch' and 'Head, repository' for specific definitions. Heads are where development generally takes place and are the usual targets for update and merge operations. Head, branch A changeset with no descendants on the same named branch. Head, closed branch A changeset that marks a head as no longer interesting. The closed head is no longer listed by :hg:`heads`. A branch is considered closed when all its heads are closed and consequently is not listed by :hg:`branches`. Closed heads can be re-opened by committing new changeset as the child of the changeset that marks a head as closed. Head, repository A topological head which has not been closed. Head, topological A changeset with no children in the repository. History, immutable Once committed, changesets cannot be altered. Extensions which appear to change history actually create new changesets that replace existing ones, and then destroy the old changesets. Doing so in public repositories can result in old changesets being reintroduced to the repository. History, rewriting The changesets in a repository are immutable. However, extensions to Mercurial can be used to alter the repository, usually in such a way as to preserve changeset contents. Immutable history See 'History, immutable'. Merge changeset See 'Changeset, merge'. Manifest Each changeset has a manifest, which is the list of files that are tracked by the changeset. 
Merge Used to bring together divergent branches of work. When you update to a changeset and then merge another changeset, you bring the history of the latter changeset into your working directory. Once conflicts are resolved (and marked), this merge may be committed as a merge changeset, bringing two branches together in the DAG. Named branch See 'Branch, named'. Null changeset The empty changeset. It is the parent state of newly-initialized repositories and repositories with no checked out revision. It is thus the parent of root changesets and the effective ancestor when merging unrelated changesets. Can be specified by the alias 'null' or by the changeset ID '000000000000'. Parent See 'Changeset, parent'. Parent changeset See 'Changeset, parent'. Parent, working directory The working directory parent reflects a virtual revision which is the child of the changeset (or two changesets with an uncommitted merge) shown by :hg:`parents`. This is changed with :hg:`update`. Other commands to see the working directory parent are :hg:`summary` and :hg:`id`. Can be specified by the alias ".". Patch (Noun) The product of a diff operation. Example: "I've sent you my patch." (Verb) The process of using a patch file to transform one changeset into another. Example: "You will need to patch that revision." Phase A per-changeset state tracking how the changeset has been or should be shared. See :hg:`help phases`. Public Changesets in the public phase have been shared with publishing repositories and are therefore considered immutable. See :hg:`help phases`. Pull An operation in which changesets in a remote repository which are not in the local repository are brought into the local repository. Note that this operation without special arguments only updates the repository, it does not update the files in the working directory. See :hg:`help pull`. Push An operation in which changesets in a local repository which are not in a remote repository are sent to the remote repository. 
Note that this operation only adds changesets which have been committed locally to the remote repository. Uncommitted changes are not sent. See :hg:`help push`. Repository The metadata describing all recorded states of a collection of files. Each recorded state is represented by a changeset. A repository is usually (but not always) found in the ``.hg`` subdirectory of a working directory. Any recorded state can be recreated by "updating" a working directory to a specific changeset. Repository head See 'Head, repository'. Revision A state of the repository at some point in time. Earlier revisions can be updated to by using :hg:`update`. See also 'Revision number'; See also 'Changeset'. Revision number This integer uniquely identifies a changeset in a specific repository. It represents the order in which changesets were added to a repository, starting with revision number 0. Note that the revision number may be different in each clone of a repository. To identify changesets uniquely between different clones, see 'Changeset id'. Revlog History storage mechanism used by Mercurial. It is a form of delta encoding, with occasional full revision of data followed by delta of each successive revision. It includes data and an index pointing to the data. Rewriting history See 'History, rewriting'. Root A changeset that has only the null changeset as its parent. Most repositories have only a single root changeset. Secret Changesets in the secret phase may not be shared via push, pull, or clone. See :hg:`help phases`. Tag An alternative name given to a changeset. Tags can be used in all places where Mercurial expects a changeset ID, e.g., with :hg:`update`. The creation of a tag is stored in the history and will thus automatically be shared with others using push and pull. Tip The changeset with the highest revision number. It is the changeset most recently added in a repository. Tip, branch The head of a given branch with the highest revision number. 
When a branch name is used as a revision identifier, it refers to the branch tip. See also 'Branch, head'. Note that because revision numbers may be different in different repository clones, the branch tip may be different in different cloned repositories. Update (Noun) Another synonym of changeset. Example: "I've pushed an update." (Verb) This term is usually used to describe updating the state of the working directory to that of a specific changeset. See :hg:`help update`. Example: "You should update." Working directory See 'Directory, working'. Working directory parent See 'Parent, working directory'. mercurial-3.7.3/mercurial/help/phases.txt0000644000175000017500000000572112676531525020073 0ustar mpmmpm00000000000000What are phases? ================ Phases are a system for tracking which changesets have been or should be shared. This helps prevent common mistakes when modifying history (for instance, with the mq or rebase extensions). Each changeset in a repository is in one of the following phases: - public : changeset is visible on a public server - draft : changeset is not yet published - secret : changeset should not be pushed, pulled, or cloned These phases are ordered (public < draft < secret) and no changeset can be in a lower phase than its ancestors. For instance, if a changeset is public, all its ancestors are also public. Lastly, changeset phases should only be changed towards the public phase. How are phases managed? ======================= For the most part, phases should work transparently. By default, a changeset is created in the draft phase and is moved into the public phase when it is pushed to another repository. Once changesets become public, extensions like mq and rebase will refuse to operate on them to prevent creating duplicate changesets. Phases can also be manually manipulated with the :hg:`phase` command if needed. See :hg:`help -v phase` for examples. 
To make your commits secret by default, put this in your configuration file:: [phases] new-commit = secret Phases and servers ================== Normally, all servers are ``publishing`` by default. This means:: - all draft changesets that are pulled or cloned appear in phase public on the client - all draft changesets that are pushed appear as public on both client and server - secret changesets are neither pushed, pulled, or cloned .. note:: Pulling a draft changeset from a publishing server does not mark it as public on the server side due to the read-only nature of pull. Sometimes it may be desirable to push and pull changesets in the draft phase to share unfinished work. This can be done by setting a repository to disable publishing in its configuration file:: [phases] publish = False See :hg:`help config` for more information on configuration files. .. note:: Servers running older versions of Mercurial are treated as publishing. .. note:: Changesets in secret phase are not exchanged with the server. This applies to their content: file names, file contents, and changeset metadata. For technical reasons, the identifier (e.g. d825e4025e39) of the secret changeset may be communicated to the server. Examples ======== - list changesets in draft or secret phase:: hg log -r "not public()" - change all secret changesets to draft:: hg phase --draft "secret()" - forcibly move the current changeset and descendants from public to draft:: hg phase --force --draft . - show a list of changeset revision and phase:: hg log --template "{rev} {phase}\n" - resynchronize draft changesets relative to a remote repository:: hg phase -fd "outgoing(URL)" See :hg:`help phase` for more information on manually manipulating phases. mercurial-3.7.3/mercurial/help/config.txt0000644000175000017500000021602412676531525020055 0ustar mpmmpm00000000000000The Mercurial system uses a set of configuration files to control aspects of its behavior. 
Troubleshooting =============== If you're having problems with your configuration, :hg:`config --debug` can help you understand what is introducing a setting into your environment. See :hg:`help config.syntax` and :hg:`help config.files` for information about how and where to override things. Structure ========= The configuration files use a simple ini-file format. A configuration file consists of sections, led by a ``[section]`` header and followed by ``name = value`` entries:: [ui] username = Firstname Lastname verbose = True The above entries will be referred to as ``ui.username`` and ``ui.verbose``, respectively. See :hg:`help config.syntax`. Files ===== Mercurial reads configuration data from several files, if they exist. These files do not exist by default and you will have to create the appropriate configuration files yourself: Local configuration is put into the per-repository ``/.hg/hgrc`` file. Global configuration like the username setting is typically put into: .. container:: windows - ``%USERPROFILE%\mercurial.ini`` (on Windows) .. container:: unix.plan9 - ``$HOME/.hgrc`` (on Unix, Plan9) The names of these files depend on the system on which Mercurial is installed. ``*.rc`` files from a single directory are read in alphabetical order, later ones overriding earlier ones. Where multiple paths are given below, settings from earlier paths override later ones. .. container:: verbose.unix On Unix, the following files are consulted: - ``/.hg/hgrc`` (per-repository) - ``$HOME/.hgrc`` (per-user) - ``/etc/mercurial/hgrc`` (per-installation) - ``/etc/mercurial/hgrc.d/*.rc`` (per-installation) - ``/etc/mercurial/hgrc`` (per-system) - ``/etc/mercurial/hgrc.d/*.rc`` (per-system) - ``/default.d/*.rc`` (defaults) .. 
container:: verbose.windows On Windows, the following files are consulted: - ``/.hg/hgrc`` (per-repository) - ``%USERPROFILE%\.hgrc`` (per-user) - ``%USERPROFILE%\Mercurial.ini`` (per-user) - ``%HOME%\.hgrc`` (per-user) - ``%HOME%\Mercurial.ini`` (per-user) - ``HKEY_LOCAL_MACHINE\SOFTWARE\Mercurial`` (per-installation) - ``\hgrc.d\*.rc`` (per-installation) - ``\Mercurial.ini`` (per-installation) - ``/default.d/*.rc`` (defaults) .. note:: The registry key ``HKEY_LOCAL_MACHINE\SOFTWARE\Wow6432Node\Mercurial`` is used when running 32-bit Python on 64-bit Windows. .. container:: windows On Windows 9x, ``%HOME%`` is replaced by ``%APPDATA%``. .. container:: verbose.plan9 On Plan9, the following files are consulted: - ``/.hg/hgrc`` (per-repository) - ``$home/lib/hgrc`` (per-user) - ``/lib/mercurial/hgrc`` (per-installation) - ``/lib/mercurial/hgrc.d/*.rc`` (per-installation) - ``/lib/mercurial/hgrc`` (per-system) - ``/lib/mercurial/hgrc.d/*.rc`` (per-system) - ``/default.d/*.rc`` (defaults) Per-repository configuration options only apply in a particular repository. This file is not version-controlled, and will not get transferred during a "clone" operation. Options in this file override options in all other configuration files. .. container:: unix.plan9 On Plan 9 and Unix, most of this file will be ignored if it doesn't belong to a trusted user or to a trusted group. See :hg:`help config.trusted` for more details. Per-user configuration file(s) are for the user running Mercurial. Options in these files apply to all Mercurial commands executed by this user in any directory. Options in these files override per-system and per-installation options. Per-installation configuration files are searched for in the directory where Mercurial is installed. ```` is the parent directory of the **hg** executable (or symlink) being run. .. container:: unix.plan9 For example, if installed in ``/shared/tools/bin/hg``, Mercurial will look in ``/shared/tools/etc/mercurial/hgrc``. 
Options in these files apply to all Mercurial commands executed by any user in any directory. Per-installation configuration files are for the system on which Mercurial is running. Options in these files apply to all Mercurial commands executed by any user in any directory. Registry keys contain PATH-like strings, every part of which must reference a ``Mercurial.ini`` file or be a directory where ``*.rc`` files will be read. Mercurial checks each of these locations in the specified order until one or more configuration files are detected. Per-system configuration files are for the system on which Mercurial is running. Options in these files apply to all Mercurial commands executed by any user in any directory. Options in these files override per-installation options. Mercurial comes with some default configuration. The default configuration files are installed with Mercurial and will be overwritten on upgrades. Default configuration files should never be edited by users or administrators but can be overridden in other configuration files. So far the directory only contains merge tool configuration but packagers can also put other default configuration there. Syntax ====== A configuration file consists of sections, led by a ``[section]`` header and followed by ``name = value`` entries (sometimes called ``configuration keys``):: [spam] eggs=ham green= eggs Each line contains one entry. If the lines that follow are indented, they are treated as continuations of that entry. Leading whitespace is removed from values. Empty lines are skipped. Lines beginning with ``#`` or ``;`` are ignored and may be used to provide comments. Configuration keys can be set multiple times, in which case Mercurial will use the value that was configured last. As an example:: [spam] eggs=large ham=serrano eggs=small This would set the configuration key named ``eggs`` to ``small``. It is also possible to define a section multiple times. 
A section can be redefined on the same and/or on different configuration files. For example:: [foo] eggs=large ham=serrano eggs=small [bar] eggs=ham green= eggs [foo] ham=prosciutto eggs=medium bread=toasted This would set the ``eggs``, ``ham``, and ``bread`` configuration keys of the ``foo`` section to ``medium``, ``prosciutto``, and ``toasted``, respectively. As you can see the only thing that matters is the last value that was set for each of the configuration keys. If a configuration key is set multiple times in different configuration files the final value will depend on the order in which the different configuration files are read, with settings from earlier paths overriding later ones as described on the ``Files`` section above. A line of the form ``%include file`` will include ``file`` into the current configuration file. The inclusion is recursive, which means that included files can include other files. Filenames are relative to the configuration file in which the ``%include`` directive is found. Environment variables and ``~user`` constructs are expanded in ``file``. This lets you do something like:: %include ~/.hgrc.d/$HOST.rc to include a different configuration file on each computer you use. A line with ``%unset name`` will remove ``name`` from the current section, if it has been set previously. The values are either free-form text strings, lists of text strings, or Boolean values. Boolean values can be set to true using any of "1", "yes", "true", or "on" and to false using "0", "no", "false", or "off" (all case insensitive). List values are separated by whitespace or comma, except when values are placed in double quotation marks:: allow_read = "John Doe, PhD", brian, betty Quotation marks can be escaped by prefixing them with a backslash. Only quotation marks at the beginning of a word are counted as a quotation (e.g., ``foo"bar baz`` is the list of ``foo"bar`` and ``baz``). 
Sections ======== This section describes the different sections that may appear in a Mercurial configuration file, the purpose of each section, its possible keys, and their possible values. ``alias`` --------- Defines command aliases. Aliases allow you to define your own commands in terms of other commands (or aliases), optionally including arguments. Positional arguments in the form of ``$1``, ``$2``, etc. in the alias definition are expanded by Mercurial before execution. Positional arguments not already used by ``$N`` in the definition are put at the end of the command to be executed. Alias definitions consist of lines of the form:: = []... For example, this definition:: latest = log --limit 5 creates a new command ``latest`` that shows only the five most recent changesets. You can define subsequent aliases using earlier ones:: stable5 = latest -b stable .. note:: It is possible to create aliases with the same names as existing commands, which will then override the original definitions. This is almost always a bad idea! An alias can start with an exclamation point (``!``) to make it a shell alias. A shell alias is executed with the shell and will let you run arbitrary commands. As an example, :: echo = !echo $@ will let you do ``hg echo foo`` to have ``foo`` printed in your terminal. A better example might be:: purge = !$HG status --no-status --unknown -0 re: | xargs -0 rm which will make ``hg purge`` delete all unknown files in the repository in the same manner as the purge extension. Positional arguments like ``$1``, ``$2``, etc. in the alias definition expand to the command arguments. Unmatched arguments are removed. ``$0`` expands to the alias name and ``$@`` expands to all arguments separated by a space. ``"$@"`` (with quotes) expands to all arguments quoted individually and separated by a space. These expansions happen before the command is passed to the shell. 
Shell aliases are executed in an environment where ``$HG`` expands to the path of the Mercurial that was used to execute the alias. This is useful when you want to call further Mercurial commands in a shell alias, as was done above for the purge alias. In addition, ``$HG_ARGS`` expands to the arguments given to Mercurial. In the ``hg echo foo`` call above, ``$HG_ARGS`` would expand to ``echo foo``. .. note:: Some global configuration options such as ``-R`` are processed before shell aliases and will thus not be passed to aliases. ``annotate`` ------------ Settings used when displaying file annotations. All values are Booleans and default to False. See :hg:`help config.diff` for related options for the diff command. ``ignorews`` Ignore white space when comparing lines. ``ignorewsamount`` Ignore changes in the amount of white space. ``ignoreblanklines`` Ignore changes whose lines are all blank. ``auth`` -------- Authentication credentials for HTTP authentication. This section allows you to store usernames and passwords for use when logging *into* HTTP servers. See :hg:`help config.web` if you want to configure *who* can login to your HTTP server. Each line has the following format:: . = where ```` is used to group arguments into authentication entries. Example:: foo.prefix = hg.intevation.de/mercurial foo.username = foo foo.password = bar foo.schemes = http https bar.prefix = secure.example.org bar.key = path/to/file.key bar.cert = path/to/file.cert bar.schemes = https Supported arguments: ``prefix`` Either ``*`` or a URI prefix with or without the scheme part. The authentication entry with the longest matching prefix is used (where ``*`` matches everything and counts as a match of length 1). If the prefix doesn't include a scheme, the match is performed against the URI with its scheme stripped as well, and the schemes argument, q.v., is then subsequently consulted. ``username`` Optional. Username to authenticate with. 
If not given, and the remote site requires basic or digest authentication, the user will be prompted for it. Environment variables are expanded in the username letting you do ``foo.username = $USER``. If the URI includes a username, only ``[auth]`` entries with a matching username or without a username will be considered. ``password`` Optional. Password to authenticate with. If not given, and the remote site requires basic or digest authentication, the user will be prompted for it. ``key`` Optional. PEM encoded client certificate key file. Environment variables are expanded in the filename. ``cert`` Optional. PEM encoded client certificate chain file. Environment variables are expanded in the filename. ``schemes`` Optional. Space separated list of URI schemes to use this authentication entry with. Only used if the prefix doesn't include a scheme. Supported schemes are http and https. They will match static-http and static-https respectively, as well. (default: https) If no suitable authentication entry is found, the user is prompted for credentials as usual if required by the remote. ``committemplate`` ------------------ ``changeset`` String: configuration in this section is used as the template to customize the text shown in the editor when committing. In addition to pre-defined template keywords, commit log specific one below can be used for customization: ``extramsg`` String: Extra message (typically 'Leave message empty to abort commit.'). This may be changed by some commands or extensions. For example, the template configuration below shows as same text as one shown by default:: [committemplate] changeset = {desc}\n\n HG: Enter commit message. Lines beginning with 'HG:' are removed. 
HG: {extramsg} HG: -- HG: user: {author}\n{ifeq(p2rev, "-1", "", "HG: branch merge\n") }HG: branch '{branch}'\n{if(activebookmark, "HG: bookmark '{activebookmark}'\n") }{subrepos % "HG: subrepo {subrepo}\n" }{file_adds % "HG: added {file}\n" }{file_mods % "HG: changed {file}\n" }{file_dels % "HG: removed {file}\n" }{if(files, "", "HG: no files changed\n")} .. note:: For some problematic encodings (see :hg:`help win32mbcs` for detail), this customization should be configured carefully, to avoid showing broken characters. For example, if a multibyte character ending with backslash (0x5c) is followed by the ASCII character 'n' in the customized template, the sequence of backslash and 'n' is treated as line-feed unexpectedly (and the multibyte character is broken, too). Customized template is used for commands below (``--edit`` may be required): - :hg:`backout` - :hg:`commit` - :hg:`fetch` (for merge commit only) - :hg:`graft` - :hg:`histedit` - :hg:`import` - :hg:`qfold`, :hg:`qnew` and :hg:`qrefresh` - :hg:`rebase` - :hg:`shelve` - :hg:`sign` - :hg:`tag` - :hg:`transplant` Configuring items below instead of ``changeset`` allows showing customized message only for specific actions, or showing different messages for each action. 
- ``changeset.backout`` for :hg:`backout` - ``changeset.commit.amend.merge`` for :hg:`commit --amend` on merges - ``changeset.commit.amend.normal`` for :hg:`commit --amend` on other - ``changeset.commit.normal.merge`` for :hg:`commit` on merges - ``changeset.commit.normal.normal`` for :hg:`commit` on other - ``changeset.fetch`` for :hg:`fetch` (impling merge commit) - ``changeset.gpg.sign`` for :hg:`sign` - ``changeset.graft`` for :hg:`graft` - ``changeset.histedit.edit`` for ``edit`` of :hg:`histedit` - ``changeset.histedit.fold`` for ``fold`` of :hg:`histedit` - ``changeset.histedit.mess`` for ``mess`` of :hg:`histedit` - ``changeset.histedit.pick`` for ``pick`` of :hg:`histedit` - ``changeset.import.bypass`` for :hg:`import --bypass` - ``changeset.import.normal.merge`` for :hg:`import` on merges - ``changeset.import.normal.normal`` for :hg:`import` on other - ``changeset.mq.qnew`` for :hg:`qnew` - ``changeset.mq.qfold`` for :hg:`qfold` - ``changeset.mq.qrefresh`` for :hg:`qrefresh` - ``changeset.rebase.collapse`` for :hg:`rebase --collapse` - ``changeset.rebase.merge`` for :hg:`rebase` on merges - ``changeset.rebase.normal`` for :hg:`rebase` on other - ``changeset.shelve.shelve`` for :hg:`shelve` - ``changeset.tag.add`` for :hg:`tag` without ``--remove`` - ``changeset.tag.remove`` for :hg:`tag --remove` - ``changeset.transplant.merge`` for :hg:`transplant` on merges - ``changeset.transplant.normal`` for :hg:`transplant` on other These dot-separated lists of names are treated as hierarchical ones. For example, ``changeset.tag.remove`` customizes the commit message only for :hg:`tag --remove`, but ``changeset.tag`` customizes the commit message for :hg:`tag` regardless of ``--remove`` option. When the external editor is invoked for a commit, the corresponding dot-separated list of names without the ``changeset.`` prefix (e.g. ``commit.normal.normal``) is in the ``HGEDITFORM`` environment variable. 
In this section, items other than ``changeset`` can be referred to from others. For example, the configuration below, which lists committed files, can be referred to as ``{listupfiles}``::
This filter mechanism is used internally by the ``eol`` extension to translate line ending characters between Windows (CRLF) and Unix (LF) format. We suggest you use the ``eol`` extension for convenience. ``defaults`` ------------ (defaults are deprecated. Don't use them. Use aliases instead.) Use the ``[defaults]`` section to define command defaults, i.e. the default options/arguments to pass to the specified commands. The following example makes :hg:`log` run in verbose mode, and :hg:`status` show only the modified files, by default:: [defaults] log = -v status = -m The actual commands, instead of their aliases, must be used when defining command defaults. The command defaults will also be applied to the aliases of the commands defined. ``diff`` -------- Settings used when displaying diffs. Everything except for ``unified`` is a Boolean and defaults to False. See :hg:`help config.annotate` for related options for the annotate command. ``git`` Use git extended diff format. ``nobinary`` Omit git binary patches. ``nodates`` Don't include dates in diff headers. ``noprefix`` Omit 'a/' and 'b/' prefixes from filenames. Ignored in plain mode. ``showfunc`` Show which function each change is in. ``ignorews`` Ignore white space when comparing lines. ``ignorewsamount`` Ignore changes in the amount of white space. ``ignoreblanklines`` Ignore changes whose lines are all blank. ``unified`` Number of lines of context to show. ``email`` --------- Settings for extensions that send email messages. ``from`` Optional. Email address to use in "From" header and SMTP envelope of outgoing messages. ``to`` Optional. Comma-separated list of recipients' email addresses. ``cc`` Optional. Comma-separated list of carbon copy recipients' email addresses. ``bcc`` Optional. Comma-separated list of blind carbon copy recipients' email addresses. ``method`` Optional. Method to use to send email messages. If value is ``smtp`` (default), use SMTP (see the ``[smtp]`` section for configuration). 
Otherwise, use as name of program to run that acts like sendmail (takes ``-f`` option for sender, list of recipients on command line, message on stdin). Normally, setting this to ``sendmail`` or ``/usr/sbin/sendmail`` is enough to use sendmail to send messages. ``charsets`` Optional. Comma-separated list of character sets considered convenient for recipients. Addresses, headers, and parts not containing patches of outgoing messages will be encoded in the first character set to which conversion from local encoding (``$HGENCODING``, ``ui.fallbackencoding``) succeeds. If correct conversion fails, the text in question is sent as is. (default: '') Order of outgoing email character sets: 1. ``us-ascii``: always first, regardless of settings 2. ``email.charsets``: in order given by user 3. ``ui.fallbackencoding``: if not in email.charsets 4. ``$HGENCODING``: if not in email.charsets 5. ``utf-8``: always last, regardless of settings Email example:: [email] from = Joseph User method = /usr/sbin/sendmail # charsets for western Europeans # us-ascii, utf-8 omitted, as they are tried first and last charsets = iso-8859-1, iso-8859-15, windows-1252 ``extensions`` -------------- Mercurial has an extension mechanism for adding new features. To enable an extension, create an entry for it in this section. If you know that the extension is already in Python's search path, you can give the name of the module, followed by ``=``, with nothing after the ``=``. Otherwise, give a name that you choose, followed by ``=``, followed by the path to the ``.py`` file (including the file name extension) that defines the extension. To explicitly disable an extension that is enabled in an hgrc of broader scope, prepend its path with ``!``, as in ``foo = !/ext/path`` or ``foo = !`` when path is not supplied. 
Example for ``~/.hgrc``:: [extensions] # (the color extension will get loaded from Mercurial's path) color = # (this extension will get loaded from the file specified) myfeature = ~/.hgext/myfeature.py ``format`` ---------- ``usegeneraldelta`` Enable or disable the "generaldelta" repository format which improves repository compression by allowing "revlog" to store delta against arbitrary revision instead of the previous stored one. This provides significant improvement for repositories with branches. Repositories with this on-disk format require Mercurial version 1.9. Enabled by default. ``dotencode`` Enable or disable the "dotencode" repository format which enhances the "fncache" repository format (which has to be enabled to use dotencode) to avoid issues with filenames starting with ._ on Mac OS X and spaces on Windows. Repositories with this on-disk format require Mercurial version 1.7. Enabled by default. ``usefncache`` Enable or disable the "fncache" repository format which enhances the "store" repository format (which has to be enabled to use fncache) to allow longer filenames and avoids using Windows reserved names, e.g. "nul". Repositories with this on-disk format require Mercurial version 1.1. Enabled by default. ``usestore`` Enable or disable the "store" repository format which improves compatibility with systems that fold case or otherwise mangle filenames. Disabling this option will allow you to store longer filenames in some situations at the expense of compatibility. Repositories with this on-disk format require Mercurial version 0.9.4. Enabled by default. ``graph`` --------- Web graph view configuration. This section let you change graph elements display properties by branches, for instance to make the ``default`` branch stand out. Each line has the following format:: . = where ```` is the name of the branch being customized. 
Example:: [graph] # 2px width default.width = 2 # red color default.color = FF0000 Supported arguments: ``width`` Set branch edges width in pixels. ``color`` Set branch edges color in hexadecimal RGB notation. ``hooks`` --------- Commands or Python functions that get automatically executed by various actions such as starting or finishing a commit. Multiple hooks can be run for the same action by appending a suffix to the action. Overriding a site-wide hook can be done by changing its value or setting it to an empty string. Hooks can be prioritized by adding a prefix of ``priority.`` to the hook name on a new line and setting the priority. The default priority is 0. Example ``.hg/hgrc``:: [hooks] # update working directory after adding changesets changegroup.update = hg update # do not use the site-wide hook incoming = incoming.email = /my/email/hook incoming.autobuild = /my/build/hook # force autobuild hook to run before other incoming hooks priority.incoming.autobuild = 1 Most hooks are run with environment variables set that give useful additional information. For each hook below, the environment variables it is passed are listed with names of the form ``$HG_foo``. ``changegroup`` Run after a changegroup has been added via push, pull or unbundle. ID of the first new changeset is in ``$HG_NODE`` and last in ``$HG_NODE_LAST``. URL from which changes came is in ``$HG_URL``. ``commit`` Run after a changeset has been created in the local repository. ID of the newly created changeset is in ``$HG_NODE``. Parent changeset IDs are in ``$HG_PARENT1`` and ``$HG_PARENT2``. ``incoming`` Run after a changeset has been pulled, pushed, or unbundled into the local repository. The ID of the newly arrived changeset is in ``$HG_NODE``. URL that was source of changes came is in ``$HG_URL``. ``outgoing`` Run after sending changes from local repository to another. ID of first changeset sent is in ``$HG_NODE``. 
Source of operation is in ``$HG_SOURCE``; Also see :hg:`help config.preoutgoing` hook. ``post-`` Run after successful invocations of the associated command. The contents of the command line are passed as ``$HG_ARGS`` and the result code in ``$HG_RESULT``. Parsed command line arguments are passed as ``$HG_PATS`` and ``$HG_OPTS``. These contain string representations of the python data internally passed to . ``$HG_OPTS`` is a dictionary of options (with unspecified options set to their defaults). ``$HG_PATS`` is a list of arguments. Hook failure is ignored. ``pre-`` Run before executing the associated command. The contents of the command line are passed as ``$HG_ARGS``. Parsed command line arguments are passed as ``$HG_PATS`` and ``$HG_OPTS``. These contain string representations of the data internally passed to . ``$HG_OPTS`` is a dictionary of options (with unspecified options set to their defaults). ``$HG_PATS`` is a list of arguments. If the hook returns failure, the command doesn't execute and Mercurial returns the failure code. ``prechangegroup`` Run before a changegroup is added via push, pull or unbundle. Exit status 0 allows the changegroup to proceed. Non-zero status will cause the push, pull or unbundle to fail. URL from which changes will come is in ``$HG_URL``. ``precommit`` Run before starting a local commit. Exit status 0 allows the commit to proceed. Non-zero status will cause the commit to fail. Parent changeset IDs are in ``$HG_PARENT1`` and ``$HG_PARENT2``. ``prelistkeys`` Run before listing pushkeys (like bookmarks) in the repository. Non-zero status will cause failure. The key namespace is in ``$HG_NAMESPACE``. ``preoutgoing`` Run before collecting changes to send from the local repository to another. Non-zero status will cause failure. This lets you prevent pull over HTTP or SSH. Also prevents against local pull, push (outbound) or bundle commands, but not effective, since you can just copy files instead then. 
Source of operation is in ``$HG_SOURCE``. If "serve", operation is happening on behalf of remote SSH or HTTP repository. If "push", "pull" or "bundle", operation is happening on behalf of repository on same system. ``prepushkey`` Run before a pushkey (like a bookmark) is added to the repository. Non-zero status will cause the key to be rejected. The key namespace is in ``$HG_NAMESPACE``, the key is in ``$HG_KEY``, the old value (if any) is in ``$HG_OLD``, and the new value is in ``$HG_NEW``. ``pretag`` Run before creating a tag. Exit status 0 allows the tag to be created. Non-zero status will cause the tag to fail. ID of changeset to tag is in ``$HG_NODE``. Name of tag is in ``$HG_TAG``. Tag is local if ``$HG_LOCAL=1``, in repository if ``$HG_LOCAL=0``. ``pretxnopen`` Run before any new repository transaction is open. The reason for the transaction will be in ``$HG_TXNNAME`` and a unique identifier for the transaction will be in ``HG_TXNID``. A non-zero status will prevent the transaction from being opened. ``pretxnclose`` Run right before the transaction is actually finalized. Any repository change will be visible to the hook program. This lets you validate the transaction content or change it. Exit status 0 allows the commit to proceed. Non-zero status will cause the transaction to be rolled back. The reason for the transaction opening will be in ``$HG_TXNNAME`` and a unique identifier for the transaction will be in ``HG_TXNID``. The rest of the available data will vary according the transaction type. New changesets will add ``$HG_NODE`` (id of the first added changeset), ``$HG_NODE_LAST`` (id of the last added changeset), ``$HG_URL`` and ``$HG_SOURCE`` variables, bookmarks and phases changes will set ``HG_BOOKMARK_MOVED`` and ``HG_PHASES_MOVED`` to ``1``, etc. ``txnclose`` Run after any repository transaction has been committed. At this point, the transaction can no longer be rolled back. The hook will run after the lock is released. 
See :hg:`help config.pretxnclose` docs for details about available variables. ``txnabort`` Run when a transaction is aborted. See :hg:`help config.pretxnclose` docs for details about available variables. ``pretxnchangegroup`` Run after a changegroup has been added via push, pull or unbundle, but before the transaction has been committed. Changegroup is visible to hook program. This lets you validate incoming changes before accepting them. Passed the ID of the first new changeset in ``$HG_NODE`` and last in ``$HG_NODE_LAST``. Exit status 0 allows the transaction to commit. Non-zero status will cause the transaction to be rolled back and the push, pull or unbundle will fail. URL that was source of changes is in ``$HG_URL``. ``pretxncommit`` Run after a changeset has been created but the transaction not yet committed. Changeset is visible to hook program. This lets you validate commit message and changes. Exit status 0 allows the commit to proceed. Non-zero status will cause the transaction to be rolled back. ID of changeset is in ``$HG_NODE``. Parent changeset IDs are in ``$HG_PARENT1`` and ``$HG_PARENT2``. ``preupdate`` Run before updating the working directory. Exit status 0 allows the update to proceed. Non-zero status will prevent the update. Changeset ID of first new parent is in ``$HG_PARENT1``. If merge, ID of second new parent is in ``$HG_PARENT2``. ``listkeys`` Run after listing pushkeys (like bookmarks) in the repository. The key namespace is in ``$HG_NAMESPACE``. ``$HG_VALUES`` is a dictionary containing the keys and values. ``pushkey`` Run after a pushkey (like a bookmark) is added to the repository. The key namespace is in ``$HG_NAMESPACE``, the key is in ``$HG_KEY``, the old value (if any) is in ``$HG_OLD``, and the new value is in ``$HG_NEW``. ``tag`` Run after a tag is created. ID of tagged changeset is in ``$HG_NODE``. Name of tag is in ``$HG_TAG``. Tag is local if ``$HG_LOCAL=1``, in repository if ``$HG_LOCAL=0``. 
``update`` Run after updating the working directory. Changeset ID of first new parent is in ``$HG_PARENT1``. If merge, ID of second new parent is in ``$HG_PARENT2``. If the update succeeded, ``$HG_ERROR=0``. If the update failed (e.g. because conflicts not resolved), ``$HG_ERROR=1``. .. note:: It is generally better to use standard hooks rather than the generic pre- and post- command hooks as they are guaranteed to be called in the appropriate contexts for influencing transactions. Also, hooks like "commit" will be called in all contexts that generate a commit (e.g. tag) and not just the commit command. .. note:: Environment variables with empty values may not be passed to hooks on platforms such as Windows. As an example, ``$HG_PARENT2`` will have an empty value under Unix-like platforms for non-merge changesets, while it will not be available at all under Windows. The syntax for Python hooks is as follows:: hookname = python:modulename.submodule.callable hookname = python:/path/to/python/module.py:callable Python hooks are run within the Mercurial process. Each hook is called with at least three keyword arguments: a ui object (keyword ``ui``), a repository object (keyword ``repo``), and a ``hooktype`` keyword that tells what kind of hook is used. Arguments listed as environment variables above are passed as keyword arguments, with no ``HG_`` prefix, and names in lower case. If a Python hook returns a "true" value or raises an exception, this is treated as a failure. ``hostfingerprints`` -------------------- Fingerprints of the certificates of known HTTPS servers. A HTTPS connection to a server with a fingerprint configured here will only succeed if the servers certificate matches the fingerprint. This is very similar to how ssh known hosts works. The fingerprint is the SHA-1 hash value of the DER encoded certificate. The CA chain and web.cacerts is not used for servers with a fingerprint. 
For example:: [hostfingerprints] hg.intevation.de = fc:e2:8d:d9:51:cd:cb:c1:4d:18:6b:b7:44:8d:49:72:57:e6:cd:33 hg.intevation.org = fc:e2:8d:d9:51:cd:cb:c1:4d:18:6b:b7:44:8d:49:72:57:e6:cd:33 This feature is only supported when using Python 2.6 or later. ``http_proxy`` -------------- Used to access web-based Mercurial repositories through a HTTP proxy. ``host`` Host name and (optional) port of the proxy server, for example "myproxy:8000". ``no`` Optional. Comma-separated list of host names that should bypass the proxy. ``passwd`` Optional. Password to authenticate with at the proxy server. ``user`` Optional. User name to authenticate with at the proxy server. ``always`` Optional. Always use the proxy, even for localhost and any entries in ``http_proxy.no``. (default: False) ``merge-patterns`` ------------------ This section specifies merge tools to associate with particular file patterns. Tools matched here will take precedence over the default merge tool. Patterns are globs by default, rooted at the repository root. Example:: [merge-patterns] **.c = kdiff3 **.jpg = myimgmerge ``merge-tools`` --------------- This section configures external merge tools to use for file-level merges. This section has likely been preconfigured at install time. Use :hg:`config merge-tools` to check the existing configuration. Also see :hg:`help merge-tools` for more details. Example ``~/.hgrc``:: [merge-tools] # Override stock tool location kdiff3.executable = ~/bin/kdiff3 # Specify command line kdiff3.args = $base $local $other -o $output # Give higher priority kdiff3.priority = 1 # Changing the priority of preconfigured tool meld.priority = 0 # Disable a preconfigured tool vimdiff.disabled = yes # Define new tool myHtmlTool.args = -m $local $other $base $output myHtmlTool.regkey = Software\FooSoftware\HtmlMerge myHtmlTool.priority = 1 Supported arguments: ``priority`` The priority in which to evaluate this tool. 
(default: 0) ``executable`` Either just the name of the executable or its pathname. .. container:: windows On Windows, the path can use environment variables with ${ProgramFiles} syntax. (default: the tool name) ``args`` The arguments to pass to the tool executable. You can refer to the files being merged as well as the output file through these variables: ``$base``, ``$local``, ``$other``, ``$output``. The meaning of ``$local`` and ``$other`` can vary depending on which action is being performed. During and update or merge, ``$local`` represents the original state of the file, while ``$other`` represents the commit you are updating to or the commit you are merging with. During a rebase ``$local`` represents the destination of the rebase, and ``$other`` represents the commit being rebased. (default: ``$local $base $other``) ``premerge`` Attempt to run internal non-interactive 3-way merge tool before launching external tool. Options are ``true``, ``false``, ``keep`` or ``keep-merge3``. The ``keep`` option will leave markers in the file if the premerge fails. The ``keep-merge3`` will do the same but include information about the base of the merge in the marker (see internal :merge3 in :hg:`help merge-tools`). (default: True) ``binary`` This tool can merge binary files. (default: False, unless tool was selected by file pattern match) ``symlink`` This tool can merge symlinks. (default: False) ``check`` A list of merge success-checking options: ``changed`` Ask whether merge was successful when the merged file shows no changes. ``conflicts`` Check whether there are conflicts even though the tool reported success. ``prompt`` Always prompt for merge success, regardless of success reported by tool. ``fixeol`` Attempt to fix up EOL changes caused by the merge tool. (default: False) ``gui`` This tool requires a graphical interface to run. (default: False) .. container:: windows ``regkey`` Windows registry key which describes install location of this tool. 
Mercurial will search for this key first under ``HKEY_CURRENT_USER`` and then under ``HKEY_LOCAL_MACHINE``. (default: None) ``regkeyalt`` An alternate Windows registry key to try if the first key is not found. The alternate key uses the same ``regname`` and ``regappend`` semantics of the primary key. The most common use for this key is to search for 32bit applications on 64bit operating systems. (default: None) ``regname`` Name of value to read from specified registry key. (default: the unnamed (default) value) ``regappend`` String to append to the value read from the registry, typically the executable name of the tool. (default: None) ``patch`` --------- Settings used when applying patches, for instance through the 'import' command or with Mercurial Queues extension. ``eol`` When set to 'strict' patch content and patched files end of lines are preserved. When set to ``lf`` or ``crlf``, both files end of lines are ignored when patching and the result line endings are normalized to either LF (Unix) or CRLF (Windows). When set to ``auto``, end of lines are again ignored while patching but line endings in patched files are normalized to their original setting on a per-file basis. If target file does not exist or has no end of line, patch line endings are preserved. (default: strict) ``fuzz`` The number of lines of 'fuzz' to allow when applying patches. This controls how much context the patcher is allowed to ignore when trying to apply a patch. (default: 2) ``paths`` --------- Assigns symbolic names and behavior to repositories. Options are symbolic names defining the URL or directory that is the location of the repository. Example:: [paths] my_server = https://example.com/my_repo local_path = /home/me/repo These symbolic names can be used from the command line. To pull from ``my_server``: :hg:`pull my_server`. To push to ``local_path``: :hg:`push local_path`. Options containing colons (``:``) denote sub-options that can influence behavior for that specific path. 
Example:: [paths] my_server = https://example.com/my_path my_server:pushurl = ssh://example.com/my_path The following sub-options can be defined: ``pushurl`` The URL to use for push operations. If not defined, the location defined by the path's main entry is used. The following special named paths exist: ``default`` The URL or directory to use when no source or remote is specified. :hg:`clone` will automatically define this path to the location the repository was cloned from. ``default-push`` (deprecated) The URL or directory for the default :hg:`push` location. ``default:pushurl`` should be used instead. ``phases`` ---------- Specifies default handling of phases. See :hg:`help phases` for more information about working with phases. ``publish`` Controls draft phase behavior when working as a server. When true, pushed changesets are set to public in both client and server and pulled or cloned changesets are set to public in the client. (default: True) ``new-commit`` Phase of newly-created commits. (default: draft) ``checksubrepos`` Check the phase of the current revision of each subrepository. Allowed values are "ignore", "follow" and "abort". For settings other than "ignore", the phase of the current revision of each subrepository is checked before committing the parent repository. If any of those phases is greater than the phase of the parent repository (e.g. if a subrepo is in a "secret" phase while the parent repo is in "draft" phase), the commit is either aborted (if checksubrepos is set to "abort") or the higher phase is used for the parent repository commit (if set to "follow"). (default: follow) ``profiling`` ------------- Specifies profiling type, format, and file output. Two profilers are supported: an instrumenting profiler (named ``ls``), and a sampling profiler (named ``stat``). 
In this section description, 'profiling data' stands for the raw data collected during profiling, while 'profiling report' stands for a statistical text report generated from the profiling data. The profiling is done using lsprof. ``type`` The type of profiler to use. (default: ls) ``ls`` Use Python's built-in instrumenting profiler. This profiler works on all platforms, but each line number it reports is the first line of a function. This restriction makes it difficult to identify the expensive parts of a non-trivial function. ``stat`` Use a third-party statistical profiler, statprof. This profiler currently runs only on Unix systems, and is most useful for profiling commands that run for longer than about 0.1 seconds. ``format`` Profiling format. Specific to the ``ls`` instrumenting profiler. (default: text) ``text`` Generate a profiling report. When saving to a file, it should be noted that only the report is saved, and the profiling data is not kept. ``kcachegrind`` Format profiling data for kcachegrind use: when saving to a file, the generated file can directly be loaded into kcachegrind. ``frequency`` Sampling frequency. Specific to the ``stat`` sampling profiler. (default: 1000) ``output`` File path where profiling data or report should be saved. If the file exists, it is replaced. (default: None, data is printed on stderr) ``sort`` Sort field. Specific to the ``ls`` instrumenting profiler. One of ``callcount``, ``reccallcount``, ``totaltime`` and ``inlinetime``. (default: inlinetime) ``limit`` Number of lines to show. Specific to the ``ls`` instrumenting profiler. (default: 30) ``nested`` Show at most this number of lines of drill-down info after each main entry. This can help explain the difference between Total and Inline. Specific to the ``ls`` instrumenting profiler. (default: 5) ``progress`` ------------ Mercurial commands can draw progress bars that are as informative as possible. 
Some progress bars only offer indeterminate information, while others have a definite end point. ``delay`` Number of seconds (float) before showing the progress bar. (default: 3) ``changedelay`` Minimum delay before showing a new topic. When set to less than 3 * refresh, that value will be used instead. (default: 1) ``refresh`` Time in seconds between refreshes of the progress bar. (default: 0.1) ``format`` Format of the progress bar. Valid entries for the format field are ``topic``, ``bar``, ``number``, ``unit``, ``estimate``, ``speed``, and ``item``. ``item`` defaults to the last 20 characters of the item, but this can be changed by adding either ``-`` which would take the last num characters, or ``+`` for the first num characters. (default: topic bar number estimate) ``width`` If set, the maximum width of the progress information (that is, min(width, term width) will be used). ``clear-complete`` Clear the progress bar after it's done. (default: True) ``disable`` If true, don't show a progress bar. ``assume-tty`` If true, ALWAYS show a progress bar, unless disable is given. ``rebase`` ---------- ``allowdivergence`` Default to False, when True allow creating divergence when performing rebase of obsolete changesets. ``revsetalias`` --------------- Alias definitions for revsets. See :hg:`help revsets` for details. ``server`` ---------- Controls generic server settings. ``uncompressed`` Whether to allow clients to clone a repository using the uncompressed streaming protocol. This transfers about 40% more data than a regular clone, but uses less memory and CPU on both server and client. Over a LAN (100 Mbps or better) or a very fast WAN, an uncompressed streaming clone is a lot faster (~10x) than a regular clone. Over most WAN connections (anything slower than about 6 Mbps), uncompressed streaming is slower, because of the extra data transfer overhead. This mode will also temporarily hold the write lock while determining what data to transfer. 
(default: True) ``preferuncompressed`` When set, clients will try to use the uncompressed streaming protocol. (default: False) ``validate`` Whether to validate the completeness of pushed changesets by checking that all new file revisions specified in manifests are present. (default: False) ``maxhttpheaderlen`` Instruct HTTP clients not to send request headers longer than this many bytes. (default: 1024) ``bundle1`` Whether to allow clients to push and pull using the legacy bundle1 exchange format. (default: True) ``bundle1gd`` Like ``bundle1`` but only used if the repository is using the *generaldelta* storage format. (default: True) ``bundle1.push`` Whether to allow clients to push using the legacy bundle1 exchange format. (default: True) ``bundle1gd.push`` Like ``bundle1.push`` but only used if the repository is using the *generaldelta* storage format. (default: True) ``bundle1.pull`` Whether to allow clients to pull using the legacy bundle1 exchange format. (default: True) ``bundle1gd.pull`` Like ``bundle1.pull`` but only used if the repository is using the *generaldelta* storage format. (default: True) Large repositories using the *generaldelta* storage format should consider setting this option because converting *generaldelta* repositories to the exchange format required by the bundle1 data format can consume a lot of CPU. ``smtp`` -------- Configuration for extensions that need to send email messages. ``host`` Host name of mail server, e.g. "mail.example.com". ``port`` Optional. Port to connect to on mail server. (default: 465 if ``tls`` is smtps; 25 otherwise) ``tls`` Optional. Method to enable TLS when connecting to mail server: starttls, smtps or none. (default: none) ``verifycert`` Optional. Verification for the certificate of mail server, when ``tls`` is starttls or smtps. "strict", "loose" or False. 
For "strict" or "loose", the certificate is verified as same as the verification for HTTPS connections (see ``[hostfingerprints]`` and ``[web] cacerts`` also). For "strict", sending email is also aborted, if there is no configuration for mail server in ``[hostfingerprints]`` and ``[web] cacerts``. --insecure for :hg:`email` overwrites this as "loose". (default: strict) ``username`` Optional. User name for authenticating with the SMTP server. (default: None) ``password`` Optional. Password for authenticating with the SMTP server. If not specified, interactive sessions will prompt the user for a password; non-interactive sessions will fail. (default: None) ``local_hostname`` Optional. The hostname that the sender can use to identify itself to the MTA. ``subpaths`` ------------ Subrepository source URLs can go stale if a remote server changes name or becomes temporarily unavailable. This section lets you define rewrite rules of the form:: = where ``pattern`` is a regular expression matching a subrepository source URL and ``replacement`` is the replacement string used to rewrite it. Groups can be matched in ``pattern`` and referenced in ``replacements``. For instance:: http://server/(.*)-hg/ = http://hg.server/\1/ rewrites ``http://server/foo-hg/`` into ``http://hg.server/foo/``. Relative subrepository paths are first made absolute, and the rewrite rules are then applied on the full (absolute) path. The rules are applied in definition order. ``trusted`` ----------- Mercurial will not use the settings in the ``.hg/hgrc`` file from a repository if it doesn't belong to a trusted user or to a trusted group, as various hgrc features allow arbitrary commands to be run. This issue is often encountered when configuring hooks or extensions for shared repositories or servers. However, the web interface will use some safe settings from the ``[web]`` section. This section specifies what users and groups are trusted. The current user is always trusted. 
To trust everybody, list a user or a group with name ``*``. These settings must be placed in an *already-trusted file* to take effect, such as ``$HOME/.hgrc`` of the user or service running Mercurial. ``users`` Comma-separated list of trusted users. ``groups`` Comma-separated list of trusted groups. ``ui`` ------ User interface controls. ``archivemeta`` Whether to include the .hg_archival.txt file containing meta data (hashes for the repository base and for tip) in archives created by the :hg:`archive` command or downloaded via hgweb. (default: True) ``askusername`` Whether to prompt for a username when committing. If True, and neither ``$HGUSER`` nor ``$EMAIL`` has been specified, then the user will be prompted to enter a username. If no username is entered, the default ``USER@HOST`` is used instead. (default: False) ``clonebundles`` Whether the "clone bundles" feature is enabled. When enabled, :hg:`clone` may download and apply a server-advertised bundle file from a URL instead of using the normal exchange mechanism. This can likely result in faster and more reliable clones. (default: True) ``clonebundlefallback`` Whether failure to apply an advertised "clone bundle" from a server should result in fallback to a regular clone. This is disabled by default because servers advertising "clone bundles" often do so to reduce server load. If advertised bundles start mass failing and clients automatically fall back to a regular clone, this would add significant and unexpected load to the server since the server is expecting clone operations to be offloaded to pre-generated bundles. Failing fast (the default behavior) ensures clients don't overwhelm the server when "clone bundle" application fails. (default: False) ``clonebundleprefers`` Defines preferences for which "clone bundles" to use. Servers advertising "clone bundles" may advertise multiple available bundles. Each bundle may have different attributes, such as the bundle type and compression format. 
This option is used to prefer a particular bundle over another. The following keys are defined by Mercurial: BUNDLESPEC A bundle type specifier. These are strings passed to :hg:`bundle -t`. e.g. ``gzip-v2`` or ``bzip2-v1``. COMPRESSION The compression format of the bundle. e.g. ``gzip`` and ``bzip2``. Server operators may define custom keys. Example values: ``COMPRESSION=bzip2``, ``BUNDLESPEC=gzip-v2, COMPRESSION=gzip``. By default, the first bundle advertised by the server is used. ``commitsubrepos`` Whether to commit modified subrepositories when committing the parent repository. If False and one subrepository has uncommitted changes, abort the commit. (default: False) ``debug`` Print debugging information. (default: False) ``editor`` The editor to use during a commit. (default: ``$EDITOR`` or ``vi``) ``fallbackencoding`` Encoding to try if it's not possible to decode the changelog using UTF-8. (default: ISO-8859-1) ``graphnodetemplate`` The template used to print changeset nodes in an ASCII revision graph. (default: ``{graphnode}``) ``ignore`` A file to read per-user ignore patterns from. This file should be in the same format as a repository-wide .hgignore file. Filenames are relative to the repository root. This option supports hook syntax, so if you want to specify multiple ignore files, you can do so by setting something like ``ignore.other = ~/.hgignore2``. For details of the ignore file format, see the ``hgignore(5)`` man page. ``interactive`` Allow to prompt the user. (default: True) ``logtemplate`` Template string for commands that print changesets. ``merge`` The conflict resolution program to use during a manual merge. For more information on merge tools see :hg:`help merge-tools`. For configuring merge tools see the ``[merge-tools]`` section. ``mergemarkers`` Sets the merge conflict marker label styling. The ``detailed`` style uses the ``mergemarkertemplate`` setting to style the labels. 
The ``basic`` style just uses 'local' and 'other' as the marker label. One of ``basic`` or ``detailed``. (default: ``basic``) ``mergemarkertemplate`` The template used to print the commit description next to each conflict marker during merge conflicts. See :hg:`help templates` for the template format. Defaults to showing the hash, tags, branches, bookmarks, author, and the first line of the commit description. If you use non-ASCII characters in names for tags, branches, bookmarks, authors, and/or commit descriptions, you must pay attention to encodings of managed files. At template expansion, non-ASCII characters use the encoding specified by the ``--encoding`` global option, ``HGENCODING`` or other environment variables that govern your locale. If the encoding of the merge markers is different from the encoding of the merged files, serious problems may occur. ``origbackuppath`` The path to a directory used to store generated .orig files. If the path is not a directory, one will be created. ``patch`` An optional external tool that ``hg import`` and some extensions will use for applying patches. By default Mercurial uses an internal patch utility. The external tool must work as the common Unix ``patch`` program. In particular, it must accept a ``-p`` argument to strip patch headers, a ``-d`` argument to specify the current directory, a file name to patch, and a patch file to take from stdin. It is possible to specify a patch tool together with extra arguments. For example, setting this option to ``patch --merge`` will use the ``patch`` program with its 2-way merge option. ``portablefilenames`` Check for portable filenames. Can be ``warn``, ``ignore`` or ``abort``. (default: ``warn``) ``warn`` Print a warning message on POSIX platforms, if a file with a non-portable filename is added (e.g. 
a file with a name that can't be created on Windows because it contains reserved parts like ``AUX``, reserved characters like ``:``, or would cause a case collision with an existing file). ``ignore`` Don't print a warning. ``abort`` The command is aborted. ``true`` Alias for ``warn``. ``false`` Alias for ``ignore``. .. container:: windows On Windows, this configuration option is ignored and the command aborted. ``quiet`` Reduce the amount of output printed. (default: False) ``remotecmd`` Remote command to use for clone/push/pull operations. (default: ``hg``) ``report_untrusted`` Warn if a ``.hg/hgrc`` file is ignored due to not being owned by a trusted user or group. (default: True) ``slash`` Display paths using a slash (``/``) as the path separator. This only makes a difference on systems where the default path separator is not the slash character (e.g. Windows uses the backslash character (``\``)). (default: False) ``statuscopies`` Display copies in the status command. ``ssh`` Command to use for SSH connections. (default: ``ssh``) ``strict`` Require exact command names, instead of allowing unambiguous abbreviations. (default: False) ``style`` Name of style to use for command output. ``supportcontact`` A URL where users should report a Mercurial traceback. Use this if you are a large organisation with its own Mercurial deployment process and crash reports should be addressed to your internal support. ``timeout`` The timeout used when a lock is held (in seconds), a negative value means no timeout. (default: 600) ``traceback`` Mercurial always prints a traceback when an unknown exception occurs. Setting this to True will make Mercurial print a traceback on all exceptions, even those recognized by Mercurial (such as IOError or MemoryError). (default: False) ``username`` The committer of a changeset created when running "commit". Typically a person's name and email address, e.g. ``Fred Widget <fred@widget.org>``. Environment variables in the username are expanded. 
(default: ``$EMAIL`` or ``username@hostname``. If the username in hgrc is empty, e.g. if the system admin set ``username =`` in the system hgrc, it has to be specified manually or in a different hgrc file) ``verbose`` Increase the amount of output printed. (default: False) ``web`` ------- Web interface configuration. The settings in this section apply to both the builtin webserver (started by :hg:`serve`) and the script you run through a webserver (``hgweb.cgi`` and the derivatives for FastCGI and WSGI). The Mercurial webserver does no authentication (it does not prompt for usernames and passwords to validate *who* users are), but it does do authorization (it grants or denies access for *authenticated users* based on settings in this section). You must either configure your webserver to do authentication for you, or disable the authorization checks. For a quick setup in a trusted environment, e.g., a private LAN, where you want it to accept pushes from anybody, you can use the following command line:: $ hg --config web.allow_push=* --config web.push_ssl=False serve Note that this will allow anybody to push anything to the server and that this should not be used for public servers. The full set of options is: ``accesslog`` Where to output the access log. (default: stdout) ``address`` Interface address to bind to. (default: all) ``allow_archive`` List of archive format (bz2, gz, zip) allowed for downloading. (default: empty) ``allowbz2`` (DEPRECATED) Whether to allow .tar.bz2 downloading of repository revisions. (default: False) ``allowgz`` (DEPRECATED) Whether to allow .tar.gz downloading of repository revisions. (default: False) ``allowpull`` Whether to allow pulling from the repository. (default: True) ``allow_push`` Whether to allow pushing to the repository. If empty or not set, pushing is not allowed. If the special value ``*``, any remote user can push, including unauthenticated users. 
Otherwise, the remote user must have been authenticated, and the authenticated user name must be present in this list. The contents of the allow_push list are examined after the deny_push list. ``allow_read`` If the user has not already been denied repository access due to the contents of deny_read, this list determines whether to grant repository access to the user. If this list is not empty, and the user is unauthenticated or not present in the list, then access is denied for the user. If the list is empty or not set, then access is permitted to all users by default. Setting allow_read to the special value ``*`` is equivalent to it not being set (i.e. access is permitted to all users). The contents of the allow_read list are examined after the deny_read list. ``allowzip`` (DEPRECATED) Whether to allow .zip downloading of repository revisions. This feature creates temporary files. (default: False) ``archivesubrepos`` Whether to recurse into subrepositories when archiving. (default: False) ``baseurl`` Base URL to use when publishing URLs in other locations, so third-party tools like email notification hooks can construct URLs. Example: ``http://hgserver/repos/``. ``cacerts`` Path to file containing a list of PEM encoded certificate authority certificates. Environment variables and ``~user`` constructs are expanded in the filename. If specified on the client, then it will verify the identity of remote HTTPS servers with these certificates. This feature is only supported when using Python 2.6 or later. If you wish to use it with earlier versions of Python, install the backported version of the ssl library that is available from ``http://pypi.python.org``. To disable SSL verification temporarily, specify ``--insecure`` from command line. You can use OpenSSL's CA certificate file if your platform has one. On most Linux systems this will be ``/etc/ssl/certs/ca-certificates.crt``. Otherwise you will have to generate this file manually. 
The form must be as follows:: -----BEGIN CERTIFICATE----- ... (certificate in base64 PEM encoding) ... -----END CERTIFICATE----- -----BEGIN CERTIFICATE----- ... (certificate in base64 PEM encoding) ... -----END CERTIFICATE----- ``cache`` Whether to support caching in hgweb. (default: True) ``certificate`` Certificate to use when running :hg:`serve`. ``collapse`` With ``descend`` enabled, repositories in subdirectories are shown at a single level alongside repositories in the current path. With ``collapse`` also enabled, repositories residing at a deeper level than the current path are grouped behind navigable directory entries that lead to the locations of these repositories. In effect, this setting collapses each collection of repositories found within a subdirectory into a single entry for that subdirectory. (default: False) ``comparisoncontext`` Number of lines of context to show in side-by-side file comparison. If negative or the value ``full``, whole files are shown. (default: 5) This setting can be overridden by a ``context`` request parameter to the ``comparison`` command, taking the same values. ``contact`` Name or email address of the person in charge of the repository. (default: ui.username or ``$EMAIL`` or "unknown" if unset or empty) ``deny_push`` Whether to deny pushing to the repository. If empty or not set, push is not denied. If the special value ``*``, all remote users are denied push. Otherwise, unauthenticated users are all denied, and any authenticated user name present in this list is also denied. The contents of the deny_push list are examined before the allow_push list. ``deny_read`` Whether to deny reading/viewing of the repository. If this list is not empty, unauthenticated users are all denied, and any authenticated user name present in this list is also denied access to the repository. If set to the special value ``*``, all remote users are denied access (rarely needed ;). 
If deny_read is empty or not set, the determination of repository access depends on the presence and content of the allow_read list (see description). If both deny_read and allow_read are empty or not set, then access is permitted to all users by default. If the repository is being served via hgwebdir, denied users will not be able to see it in the list of repositories. The contents of the deny_read list have priority over (are examined before) the contents of the allow_read list. ``descend`` hgwebdir indexes will not descend into subdirectories. Only repositories directly in the current path will be shown (other repositories are still available from the index corresponding to their containing path). ``description`` Textual description of the repository's purpose or contents. (default: "unknown") ``encoding`` Character encoding name. (default: the current locale charset) Example: "UTF-8". ``errorlog`` Where to output the error log. (default: stderr) ``guessmime`` Control MIME types for raw download of file content. Set to True to let hgweb guess the content type from the file extension. This will serve HTML files as ``text/html`` and might allow cross-site scripting attacks when serving untrusted repositories. (default: False) ``hidden`` Whether to hide the repository in the hgwebdir index. (default: False) ``ipv6`` Whether to use IPv6. (default: False) ``logoimg`` File name of the logo image that some templates display on each page. The file name is relative to ``staticurl``. That is, the full path to the logo image is "staticurl/logoimg". If unset, ``hglogo.png`` will be used. ``logourl`` Base URL to use for logos. If unset, ``https://mercurial-scm.org/`` will be used. ``maxchanges`` Maximum number of changes to list on the changelog. (default: 10) ``maxfiles`` Maximum number of files to list per changeset. (default: 10) ``maxshortchanges`` Maximum number of changes to list on the shortlog, graph or filelog pages. 
(default: 60) ``name`` Repository name to use in the web interface. (default: current working directory) ``port`` Port to listen on. (default: 8000) ``prefix`` Prefix path to serve from. (default: '' (server root)) ``push_ssl`` Whether to require that inbound pushes be transported over SSL to prevent password sniffing. (default: True) ``refreshinterval`` How frequently directory listings re-scan the filesystem for new repositories, in seconds. This is relevant when wildcards are used to define paths. Depending on how much filesystem traversal is required, refreshing may negatively impact performance. Values less than or equal to 0 always refresh. (default: 20) ``staticurl`` Base URL to use for static files. If unset, static files (e.g. the hgicon.png favicon) will be served by the CGI script itself. Use this setting to serve them directly with the HTTP server. Example: ``http://hgserver/static/``. ``stripes`` How many lines a "zebra stripe" should span in multi-line output. Set to 0 to disable. (default: 1) ``style`` Which template map style to use. The available options are the names of subdirectories in the HTML templates path. (default: ``paper``) Example: ``monoblue``. ``templates`` Where to find the HTML templates. The default path to the HTML templates can be obtained from ``hg debuginstall``. ``websub`` ---------- Web substitution filter definition. You can use this section to define a set of regular expression substitution patterns which let you automatically modify the hgweb server output. The default hgweb templates only apply these substitution patterns on the revision description fields. You can apply them anywhere you want when you create your own templates by adding calls to the "websub" filter (usually after calling the "escape" filter). This can be used, for example, to convert issue references to links to your issue tracker, or to convert "markdown-like" syntax into HTML (see the examples below). 
Each entry in this section names a substitution filter. The value of each entry defines the substitution expression itself. The websub expressions follow the old interhg extension syntax, which in turn imitates the Unix sed replacement syntax:: patternname = s/SEARCH_REGEX/REPLACE_EXPRESSION/[i] You can use any separator other than "/". The final "i" is optional and indicates that the search must be case insensitive. Examples:: [websub] issues = s|issue(\d+)|<a href="http://bts.example.org/issue\1">issue\1</a>|i italic = s/\b_(\S+)_\b/<i>\1<\/i>/ bold = s/\*\b(\S+)\b\*/<b>\1<\/b>/ ``worker`` ---------- Parallel master/worker configuration. We currently perform working directory updates in parallel on Unix-like systems, which greatly helps performance. ``numcpus`` Number of CPUs to use for parallel operations. A zero or negative value is treated as ``use the default``. (default: 4 or the number of CPUs on the system, whichever is larger) ``backgroundclose`` Whether to enable closing file handles on background threads during certain operations. Some platforms aren't very efficient at closing file handles that have been written or appended to. By performing file closing on background threads, file write rate can increase substantially. (default: true on Windows, false elsewhere) ``backgroundcloseminfilecount`` Minimum number of files required to trigger background file closing. Operations not writing this many files won't start background close threads. (default: 2048) ``backgroundclosemaxqueue`` The maximum number of opened file handles waiting to be closed in the background. This option only has an effect if ``backgroundclose`` is enabled. (default: 384) ``backgroundclosethreadcount`` Number of threads to process background file closes. Only relevant if ``backgroundclose`` is enabled. (default: 4) mercurial-3.7.3/mercurial/help/hgweb.txt0000644000175000017500000000644712676531524017705 0ustar mpmmpm00000000000000Mercurial's internal web server, hgweb, can serve either a single repository, or a tree of repositories. 
In the second case, repository paths and global options can be defined using a dedicated configuration file common to :hg:`serve`, ``hgweb.wsgi``, ``hgweb.cgi`` and ``hgweb.fcgi``. This file uses the same syntax as other Mercurial configuration files but recognizes only the following sections: - web - paths - collections The ``web`` options are thoroughly described in :hg:`help config`. The ``paths`` section maps URL paths to paths of repositories in the filesystem. hgweb will not expose the filesystem directly - only Mercurial repositories can be published and only according to the configuration. The left hand side is the path in the URL. Note that hgweb reserves subpaths like ``rev`` or ``file``, try using different names for nested repositories to avoid confusing effects. The right hand side is the path in the filesystem. If the specified path ends with ``*`` or ``**`` the filesystem will be searched recursively for repositories below that point. With ``*`` it will not recurse into the repositories it finds (except for ``.hg/patches``). With ``**`` it will also search inside repository working directories and possibly find subrepositories. In this example:: [paths] /projects/a = /srv/tmprepos/a /projects/b = c:/repos/b / = /srv/repos/* /user/bob = /home/bob/repos/** - The first two entries make two repositories in different directories appear under the same directory in the web interface - The third entry will publish every Mercurial repository found in ``/srv/repos/``, for instance the repository ``/srv/repos/quux/`` will appear as ``http://server/quux/`` - The fourth entry will publish both ``http://server/user/bob/quux/`` and ``http://server/user/bob/quux/testsubrepo/`` The ``collections`` section is deprecated and has been superseded by ``paths``. 
URLs and Common Arguments ========================= URLs under each repository have the form ``/{command}[/{arguments}]`` where ``{command}`` represents the name of a command or handler and ``{arguments}`` represents any number of additional URL parameters to that command. The web server has a default style associated with it. Styles map to a collection of named templates. Each template is used to render a specific piece of data, such as a changeset or diff. The style for the current request can be overwritten two ways. First, if ``{command}`` contains a hyphen (``-``), the text before the hyphen defines the style. For example, ``/atom-log`` will render the ``log`` command handler with the ``atom`` style. The second way to set the style is with the ``style`` query string argument. For example, ``/log?style=atom``. The hyphenated URL parameter is preferred. Not all templates are available for all styles. Attempting to use a style that doesn't have all templates defined may result in an error rendering the page. Many commands take a ``{revision}`` URL parameter. This defines the changeset to operate on. This is commonly specified as the short, 12 digit hexadecimal abbreviation for the full 40 character unique revision identifier. However, any value described by :hg:`help revisions` typically works. Commands and URLs ================= The following web commands and their URLs are available: .. webcommandsmarker mercurial-3.7.3/mercurial/help/hgignore.5.txt0000644000175000017500000000131412676531525020547 0ustar mpmmpm00000000000000========== hgignore ========== --------------------------------- syntax for Mercurial ignore files --------------------------------- :Author: Vadim Gelfer :Organization: Mercurial :Manual section: 5 :Manual group: Mercurial Manual .. include:: hgignore.5.gendoc.txt Author ====== Vadim Gelfer Mercurial was written by Matt Mackall . See Also ======== |hg(1)|_, |hgrc(5)|_ Copying ======= This manual page is copyright 2006 Vadim Gelfer. 
Mercurial is copyright 2005-2016 Matt Mackall. Free use of this software is granted under the terms of the GNU General Public License version 2 or any later version. .. include:: common.txt mercurial-3.7.3/mercurial/help/hgrc.5.txt0000644000175000017500000000142412676531525017672 0ustar mpmmpm00000000000000====== hgrc ====== --------------------------------- configuration files for Mercurial --------------------------------- :Author: Bryan O'Sullivan :Organization: Mercurial :Manual section: 5 :Manual group: Mercurial Manual .. contents:: :backlinks: top :class: htmlonly Description =========== .. include:: hgrc.5.gendoc.txt Author ====== Bryan O'Sullivan . Mercurial was written by Matt Mackall . See Also ======== |hg(1)|_, |hgignore(5)|_ Copying ======= This manual page is copyright 2005 Bryan O'Sullivan. Mercurial is copyright 2005-2016 Matt Mackall. Free use of this software is granted under the terms of the GNU General Public License version 2 or any later version. .. include:: common.txt mercurial-3.7.3/mercurial/help/merge-tools.txt0000644000175000017500000000715312676531524021045 0ustar mpmmpm00000000000000To merge files Mercurial uses merge tools. A merge tool combines two different versions of a file into a merged file. Merge tools are given the two files and the greatest common ancestor of the two file versions, so they can determine the changes made on both branches. Merge tools are used both for :hg:`resolve`, :hg:`merge`, :hg:`update`, :hg:`backout` and in several extensions. Usually, the merge tool tries to automatically reconcile the files by combining all non-overlapping changes that occurred separately in the two different evolutions of the same initial base file. Furthermore, some interactive merge programs make it easier to manually resolve conflicting merges, either in a graphical way, or by inserting some conflict markers. Mercurial does not include any interactive merge programs but relies on external tools for that. 
Available merge tools ===================== External merge tools and their properties are configured in the merge-tools configuration section - see hgrc(5) - but they can often just be named by their executable. A merge tool is generally usable if its executable can be found on the system and if it can handle the merge. The executable is found if it is an absolute or relative executable path or the name of an application in the executable search path. The tool is assumed to be able to handle the merge if it can handle symlinks if the file is a symlink, if it can handle binary files if the file is binary, and if a GUI is available if the tool requires a GUI. There are some internal merge tools which can be used. The internal merge tools are: .. internaltoolsmarker Internal tools are always available and do not require a GUI but will by default not handle symlinks or binary files. Choosing a merge tool ===================== Mercurial uses these rules when deciding which merge tool to use: 1. If a tool has been specified with the --tool option to merge or resolve, it is used. If it is the name of a tool in the merge-tools configuration, its configuration is used. Otherwise the specified tool must be executable by the shell. 2. If the ``HGMERGE`` environment variable is present, its value is used and must be executable by the shell. 3. If the filename of the file to be merged matches any of the patterns in the merge-patterns configuration section, the first usable merge tool corresponding to a matching pattern is used. Here, binary capabilities of the merge tool are not considered. 4. If ui.merge is set it will be considered next. If the value is not the name of a configured tool, the specified value is used and must be executable by the shell. Otherwise the named tool is used if it is usable. 5. If any usable merge tools are present in the merge-tools configuration section, the one with the highest priority is used. 6. 
If a program named ``hgmerge`` can be found on the system, it is used - but it will by default not be used for symlinks and binary files. 7. If the file to be merged is not binary and is not a symlink, then internal ``:merge`` is used. 8. The merge of the file fails and must be resolved before commit. .. note:: After selecting a merge program, Mercurial will by default attempt to merge the files using a simple merge algorithm first. Only if it doesn't succeed because of conflicting changes Mercurial will actually execute the merge program. Whether to use the simple merge algorithm first can be controlled by the premerge setting of the merge tool. Premerge is enabled by default unless the file is binary or a symlink. See the merge-tools and ui sections of hgrc(5) for details on the configuration of merge tools. mercurial-3.7.3/mercurial/help/environment.txt0000644000175000017500000000745412676531524021160 0ustar mpmmpm00000000000000HG Path to the 'hg' executable, automatically passed when running hooks, extensions or external tools. If unset or empty, this is the hg executable's name if it's frozen, or an executable named 'hg' (with %PATHEXT% [defaulting to COM/EXE/BAT/CMD] extensions on Windows) is searched. HGEDITOR This is the name of the editor to run when committing. See EDITOR. (deprecated, use configuration file) HGENCODING This overrides the default locale setting detected by Mercurial. This setting is used to convert data including usernames, changeset descriptions, tag names, and branches. This setting can be overridden with the --encoding command-line option. HGENCODINGMODE This sets Mercurial's behavior for handling unknown characters while transcoding user input. The default is "strict", which causes Mercurial to abort if it can't map a character. Other settings include "replace", which replaces unknown characters, and "ignore", which drops them. This setting can be overridden with the --encodingmode command-line option. 
HGENCODINGAMBIGUOUS This sets Mercurial's behavior for handling characters with "ambiguous" widths like accented Latin characters with East Asian fonts. By default, Mercurial assumes ambiguous characters are narrow, set this variable to "wide" if such characters cause formatting problems. HGMERGE An executable to use for resolving merge conflicts. The program will be executed with three arguments: local file, remote file, ancestor file. (deprecated, use configuration file) HGRCPATH A list of files or directories to search for configuration files. Item separator is ":" on Unix, ";" on Windows. If HGRCPATH is not set, platform default search path is used. If empty, only the .hg/hgrc from the current repository is read. For each element in HGRCPATH: - if it's a directory, all files ending with .rc are added - otherwise, the file itself will be added HGPLAIN When set, this disables any configuration settings that might change Mercurial's default output. This includes encoding, defaults, verbose mode, debug mode, quiet mode, tracebacks, and localization. This can be useful when scripting against Mercurial in the face of existing user configuration. Equivalent options set via command line flags or environment variables are not overridden. HGPLAINEXCEPT This is a comma-separated list of features to preserve when HGPLAIN is enabled. Currently the following values are supported: ``alias`` Don't remove aliases. ``i18n`` Preserve internationalization. ``revsetalias`` Don't remove revset aliases. Setting HGPLAINEXCEPT to anything (even an empty string) will enable plain mode. HGUSER This is the string used as the author of a commit. If not set, available values will be considered in this order: - HGUSER (deprecated) - configuration files from the HGRCPATH - EMAIL - interactive prompt - LOGNAME (with ``@hostname`` appended) (deprecated, use configuration file) EMAIL May be used as the author of a commit; see HGUSER. LOGNAME May be used as the author of a commit; see HGUSER. 
VISUAL This is the name of the editor to use when committing. See EDITOR. EDITOR Sometimes Mercurial needs to open a text file in an editor for a user to modify, for example when writing commit messages. The editor it uses is determined by looking at the environment variables HGEDITOR, VISUAL and EDITOR, in that order. The first non-empty one is chosen. If all of them are empty, the editor defaults to 'vi'. PYTHONPATH This is used by Python to find imported modules and may need to be set appropriately if this Mercurial is not installed system-wide. mercurial-3.7.3/mercurial/help/urls.txt0000644000175000017500000000442112676531525017571 0ustar mpmmpm00000000000000Valid URLs are of the form:: local/filesystem/path[#revision] file://local/filesystem/path[#revision] http://[user[:pass]@]host[:port]/[path][#revision] https://[user[:pass]@]host[:port]/[path][#revision] ssh://[user@]host[:port]/[path][#revision] Paths in the local filesystem can either point to Mercurial repositories or to bundle files (as created by :hg:`bundle` or :hg:`incoming --bundle`). See also :hg:`help paths`. An optional identifier after # indicates a particular branch, tag, or changeset to use from the remote repository. See also :hg:`help revisions`. Some features, such as pushing to http:// and https:// URLs are only possible if the feature is explicitly enabled on the remote Mercurial server. Note that the security of HTTPS URLs depends on proper configuration of web.cacerts. Some notes about using SSH with Mercurial: - SSH requires an accessible shell account on the destination machine and a copy of hg in the remote path or specified with as remotecmd. - path is relative to the remote user's home directory by default. 
Use an extra slash at the start of a path to specify an absolute path:: ssh://example.com//tmp/repository - Mercurial doesn't use its own compression via SSH; the right thing to do is to configure it in your ~/.ssh/config, e.g.:: Host *.mylocalnetwork.example.com Compression no Host * Compression yes Alternatively specify "ssh -C" as your ssh command in your configuration file or with the --ssh command line option. These URLs can all be stored in your configuration file with path aliases under the [paths] section like so:: [paths] alias1 = URL1 alias2 = URL2 ... You can then use the alias for any command that uses a URL (for example :hg:`pull alias1` will be treated as :hg:`pull URL1`). Two path aliases are special because they are used as defaults when you do not provide the URL to a command: default: When you create a repository with hg clone, the clone command saves the location of the source repository as the new repository's 'default' path. This is then used when you omit path from push- and pull-like commands (including incoming and outgoing). default-push: The push command will look for a path named 'default-push', and prefer it over 'default' if both are defined. mercurial-3.7.3/mercurial/help/patterns.txt0000644000175000017500000000542512676531525020451 0ustar mpmmpm00000000000000Mercurial accepts several notations for identifying one or more files at a time. By default, Mercurial treats filenames as shell-style extended glob patterns. Alternate pattern notations must be specified explicitly. .. note:: Patterns specified in ``.hgignore`` are not rooted. Please see :hg:`help hgignore` for details. To use a plain path name without any pattern matching, start it with ``path:``. These path names must completely match starting at the current repository root. To use an extended glob, start a name with ``glob:``. Globs are rooted at the current directory; a glob such as ``*.c`` will only match files in the current directory ending with ``.c``. 
The supported glob syntax extensions are ``**`` to match any string across path separators and ``{a,b}`` to mean "a or b". To use a Perl/Python regular expression, start a name with ``re:``. Regexp pattern matching is anchored at the root of the repository. To read name patterns from a file, use ``listfile:`` or ``listfile0:``. The latter expects null delimited patterns while the former expects line feeds. Each string read from the file is itself treated as a file pattern. To read a set of patterns from a file, use ``include:`` or ``subinclude:``. ``include:`` will use all the patterns from the given file and treat them as if they had been passed in manually. ``subinclude:`` will only apply the patterns against files that are under the subinclude file's directory. See :hg:`help hgignore` for details on the format of these files. All patterns, except for ``glob:`` specified in command line (not for ``-I`` or ``-X`` options), can match also against directories: files under matched directories are treated as matched. Plain examples:: path:foo/bar a name bar in a directory named foo in the root of the repository path:path:name a file or directory named "path:name" Glob examples:: glob:*.c any name ending in ".c" in the current directory *.c any name ending in ".c" in the current directory **.c any name ending in ".c" in any subdirectory of the current directory including itself. foo/*.c any name ending in ".c" in the directory foo foo/**.c any name ending in ".c" in any subdirectory of foo including itself. Regexp examples:: re:.*\.c$ any name ending in ".c", anywhere in the repository File examples:: listfile:list.txt read list from list.txt with one file pattern per line listfile0:list.txt read list from list.txt with null byte delimiters See also :hg:`help filesets`. 
Include examples:: include:path/to/mypatternfile reads patterns to be applied to all paths subinclude:path/to/subignorefile reads patterns specifically for paths in the subdirectory mercurial-3.7.3/mercurial/help/multirevs.txt0000644000175000017500000000120712676531525020635 0ustar mpmmpm00000000000000When Mercurial accepts more than one revision, they may be specified individually, or provided as a topologically continuous range, separated by the ":" character. The syntax of range notation is [BEGIN]:[END], where BEGIN and END are revision identifiers. Both BEGIN and END are optional. If BEGIN is not specified, it defaults to revision number 0. If END is not specified, it defaults to the tip. The range ":" thus means "all revisions". If BEGIN is greater than END, revisions are treated in reverse order. A range acts as a closed interval. This means that a range of 3:5 gives 3, 4 and 5. Similarly, a range of 9:6 gives 9, 8, 7, and 6. mercurial-3.7.3/mercurial/help/common.txt0000644000175000017500000000036012676531524020071 0ustar mpmmpm00000000000000.. Common link and substitution definitions. .. |hg(1)| replace:: **hg**\ (1) .. _hg(1): hg.1.html .. |hgrc(5)| replace:: **hgrc**\ (5) .. _hgrc(5): hgrc.5.html .. |hgignore(5)| replace:: **hgignore**\ (5) .. _hgignore(5): hgignore.5.html mercurial-3.7.3/mercurial/help/subrepos.txt0000644000175000017500000001575212676531525020457 0ustar mpmmpm00000000000000Subrepositories let you nest external repositories or projects into a parent Mercurial repository, and make commands operate on them as a group. Mercurial currently supports Mercurial, Git, and Subversion subrepositories. Subrepositories are made of three components: 1. Nested repository checkouts. They can appear anywhere in the parent working directory. 2. Nested repository references. They are defined in ``.hgsub``, which should be placed in the root of working directory, and tell where the subrepository checkouts come from. 
Mercurial subrepositories are referenced like:: path/to/nested = https://example.com/nested/repo/path Git and Subversion subrepos are also supported:: path/to/nested = [git]git://example.com/nested/repo/path path/to/nested = [svn]https://example.com/nested/trunk/path where ``path/to/nested`` is the checkout location relative to the parent Mercurial root, and ``https://example.com/nested/repo/path`` is the source repository path. The source can also reference a filesystem path. Note that ``.hgsub`` does not exist by default in Mercurial repositories; you have to create and add it to the parent repository before using subrepositories. 3. Nested repository states. They are defined in ``.hgsubstate``, which is placed in the root of the working directory, and capture whatever information is required to restore the subrepositories to the state they were committed in a parent repository changeset. Mercurial automatically records the nested repositories' states when committing in the parent repository. .. note:: The ``.hgsubstate`` file should not be edited manually. Adding a Subrepository ====================== If ``.hgsub`` does not exist, create it and add it to the parent repository. Clone or check out the external projects where you want it to live in the parent repository. Edit ``.hgsub`` and add the subrepository entry as described above. At this point, the subrepository is tracked and the next commit will record its state in ``.hgsubstate`` and bind it to the committed changeset. Synchronizing a Subrepository ============================= Subrepos do not automatically track the latest changeset of their sources. Instead, they are updated to the changeset that corresponds with the changeset checked out in the top-level changeset. This is so developers always get a consistent set of compatible code and libraries when they update. Thus, updating subrepos is a manual process. 
Simply check out target subrepo at the desired revision, test in the top-level repo, then commit in the parent repository to record the new combination. Deleting a Subrepository ======================== To remove a subrepository from the parent repository, delete its reference from ``.hgsub``, then remove its files. Interaction with Mercurial Commands =================================== :add: add does not recurse in subrepos unless -S/--subrepos is specified. However, if you specify the full path of a file in a subrepo, it will be added even without -S/--subrepos specified. Subversion subrepositories are currently silently ignored. :addremove: addremove does not recurse into subrepos unless -S/--subrepos is specified. However, if you specify the full path of a directory in a subrepo, addremove will be performed on it even without -S/--subrepos being specified. Git and Subversion subrepositories will print a warning and continue. :archive: archive does not recurse in subrepositories unless -S/--subrepos is specified. :cat: cat currently only handles exact file matches in subrepos. Subversion subrepositories are currently ignored. :commit: commit creates a consistent snapshot of the state of the entire project and its subrepositories. If any subrepositories have been modified, Mercurial will abort. Mercurial can be made to instead commit all modified subrepositories by specifying -S/--subrepos, or setting "ui.commitsubrepos=True" in a configuration file (see :hg:`help config`). After there are no longer any modified subrepositories, it records their state and finally commits it in the parent repository. The --addremove option also honors the -S/--subrepos option. However, Git and Subversion subrepositories will print a warning and abort. :diff: diff does not recurse in subrepos unless -S/--subrepos is specified. Changes are displayed as usual, on the subrepositories elements. Subversion subrepositories are currently silently ignored. 
:files: files does not recurse into subrepos unless -S/--subrepos is specified. However, if you specify the full path of a file or directory in a subrepo, it will be displayed even without -S/--subrepos being specified. Git and Subversion subrepositories are currently silently ignored. :forget: forget currently only handles exact file matches in subrepos. Git and Subversion subrepositories are currently silently ignored. :incoming: incoming does not recurse in subrepos unless -S/--subrepos is specified. Git and Subversion subrepositories are currently silently ignored. :outgoing: outgoing does not recurse in subrepos unless -S/--subrepos is specified. Git and Subversion subrepositories are currently silently ignored. :pull: pull is not recursive since it is not clear what to pull prior to running :hg:`update`. Listing and retrieving all subrepositories changes referenced by the parent repository pulled changesets is expensive at best, impossible in the Subversion case. :push: Mercurial will automatically push all subrepositories first when the parent repository is being pushed. This ensures new subrepository changes are available when referenced by top-level repositories. Push is a no-op for Subversion subrepositories. :status: status does not recurse into subrepositories unless -S/--subrepos is specified. Subrepository changes are displayed as regular Mercurial changes on the subrepository elements. Subversion subrepositories are currently silently ignored. :remove: remove does not recurse into subrepositories unless -S/--subrepos is specified. However, if you specify a file or directory path in a subrepo, it will be removed even without -S/--subrepos. Git and Subversion subrepositories are currently silently ignored. :update: update restores the subrepos in the state they were originally committed in target changeset. If the recorded changeset is not available in the current subrepository, Mercurial will pull it in first before updating. 
This means that updating can require network access when using subrepositories. Remapping Subrepositories Sources ================================= A subrepository source location may change during a project life, invalidating references stored in the parent repository history. To fix this, rewriting rules can be defined in parent repository ``hgrc`` file or in Mercurial configuration. See the ``[subpaths]`` section in hgrc(5) for more details. mercurial-3.7.3/mercurial/help/hgignore.txt0000644000175000017500000000604412676531525020411 0ustar mpmmpm00000000000000Synopsis ======== The Mercurial system uses a file called ``.hgignore`` in the root directory of a repository to control its behavior when it searches for files that it is not currently tracking. Description =========== The working directory of a Mercurial repository will often contain files that should not be tracked by Mercurial. These include backup files created by editors and build products created by compilers. These files can be ignored by listing them in a ``.hgignore`` file in the root of the working directory. The ``.hgignore`` file must be created manually. It is typically put under version control, so that the settings will propagate to other repositories with push and pull. An untracked file is ignored if its path relative to the repository root directory, or any prefix path of that path, is matched against any pattern in ``.hgignore``. For example, say we have an untracked file, ``file.c``, at ``a/b/file.c`` inside our repository. Mercurial will ignore ``file.c`` if any pattern in ``.hgignore`` matches ``a/b/file.c``, ``a/b`` or ``a``. In addition, a Mercurial configuration file can reference a set of per-user or global ignore files. See the ``ignore`` configuration key on the ``[ui]`` section of :hg:`help config` for details of how to configure these files. 
To control Mercurial's handling of files that it manages, many commands support the ``-I`` and ``-X`` options; see :hg:`help ` and :hg:`help patterns` for details. Files that are already tracked are not affected by .hgignore, even if they appear in .hgignore. An untracked file X can be explicitly added with :hg:`add X`, even if X would be excluded by a pattern in .hgignore. Syntax ====== An ignore file is a plain text file consisting of a list of patterns, with one pattern per line. Empty lines are skipped. The ``#`` character is treated as a comment character, and the ``\`` character is treated as an escape character. Mercurial supports several pattern syntaxes. The default syntax used is Python/Perl-style regular expressions. To change the syntax used, use a line of the following form:: syntax: NAME where ``NAME`` is one of the following: ``regexp`` Regular expression, Python/Perl syntax. ``glob`` Shell-style glob. The chosen syntax stays in effect when parsing all patterns that follow, until another syntax is selected. Neither glob nor regexp patterns are rooted. A glob-syntax pattern of the form ``*.c`` will match a file ending in ``.c`` in any directory, and a regexp pattern of the form ``\.c$`` will do the same. To root a regexp pattern, start it with ``^``. Subdirectories can have their own .hgignore settings by adding ``subinclude:path/to/subdir/.hgignore`` to the root ``.hgignore``. See :hg:`help patterns` for details on ``subinclude:`` and ``include:``. .. note:: Patterns specified in other than ``.hgignore`` are always rooted. Please see :hg:`help patterns` for details. Example ======= Here is an example ignore file. :: # use glob syntax. syntax: glob *.elc *.pyc *~ # switch to regexp syntax. syntax: regexp ^\.pc/ mercurial-3.7.3/mercurial/help/revsets.txt0000644000175000017500000000733312676531525020304 0ustar mpmmpm00000000000000Mercurial supports a functional language for selecting a set of revisions. 
The language supports a number of predicates which are joined by infix operators. Parenthesis can be used for grouping. Identifiers such as branch names may need quoting with single or double quotes if they contain characters like ``-`` or if they match one of the predefined predicates. Special characters can be used in quoted identifiers by escaping them, e.g., ``\n`` is interpreted as a newline. To prevent them from being interpreted, strings can be prefixed with ``r``, e.g. ``r'...'``. There is a single prefix operator: ``not x`` Changesets not in x. Short form is ``! x``. These are the supported infix operators: ``x::y`` A DAG range, meaning all changesets that are descendants of x and ancestors of y, including x and y themselves. If the first endpoint is left out, this is equivalent to ``ancestors(y)``, if the second is left out it is equivalent to ``descendants(x)``. An alternative syntax is ``x..y``. ``x:y`` All changesets with revision numbers between x and y, both inclusive. Either endpoint can be left out, they default to 0 and tip. ``x and y`` The intersection of changesets in x and y. Short form is ``x & y``. ``x or y`` The union of changesets in x and y. There are two alternative short forms: ``x | y`` and ``x + y``. ``x - y`` Changesets in x but not in y. ``x^n`` The nth parent of x, n == 0, 1, or 2. For n == 0, x; for n == 1, the first parent of each changeset in x; for n == 2, the second parent of changeset in x. ``x~n`` The nth first ancestor of x; ``x~0`` is x; ``x~3`` is ``x^^^``. There is a single postfix operator: ``x^`` Equivalent to ``x^1``, the first parent of each changeset in x. The following predicates are supported: .. predicatesmarker New predicates (known as "aliases") can be defined, using any combination of existing predicates or other aliases. An alias definition looks like:: = in the ``revsetalias`` section of a Mercurial configuration file. Arguments of the form `$1`, `$2`, etc. are substituted from the alias into the definition. 
For example, :: [revsetalias] h = heads() d($1) = sort($1, date) rs($1, $2) = reverse(sort($1, $2)) defines three aliases, ``h``, ``d``, and ``rs``. ``rs(0:tip, author)`` is exactly equivalent to ``reverse(sort(0:tip, author))``. An infix operator ``##`` can concatenate strings and identifiers into one string. For example:: [revsetalias] issue($1) = grep(r'\bissue[ :]?' ## $1 ## r'\b|\bbug\(' ## $1 ## r'\)') ``issue(1234)`` is equivalent to ``grep(r'\bissue[ :]?1234\b|\bbug\(1234\)')`` in this case. This matches against all of "issue 1234", "issue:1234", "issue1234" and "bug(1234)". All other prefix, infix and postfix operators have lower priority than ``##``. For example, ``$1 ## $2~2`` is equivalent to ``($1 ## $2)~2``. Command line equivalents for :hg:`log`:: -f -> ::. -d x -> date(x) -k x -> keyword(x) -m -> merge() -u x -> user(x) -b x -> branch(x) -P x -> !::x -l x -> limit(expr, x) Some sample queries: - Changesets on the default branch:: hg log -r "branch(default)" - Changesets on the default branch since tag 1.5 (excluding merges):: hg log -r "branch(default) and 1.5:: and not merge()" - Open branch heads:: hg log -r "head() and not closed()" - Changesets between tags 1.3 and 1.5 mentioning "bug" that affect ``hgext/*``:: hg log -r "1.3::1.5 and keyword(bug) and file('hgext/*')" - Changesets committed in May 2008, sorted by user:: hg log -r "sort(date('May 2008'), user)" - Changesets mentioning "bug" or "issue" that are not in a tagged release:: hg log -r "(keyword(bug) or keyword(issue)) and not ancestors(tag())" mercurial-3.7.3/mercurial/localrepo.py0000644000175000017500000022213112676531525017445 0ustar mpmmpm00000000000000# localrepo.py - read/write repository class for mercurial # # Copyright 2005-2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
from __future__ import absolute_import

import errno
import inspect
import os
import random
import time
import urllib
import weakref

from .i18n import _
from .node import (
    hex,
    nullid,
    short,
    wdirrev,
)
from . import (
    bookmarks,
    branchmap,
    bundle2,
    changegroup,
    changelog,
    cmdutil,
    context,
    dirstate,
    encoding,
    error,
    exchange,
    extensions,
    filelog,
    hook,
    lock as lockmod,
    manifest,
    match as matchmod,
    merge as mergemod,
    namespaces,
    obsolete,
    pathutil,
    peer,
    phases,
    pushkey,
    repoview,
    revset,
    scmutil,
    store,
    subrepo,
    tags as tagsmod,
    transaction,
    util,
)

# Short local aliases for heavily used helpers.
release = lockmod.release
propertycache = util.propertycache
filecache = scmutil.filecache

class repofilecache(filecache):
    """All filecache usage on repo are done for logic that should be
    unfiltered
    """

    # Each accessor redirects to the unfiltered repo so that the cached
    # value is shared by every filtered view of the same repository.
    def __get__(self, repo, type=None):
        return super(repofilecache, self).__get__(repo.unfiltered(), type)
    def __set__(self, repo, value):
        return super(repofilecache, self).__set__(repo.unfiltered(), value)
    def __delete__(self, repo):
        return super(repofilecache, self).__delete__(repo.unfiltered())

class storecache(repofilecache):
    """filecache for files in the store"""
    def join(self, obj, fname):
        # Resolve the cached file name inside .hg/store rather than .hg/.
        return obj.sjoin(fname)

class unfilteredpropertycache(propertycache):
    """propertycache that apply to unfiltered repo only"""

    def __get__(self, repo, type=None):
        unfi = repo.unfiltered()
        if unfi is repo:
            # Accessed on the unfiltered repo itself: compute and cache there.
            return super(unfilteredpropertycache, self).__get__(unfi)
        # Accessed through a filtered view: delegate to the unfiltered repo's
        # (possibly already cached) attribute.
        return getattr(unfi, self.name)

class filteredpropertycache(propertycache):
    """propertycache that must take filtering in account"""

    def cachevalue(self, obj, value):
        # Store on the instance that was actually accessed (the filtered
        # view), so each view keeps its own cached value.
        object.__setattr__(obj, self.name, value)

def hasunfilteredcache(repo, name):
    """check if a repo has an unfilteredpropertycache value for <name>"""
    return name in vars(repo.unfiltered())

def unfilteredmethod(orig):
    """decorate method that always need to be run on unfiltered version"""
    def wrapper(repo, *args, **kwargs):
        return orig(repo.unfiltered(), *args, **kwargs)
    return wrapper

# Capabilities advertised by modern local peers; continued on the next line.
moderncaps = set(('lookup',
                  'branchmap', 'pushkey', 'known', 'getbundle', 'unbundle'))
# Legacy peers additionally support the pre-getbundle changegroup protocol.
legacycaps = moderncaps.union(set(['changegroupsubset']))

class localpeer(peer.peerrepository):
    '''peer for a local repo; reflects only the most recent API'''

    def __init__(self, repo, caps=moderncaps):
        peer.peerrepository.__init__(self)
        # Peers always see the 'served' filtered view of the repository.
        self._repo = repo.filtered('served')
        self.ui = repo.ui
        self._caps = repo._restrictcapabilities(caps)
        self.requirements = repo.requirements
        self.supportedformats = repo.supportedformats

    def close(self):
        self._repo.close()

    def _capabilities(self):
        return self._caps

    def local(self):
        return self._repo

    def canpush(self):
        return True

    def url(self):
        return self._repo.url()

    def lookup(self, key):
        return self._repo.lookup(key)

    def branchmap(self):
        return self._repo.branchmap()

    def heads(self):
        return self._repo.heads()

    def known(self, nodes):
        return self._repo.known(nodes)

    def getbundle(self, source, heads=None, common=None, bundlecaps=None,
                  **kwargs):
        cg = exchange.getbundle(self._repo, source, heads=heads,
                                common=common, bundlecaps=bundlecaps, **kwargs)
        if bundlecaps is not None and 'HG20' in bundlecaps:
            # When requesting a bundle2, getbundle returns a stream to make the
            # wire level function happier. We need to build a proper object
            # from it in local peer.
            cg = bundle2.getunbundler(self.ui, cg)
        return cg

    # TODO We might want to move the next two calls into legacypeer and add
    # unbundle instead.

    def unbundle(self, cg, heads, url):
        """apply a bundle on a repo

        This function handles the repo locking itself."""
        try:
            try:
                cg = exchange.readbundle(self.ui, cg, None)
                ret = exchange.unbundle(self._repo, cg, heads, 'push', url)
                if util.safehasattr(ret, 'getchunks'):
                    # This is a bundle20 object, turn it into an unbundler.
                    # This little dance should be dropped eventually when the
                    # API is finally improved.
stream = util.chunkbuffer(ret.getchunks()) ret = bundle2.getunbundler(self.ui, stream) return ret except Exception as exc: # If the exception contains output salvaged from a bundle2 # reply, we need to make sure it is printed before continuing # to fail. So we build a bundle2 with such output and consume # it directly. # # This is not very elegant but allows a "simple" solution for # issue4594 output = getattr(exc, '_bundle2salvagedoutput', ()) if output: bundler = bundle2.bundle20(self._repo.ui) for out in output: bundler.addpart(out) stream = util.chunkbuffer(bundler.getchunks()) b = bundle2.getunbundler(self.ui, stream) bundle2.processbundle(self._repo, b) raise except error.PushRaced as exc: raise error.ResponseError(_('push failed:'), str(exc)) def lock(self): return self._repo.lock() def addchangegroup(self, cg, source, url): return cg.apply(self._repo, source, url) def pushkey(self, namespace, key, old, new): return self._repo.pushkey(namespace, key, old, new) def listkeys(self, namespace): return self._repo.listkeys(namespace) def debugwireargs(self, one, two, three=None, four=None, five=None): '''used to test argument passing over the wire''' return "%s %s %s %s %s" % (one, two, three, four, five) class locallegacypeer(localpeer): '''peer extension which implements legacy methods too; used for tests with restricted capabilities''' def __init__(self, repo): localpeer.__init__(self, repo, caps=legacycaps) def branches(self, nodes): return self._repo.branches(nodes) def between(self, pairs): return self._repo.between(pairs) def changegroup(self, basenodes, source): return changegroup.changegroup(self._repo, basenodes, source) def changegroupsubset(self, bases, heads, source): return changegroup.changegroupsubset(self._repo, bases, heads, source) class localrepository(object): supportedformats = set(('revlogv1', 'generaldelta', 'treemanifest', 'manifestv2')) _basesupported = supportedformats | set(('store', 'fncache', 'shared', 'dotencode')) openerreqs = 
set(('revlogv1', 'generaldelta', 'treemanifest', 'manifestv2')) filtername = None # a list of (ui, featureset) functions. # only functions defined in module of enabled extensions are invoked featuresetupfuncs = set() def _baserequirements(self, create): return ['revlogv1'] def __init__(self, baseui, path=None, create=False): self.requirements = set() self.wvfs = scmutil.vfs(path, expandpath=True, realpath=True) self.wopener = self.wvfs self.root = self.wvfs.base self.path = self.wvfs.join(".hg") self.origroot = path self.auditor = pathutil.pathauditor(self.root, self._checknested) self.nofsauditor = pathutil.pathauditor(self.root, self._checknested, realfs=False) self.vfs = scmutil.vfs(self.path) self.opener = self.vfs self.baseui = baseui self.ui = baseui.copy() self.ui.copy = baseui.copy # prevent copying repo configuration # A list of callback to shape the phase if no data were found. # Callback are in the form: func(repo, roots) --> processed root. # This list it to be filled by extension during repo setup self._phasedefaults = [] try: self.ui.readconfig(self.join("hgrc"), self.root) extensions.loadall(self.ui) except IOError: pass if self.featuresetupfuncs: self.supported = set(self._basesupported) # use private copy extmods = set(m.__name__ for n, m in extensions.extensions(self.ui)) for setupfunc in self.featuresetupfuncs: if setupfunc.__module__ in extmods: setupfunc(self.ui, self.supported) else: self.supported = self._basesupported if not self.vfs.isdir(): if create: if not self.wvfs.exists(): self.wvfs.makedirs() self.vfs.makedir(notindexed=True) self.requirements.update(self._baserequirements(create)) if self.ui.configbool('format', 'usestore', True): self.vfs.mkdir("store") self.requirements.add("store") if self.ui.configbool('format', 'usefncache', True): self.requirements.add("fncache") if self.ui.configbool('format', 'dotencode', True): self.requirements.add('dotencode') # create an invalid changelog self.vfs.append( "00changelog.i", '\0\0\0\2' # 
represents revlogv2 ' dummy changelog to prevent using the old repo layout' ) if scmutil.gdinitconfig(self.ui): self.requirements.add("generaldelta") if self.ui.configbool('experimental', 'treemanifest', False): self.requirements.add("treemanifest") if self.ui.configbool('experimental', 'manifestv2', False): self.requirements.add("manifestv2") else: raise error.RepoError(_("repository %s not found") % path) elif create: raise error.RepoError(_("repository %s already exists") % path) else: try: self.requirements = scmutil.readrequires( self.vfs, self.supported) except IOError as inst: if inst.errno != errno.ENOENT: raise self.sharedpath = self.path try: vfs = scmutil.vfs(self.vfs.read("sharedpath").rstrip('\n'), realpath=True) s = vfs.base if not vfs.exists(): raise error.RepoError( _('.hg/sharedpath points to nonexistent directory %s') % s) self.sharedpath = s except IOError as inst: if inst.errno != errno.ENOENT: raise self.store = store.store( self.requirements, self.sharedpath, scmutil.vfs) self.spath = self.store.path self.svfs = self.store.vfs self.sjoin = self.store.join self.vfs.createmode = self.store.createmode self._applyopenerreqs() if create: self._writerequirements() self._dirstatevalidatewarned = False self._branchcaches = {} self._revbranchcache = None self.filterpats = {} self._datafilters = {} self._transref = self._lockref = self._wlockref = None # A cache for various files under .hg/ that tracks file changes, # (used by the filecache decorator) # # Maps a property name to its util.filecacheentry self._filecache = {} # hold sets of revision to be filtered # should be cleared when something might have changed the filter value: # - new changesets, # - phase change, # - new obsolescence marker, # - working directory parent change, # - bookmark changes self.filteredrevcache = {} # generic mapping between names and nodes self.names = namespaces.namespaces() def close(self): self._writecaches() def _writecaches(self): if self._revbranchcache: 
self._revbranchcache.write() def _restrictcapabilities(self, caps): if self.ui.configbool('experimental', 'bundle2-advertise', True): caps = set(caps) capsblob = bundle2.encodecaps(bundle2.getrepocaps(self)) caps.add('bundle2=' + urllib.quote(capsblob)) return caps def _applyopenerreqs(self): self.svfs.options = dict((r, 1) for r in self.requirements if r in self.openerreqs) # experimental config: format.chunkcachesize chunkcachesize = self.ui.configint('format', 'chunkcachesize') if chunkcachesize is not None: self.svfs.options['chunkcachesize'] = chunkcachesize # experimental config: format.maxchainlen maxchainlen = self.ui.configint('format', 'maxchainlen') if maxchainlen is not None: self.svfs.options['maxchainlen'] = maxchainlen # experimental config: format.manifestcachesize manifestcachesize = self.ui.configint('format', 'manifestcachesize') if manifestcachesize is not None: self.svfs.options['manifestcachesize'] = manifestcachesize # experimental config: format.aggressivemergedeltas aggressivemergedeltas = self.ui.configbool('format', 'aggressivemergedeltas', False) self.svfs.options['aggressivemergedeltas'] = aggressivemergedeltas self.svfs.options['lazydeltabase'] = not scmutil.gddeltaconfig(self.ui) def _writerequirements(self): scmutil.writerequires(self.vfs, self.requirements) def _checknested(self, path): """Determine if path is a legal nested repository.""" if not path.startswith(self.root): return False subpath = path[len(self.root) + 1:] normsubpath = util.pconvert(subpath) # XXX: Checking against the current working copy is wrong in # the sense that it can reject things like # # $ hg cat -r 10 sub/x.txt # # if sub/ is no longer a subrepository in the working copy # parent revision. # # However, it can of course also allow things that would have # been rejected before, such as the above cat command if sub/ # is a subrepository now, but was a normal directory before. 
# The old path auditor would have rejected by mistake since it # panics when it sees sub/.hg/. # # All in all, checking against the working copy seems sensible # since we want to prevent access to nested repositories on # the filesystem *now*. ctx = self[None] parts = util.splitpath(subpath) while parts: prefix = '/'.join(parts) if prefix in ctx.substate: if prefix == normsubpath: return True else: sub = ctx.sub(prefix) return sub.checknested(subpath[len(prefix) + 1:]) else: parts.pop() return False def peer(self): return localpeer(self) # not cached to avoid reference cycle def unfiltered(self): """Return unfiltered version of the repository Intended to be overwritten by filtered repo.""" return self def filtered(self, name): """Return a filtered version of a repository""" # build a new class with the mixin and the current class # (possibly subclass of the repo) class proxycls(repoview.repoview, self.unfiltered().__class__): pass return proxycls(self, name) @repofilecache('bookmarks', 'bookmarks.current') def _bookmarks(self): return bookmarks.bmstore(self) @property def _activebookmark(self): return self._bookmarks.active def bookmarkheads(self, bookmark): name = bookmark.split('@', 1)[0] heads = [] for mark, n in self._bookmarks.iteritems(): if mark.split('@', 1)[0] == name: heads.append(n) return heads # _phaserevs and _phasesets depend on changelog. what we need is to # call _phasecache.invalidate() if '00changelog.i' was changed, but it # can't be easily expressed in filecache mechanism. @storecache('phaseroots', '00changelog.i') def _phasecache(self): return phases.phasecache(self, self._phasedefaults) @storecache('obsstore') def obsstore(self): # read default format for new obsstore. # developer config: format.obsstore-version defaultformat = self.ui.configint('format', 'obsstore-version', None) # rely on obsstore class default when possible. 
kwargs = {} if defaultformat is not None: kwargs['defaultformat'] = defaultformat readonly = not obsolete.isenabled(self, obsolete.createmarkersopt) store = obsolete.obsstore(self.svfs, readonly=readonly, **kwargs) if store and readonly: self.ui.warn( _('obsolete feature not enabled but %i markers found!\n') % len(list(store))) return store @storecache('00changelog.i') def changelog(self): c = changelog.changelog(self.svfs) if 'HG_PENDING' in os.environ: p = os.environ['HG_PENDING'] if p.startswith(self.root): c.readpending('00changelog.i.a') return c @storecache('00manifest.i') def manifest(self): return manifest.manifest(self.svfs) def dirlog(self, dir): return self.manifest.dirlog(dir) @repofilecache('dirstate') def dirstate(self): return dirstate.dirstate(self.vfs, self.ui, self.root, self._dirstatevalidate) def _dirstatevalidate(self, node): try: self.changelog.rev(node) return node except error.LookupError: if not self._dirstatevalidatewarned: self._dirstatevalidatewarned = True self.ui.warn(_("warning: ignoring unknown" " working parent %s!\n") % short(node)) return nullid def __getitem__(self, changeid): if changeid is None or changeid == wdirrev: return context.workingctx(self) if isinstance(changeid, slice): return [context.changectx(self, i) for i in xrange(*changeid.indices(len(self))) if i not in self.changelog.filteredrevs] return context.changectx(self, changeid) def __contains__(self, changeid): try: self[changeid] return True except error.RepoLookupError: return False def __nonzero__(self): return True def __len__(self): return len(self.changelog) def __iter__(self): return iter(self.changelog) def revs(self, expr, *args): '''Find revisions matching a revset. The revset is specified as a string ``expr`` that may contain %-formatting to escape certain types. See ``revset.formatspec``. Return a revset.abstractsmartset, which is a list-like interface that contains integer revisions. 
''' expr = revset.formatspec(expr, *args) m = revset.match(None, expr) return m(self) def set(self, expr, *args): '''Find revisions matching a revset and emit changectx instances. This is a convenience wrapper around ``revs()`` that iterates the result and is a generator of changectx instances. ''' for r in self.revs(expr, *args): yield self[r] def url(self): return 'file:' + self.root def hook(self, name, throw=False, **args): """Call a hook, passing this repo instance. This a convenience method to aid invoking hooks. Extensions likely won't call this unless they have registered a custom hook or are replacing code that is expected to call a hook. """ return hook.hook(self.ui, self, name, throw, **args) @unfilteredmethod def _tag(self, names, node, message, local, user, date, extra=None, editor=False): if isinstance(names, str): names = (names,) branches = self.branchmap() for name in names: self.hook('pretag', throw=True, node=hex(node), tag=name, local=local) if name in branches: self.ui.warn(_("warning: tag %s conflicts with existing" " branch name\n") % name) def writetags(fp, names, munge, prevtags): fp.seek(0, 2) if prevtags and prevtags[-1] != '\n': fp.write('\n') for name in names: if munge: m = munge(name) else: m = name if (self._tagscache.tagtypes and name in self._tagscache.tagtypes): old = self.tags().get(name, nullid) fp.write('%s %s\n' % (hex(old), m)) fp.write('%s %s\n' % (hex(node), m)) fp.close() prevtags = '' if local: try: fp = self.vfs('localtags', 'r+') except IOError: fp = self.vfs('localtags', 'a') else: prevtags = fp.read() # local tags are stored in the current charset writetags(fp, names, None, prevtags) for name in names: self.hook('tag', node=hex(node), tag=name, local=local) return try: fp = self.wfile('.hgtags', 'rb+') except IOError as e: if e.errno != errno.ENOENT: raise fp = self.wfile('.hgtags', 'ab') else: prevtags = fp.read() # committed tags are stored in UTF-8 writetags(fp, names, encoding.fromlocal, prevtags) fp.close() 
self.invalidatecaches() if '.hgtags' not in self.dirstate: self[None].add(['.hgtags']) m = matchmod.exact(self.root, '', ['.hgtags']) tagnode = self.commit(message, user, date, extra=extra, match=m, editor=editor) for name in names: self.hook('tag', node=hex(node), tag=name, local=local) return tagnode def tag(self, names, node, message, local, user, date, editor=False): '''tag a revision with one or more symbolic names. names is a list of strings or, when adding a single tag, names may be a string. if local is True, the tags are stored in a per-repository file. otherwise, they are stored in the .hgtags file, and a new changeset is committed with the change. keyword arguments: local: whether to store tags in non-version-controlled file (default False) message: commit message to use if committing user: name of user to use if committing date: date tuple to use if committing''' if not local: m = matchmod.exact(self.root, '', ['.hgtags']) if any(self.status(match=m, unknown=True, ignored=True)): raise error.Abort(_('working copy of .hgtags is changed'), hint=_('please commit .hgtags manually')) self.tags() # instantiate the cache self._tag(names, node, message, local, user, date, editor=editor) @filteredpropertycache def _tagscache(self): '''Returns a tagscache object that contains various tags related caches.''' # This simplifies its cache management by having one decorated # function (this one) and the rest simply fetch things from it. class tagscache(object): def __init__(self): # These two define the set of tags for this repository. tags # maps tag name to node; tagtypes maps tag name to 'global' or # 'local'. (Global tags are defined by .hgtags across all # heads, and local tags are defined in .hg/localtags.) # They constitute the in-memory cache of tags. 
self.tags = self.tagtypes = None self.nodetagscache = self.tagslist = None cache = tagscache() cache.tags, cache.tagtypes = self._findtags() return cache def tags(self): '''return a mapping of tag to node''' t = {} if self.changelog.filteredrevs: tags, tt = self._findtags() else: tags = self._tagscache.tags for k, v in tags.iteritems(): try: # ignore tags to unknown nodes self.changelog.rev(v) t[k] = v except (error.LookupError, ValueError): pass return t def _findtags(self): '''Do the hard work of finding tags. Return a pair of dicts (tags, tagtypes) where tags maps tag name to node, and tagtypes maps tag name to a string like \'global\' or \'local\'. Subclasses or extensions are free to add their own tags, but should be aware that the returned dicts will be retained for the duration of the localrepo object.''' # XXX what tagtype should subclasses/extensions use? Currently # mq and bookmarks add tags, but do not set the tagtype at all. # Should each extension invent its own tag type? Should there # be one tagtype for all such "virtual" tags? Or is the status # quo fine? alltags = {} # map tag name to (node, hist) tagtypes = {} tagsmod.findglobaltags(self.ui, self, alltags, tagtypes) tagsmod.readlocaltags(self.ui, self, alltags, tagtypes) # Build the return dicts. Have to re-encode tag names because # the tags module always uses UTF-8 (in order not to lose info # writing to the cache), but the rest of Mercurial wants them in # local encoding. tags = {} for (name, (node, hist)) in alltags.iteritems(): if node != nullid: tags[encoding.tolocal(name)] = node tags['tip'] = self.changelog.tip() tagtypes = dict([(encoding.tolocal(name), value) for (name, value) in tagtypes.iteritems()]) return (tags, tagtypes) def tagtype(self, tagname): ''' return the type of the given tag. 
result can be: 'local' : a local tag 'global' : a global tag None : tag does not exist ''' return self._tagscache.tagtypes.get(tagname) def tagslist(self): '''return a list of tags ordered by revision''' if not self._tagscache.tagslist: l = [] for t, n in self.tags().iteritems(): l.append((self.changelog.rev(n), t, n)) self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)] return self._tagscache.tagslist def nodetags(self, node): '''return the tags associated with a node''' if not self._tagscache.nodetagscache: nodetagscache = {} for t, n in self._tagscache.tags.iteritems(): nodetagscache.setdefault(n, []).append(t) for tags in nodetagscache.itervalues(): tags.sort() self._tagscache.nodetagscache = nodetagscache return self._tagscache.nodetagscache.get(node, []) def nodebookmarks(self, node): """return the list of bookmarks pointing to the specified node""" marks = [] for bookmark, n in self._bookmarks.iteritems(): if n == node: marks.append(bookmark) return sorted(marks) def branchmap(self): '''returns a dictionary {branch: [branchheads]} with branchheads ordered by increasing revision number''' branchmap.updatecache(self) return self._branchcaches[self.filtername] @unfilteredmethod def revbranchcache(self): if not self._revbranchcache: self._revbranchcache = branchmap.revbranchcache(self.unfiltered()) return self._revbranchcache def branchtip(self, branch, ignoremissing=False): '''return the tip node for a given branch If ignoremissing is True, then this method will not raise an error. This is helpful for callers that only expect None for a missing branch (e.g. namespace). 
''' try: return self.branchmap().branchtip(branch) except KeyError: if not ignoremissing: raise error.RepoLookupError(_("unknown branch '%s'") % branch) else: pass def lookup(self, key): return self[key].node() def lookupbranch(self, key, remote=None): repo = remote or self if key in repo.branchmap(): return key repo = (remote and remote.local()) and remote or self return repo[key].branch() def known(self, nodes): cl = self.changelog nm = cl.nodemap filtered = cl.filteredrevs result = [] for n in nodes: r = nm.get(n) resp = not (r is None or r in filtered) result.append(resp) return result def local(self): return self def publishing(self): # it's safe (and desirable) to trust the publish flag unconditionally # so that we don't finalize changes shared between users via ssh or nfs return self.ui.configbool('phases', 'publish', True, untrusted=True) def cancopy(self): # so statichttprepo's override of local() works if not self.local(): return False if not self.publishing(): return True # if publishing we can't copy if there is filtered content return not self.filtered('visible').changelog.filteredrevs def shared(self): '''the type of shared repository (None if not shared)''' if self.sharedpath != self.path: return 'store' return None def join(self, f, *insidef): return self.vfs.join(os.path.join(f, *insidef)) def wjoin(self, f, *insidef): return self.vfs.reljoin(self.root, f, *insidef) def file(self, f): if f[0] == '/': f = f[1:] return filelog.filelog(self.svfs, f) def parents(self, changeid=None): '''get list of changectxs for parents of changeid''' msg = 'repo.parents() is deprecated, use repo[%r].parents()' % changeid self.ui.deprecwarn(msg, '3.7') return self[changeid].parents() def changectx(self, changeid): return self[changeid] def setparents(self, p1, p2=nullid): self.dirstate.beginparentchange() copies = self.dirstate.setparents(p1, p2) pctx = self[p1] if copies: # Adjust copy records, the dirstate cannot do it, it # requires access to parents manifests. 
Preserve them # only for entries added to first parent. for f in copies: if f not in pctx and copies[f] in pctx: self.dirstate.copy(copies[f], f) if p2 == nullid: for f, s in sorted(self.dirstate.copies().items()): if f not in pctx and s not in pctx: self.dirstate.copy(None, f) self.dirstate.endparentchange() def filectx(self, path, changeid=None, fileid=None): """changeid can be a changeset revision, node, or tag. fileid can be a file revision or node.""" return context.filectx(self, path, changeid, fileid) def getcwd(self): return self.dirstate.getcwd() def pathto(self, f, cwd=None): return self.dirstate.pathto(f, cwd) def wfile(self, f, mode='r'): return self.wvfs(f, mode) def _link(self, f): return self.wvfs.islink(f) def _loadfilter(self, filter): if filter not in self.filterpats: l = [] for pat, cmd in self.ui.configitems(filter): if cmd == '!': continue mf = matchmod.match(self.root, '', [pat]) fn = None params = cmd for name, filterfn in self._datafilters.iteritems(): if cmd.startswith(name): fn = filterfn params = cmd[len(name):].lstrip() break if not fn: fn = lambda s, c, **kwargs: util.filter(s, c) # Wrap old filters not supporting keyword arguments if not inspect.getargspec(fn)[2]: oldfn = fn fn = lambda s, c, **kwargs: oldfn(s, c) l.append((mf, fn, params)) self.filterpats[filter] = l return self.filterpats[filter] def _filter(self, filterpats, filename, data): for mf, fn, cmd in filterpats: if mf(filename): self.ui.debug("filtering %s through %s\n" % (filename, cmd)) data = fn(data, cmd, ui=self.ui, repo=self, filename=filename) break return data @unfilteredpropertycache def _encodefilterpats(self): return self._loadfilter('encode') @unfilteredpropertycache def _decodefilterpats(self): return self._loadfilter('decode') def adddatafilter(self, name, filter): self._datafilters[name] = filter def wread(self, filename): if self._link(filename): data = self.wvfs.readlink(filename) else: data = self.wvfs.read(filename) return 
self._filter(self._encodefilterpats, filename, data) def wwrite(self, filename, data, flags): """write ``data`` into ``filename`` in the working directory This returns length of written (maybe decoded) data. """ data = self._filter(self._decodefilterpats, filename, data) if 'l' in flags: self.wvfs.symlink(data, filename) else: self.wvfs.write(filename, data) if 'x' in flags: self.wvfs.setflags(filename, False, True) return len(data) def wwritedata(self, filename, data): return self._filter(self._decodefilterpats, filename, data) def currenttransaction(self): """return the current transaction or None if non exists""" if self._transref: tr = self._transref() else: tr = None if tr and tr.running(): return tr return None def transaction(self, desc, report=None): if (self.ui.configbool('devel', 'all-warnings') or self.ui.configbool('devel', 'check-locks')): l = self._lockref and self._lockref() if l is None or not l.held: self.ui.develwarn('transaction with no lock') tr = self.currenttransaction() if tr is not None: return tr.nest() # abort here if the journal already exists if self.svfs.exists("journal"): raise error.RepoError( _("abandoned transaction found"), hint=_("run 'hg recover' to clean up transaction")) # make journal.dirstate contain in-memory changes at this point self.dirstate.write(None) idbase = "%.40f#%f" % (random.random(), time.time()) txnid = 'TXN:' + util.sha1(idbase).hexdigest() self.hook('pretxnopen', throw=True, txnname=desc, txnid=txnid) self._writejournal(desc) renames = [(vfs, x, undoname(x)) for vfs, x in self._journalfiles()] if report: rp = report else: rp = self.ui.warn vfsmap = {'plain': self.vfs} # root of .hg/ # we must avoid cyclic reference between repo and transaction. 
reporef = weakref.ref(self) def validate(tr): """will run pre-closing hooks""" reporef().hook('pretxnclose', throw=True, txnname=desc, **tr.hookargs) def releasefn(tr, success): repo = reporef() if success: # this should be explicitly invoked here, because # in-memory changes aren't written out at closing # transaction, if tr.addfilegenerator (via # dirstate.write or so) isn't invoked while # transaction running repo.dirstate.write(None) else: # prevent in-memory changes from being written out at # the end of outer wlock scope or so repo.dirstate.invalidate() # discard all changes (including ones already written # out) in this transaction repo.vfs.rename('journal.dirstate', 'dirstate') repo.invalidate(clearfilecache=True) tr = transaction.transaction(rp, self.svfs, vfsmap, "journal", "undo", aftertrans(renames), self.store.createmode, validator=validate, releasefn=releasefn) tr.hookargs['txnid'] = txnid # note: writing the fncache only during finalize mean that the file is # outdated when running hooks. As fncache is used for streaming clone, # this is not expected to break anything that happen during the hooks. tr.addfinalize('flush-fncache', self.store.write) def txnclosehook(tr2): """To be run if transaction is successful, will schedule a hook run """ # Don't reference tr2 in hook() so we don't hold a reference. # This reduces memory consumption when there are multiple # transactions per lock. This can likely go away if issue5045 # fixes the function accumulation. hookargs = tr2.hookargs def hook(): reporef().hook('txnclose', throw=False, txnname=desc, **hookargs) reporef()._afterlock(hook) tr.addfinalize('txnclose-hook', txnclosehook) def txnaborthook(tr2): """To be run if transaction is aborted """ reporef().hook('txnabort', throw=False, txnname=desc, **tr2.hookargs) tr.addabort('txnabort-hook', txnaborthook) # avoid eager cache invalidation. in-memory data should be identical # to stored data if transaction has no error. 
tr.addpostclose('refresh-filecachestats', self._refreshfilecachestats) self._transref = weakref.ref(tr) return tr def _journalfiles(self): return ((self.svfs, 'journal'), (self.vfs, 'journal.dirstate'), (self.vfs, 'journal.branch'), (self.vfs, 'journal.desc'), (self.vfs, 'journal.bookmarks'), (self.svfs, 'journal.phaseroots')) def undofiles(self): return [(vfs, undoname(x)) for vfs, x in self._journalfiles()] def _writejournal(self, desc): self.vfs.write("journal.dirstate", self.vfs.tryread("dirstate")) self.vfs.write("journal.branch", encoding.fromlocal(self.dirstate.branch())) self.vfs.write("journal.desc", "%d\n%s\n" % (len(self), desc)) self.vfs.write("journal.bookmarks", self.vfs.tryread("bookmarks")) self.svfs.write("journal.phaseroots", self.svfs.tryread("phaseroots")) def recover(self): with self.lock(): if self.svfs.exists("journal"): self.ui.status(_("rolling back interrupted transaction\n")) vfsmap = {'': self.svfs, 'plain': self.vfs,} transaction.rollback(self.svfs, vfsmap, "journal", self.ui.warn) self.invalidate() return True else: self.ui.warn(_("no interrupted transaction available\n")) return False def rollback(self, dryrun=False, force=False): wlock = lock = dsguard = None try: wlock = self.wlock() lock = self.lock() if self.svfs.exists("undo"): dsguard = cmdutil.dirstateguard(self, 'rollback') return self._rollback(dryrun, force, dsguard) else: self.ui.warn(_("no rollback information available\n")) return 1 finally: release(dsguard, lock, wlock) @unfilteredmethod # Until we get smarter cache management def _rollback(self, dryrun, force, dsguard): ui = self.ui try: args = self.vfs.read('undo.desc').splitlines() (oldlen, desc, detail) = (int(args[0]), args[1], None) if len(args) >= 3: detail = args[2] oldtip = oldlen - 1 if detail and ui.verbose: msg = (_('repository tip rolled back to revision %s' ' (undo %s: %s)\n') % (oldtip, desc, detail)) else: msg = (_('repository tip rolled back to revision %s' ' (undo %s)\n') % (oldtip, desc)) except 
IOError: msg = _('rolling back unknown transaction\n') desc = None if not force and self['.'] != self['tip'] and desc == 'commit': raise error.Abort( _('rollback of last commit while not checked out ' 'may lose data'), hint=_('use -f to force')) ui.status(msg) if dryrun: return 0 parents = self.dirstate.parents() self.destroying() vfsmap = {'plain': self.vfs, '': self.svfs} transaction.rollback(self.svfs, vfsmap, 'undo', ui.warn) if self.vfs.exists('undo.bookmarks'): self.vfs.rename('undo.bookmarks', 'bookmarks') if self.svfs.exists('undo.phaseroots'): self.svfs.rename('undo.phaseroots', 'phaseroots') self.invalidate() parentgone = (parents[0] not in self.changelog.nodemap or parents[1] not in self.changelog.nodemap) if parentgone: # prevent dirstateguard from overwriting already restored one dsguard.close() self.vfs.rename('undo.dirstate', 'dirstate') try: branch = self.vfs.read('undo.branch') self.dirstate.setbranch(encoding.tolocal(branch)) except IOError: ui.warn(_('named branch could not be reset: ' 'current branch is still \'%s\'\n') % self.dirstate.branch()) self.dirstate.invalidate() parents = tuple([p.rev() for p in self[None].parents()]) if len(parents) > 1: ui.status(_('working directory now based on ' 'revisions %d and %d\n') % parents) else: ui.status(_('working directory now based on ' 'revision %d\n') % parents) mergemod.mergestate.clean(self, self['.'].node()) # TODO: if we know which new heads may result from this rollback, pass # them to destroy(), which will prevent the branchhead cache from being # invalidated. 
self.destroyed() return 0 def invalidatecaches(self): if '_tagscache' in vars(self): # can't use delattr on proxy del self.__dict__['_tagscache'] self.unfiltered()._branchcaches.clear() self.invalidatevolatilesets() def invalidatevolatilesets(self): self.filteredrevcache.clear() obsolete.clearobscaches(self) def invalidatedirstate(self): '''Invalidates the dirstate, causing the next call to dirstate to check if it was modified since the last time it was read, rereading it if it has. This is different to dirstate.invalidate() that it doesn't always rereads the dirstate. Use dirstate.invalidate() if you want to explicitly read the dirstate again (i.e. restoring it to a previous known good state).''' if hasunfilteredcache(self, 'dirstate'): for k in self.dirstate._filecache: try: delattr(self.dirstate, k) except AttributeError: pass delattr(self.unfiltered(), 'dirstate') def invalidate(self, clearfilecache=False): unfiltered = self.unfiltered() # all file caches are stored unfiltered for k in self._filecache.keys(): # dirstate is invalidated separately in invalidatedirstate() if k == 'dirstate': continue if clearfilecache: del self._filecache[k] try: delattr(unfiltered, k) except AttributeError: pass self.invalidatecaches() self.store.invalidatecaches() def invalidateall(self): '''Fully invalidates both store and non-store parts, causing the subsequent operation to reread any outside changes.''' # extension should hook this to invalidate its caches self.invalidate() self.invalidatedirstate() def _refreshfilecachestats(self, tr): """Reload stats of cached files so that they are flagged as valid""" for k, ce in self._filecache.items(): if k == 'dirstate' or k not in self.__dict__: continue ce.refresh() def _lock(self, vfs, lockname, wait, releasefn, acquirefn, desc, inheritchecker=None, parentenvvar=None): parentlock = None # the contents of parentenvvar are used by the underlying lock to # determine whether it can be inherited if parentenvvar is not None: parentlock = 
os.environ.get(parentenvvar) try: l = lockmod.lock(vfs, lockname, 0, releasefn=releasefn, acquirefn=acquirefn, desc=desc, inheritchecker=inheritchecker, parentlock=parentlock) except error.LockHeld as inst: if not wait: raise self.ui.warn(_("waiting for lock on %s held by %r\n") % (desc, inst.locker)) # default to 600 seconds timeout l = lockmod.lock(vfs, lockname, int(self.ui.config("ui", "timeout", "600")), releasefn=releasefn, acquirefn=acquirefn, desc=desc) self.ui.warn(_("got lock after %s seconds\n") % l.delay) return l def _afterlock(self, callback): """add a callback to be run when the repository is fully unlocked The callback will be executed when the outermost lock is released (with wlock being higher level than 'lock').""" for ref in (self._wlockref, self._lockref): l = ref and ref() if l and l.held: l.postrelease.append(callback) break else: # no lock have been found. callback() def lock(self, wait=True): '''Lock the repository store (.hg/store) and return a weak reference to the lock. Use this before modifying the store (e.g. committing or stripping). If you are opening a transaction, get a lock as well.) If both 'lock' and 'wlock' must be acquired, ensure you always acquires 'wlock' first to avoid a dead-lock hazard.''' l = self._lockref and self._lockref() if l is not None and l.held: l.lock() return l l = self._lock(self.svfs, "lock", wait, None, self.invalidate, _('repository %s') % self.origroot) self._lockref = weakref.ref(l) return l def _wlockchecktransaction(self): if self.currenttransaction() is not None: raise error.LockInheritanceContractViolation( 'wlock cannot be inherited in the middle of a transaction') def wlock(self, wait=True): '''Lock the non-store parts of the repository (everything under .hg except .hg/store) and return a weak reference to the lock. Use this before modifying files in .hg. 
If both 'lock' and 'wlock' must be acquired, ensure you always acquires 'wlock' first to avoid a dead-lock hazard.''' l = self._wlockref and self._wlockref() if l is not None and l.held: l.lock() return l # We do not need to check for non-waiting lock acquisition. Such # acquisition would not cause dead-lock as they would just fail. if wait and (self.ui.configbool('devel', 'all-warnings') or self.ui.configbool('devel', 'check-locks')): l = self._lockref and self._lockref() if l is not None and l.held: self.ui.develwarn('"wlock" acquired after "lock"') def unlock(): if self.dirstate.pendingparentchange(): self.dirstate.invalidate() else: self.dirstate.write(None) self._filecache['dirstate'].refresh() l = self._lock(self.vfs, "wlock", wait, unlock, self.invalidatedirstate, _('working directory of %s') % self.origroot, inheritchecker=self._wlockchecktransaction, parentenvvar='HG_WLOCK_LOCKER') self._wlockref = weakref.ref(l) return l def _currentlock(self, lockref): """Returns the lock if it's held, or None if it's not.""" if lockref is None: return None l = lockref() if l is None or not l.held: return None return l def currentwlock(self): """Returns the wlock if it's held, or None if it's not.""" return self._currentlock(self._wlockref) def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist): """ commit an individual file as part of a larger transaction """ fname = fctx.path() fparent1 = manifest1.get(fname, nullid) fparent2 = manifest2.get(fname, nullid) if isinstance(fctx, context.filectx): node = fctx.filenode() if node in [fparent1, fparent2]: self.ui.debug('reusing %s filelog entry\n' % fname) return node flog = self.file(fname) meta = {} copy = fctx.renamed() if copy and copy[0] != fname: # Mark the new revision of this file as a copy of another # file. This copy data will effectively act as a parent # of this new revision. 
If this is a merge, the first # parent will be the nullid (meaning "look up the copy data") # and the second one will be the other parent. For example: # # 0 --- 1 --- 3 rev1 changes file foo # \ / rev2 renames foo to bar and changes it # \- 2 -/ rev3 should have bar with all changes and # should record that bar descends from # bar in rev2 and foo in rev1 # # this allows this merge to succeed: # # 0 --- 1 --- 3 rev4 reverts the content change from rev2 # \ / merging rev3 and rev4 should use bar@rev2 # \- 2 --- 4 as the merge base # cfname = copy[0] crev = manifest1.get(cfname) newfparent = fparent2 if manifest2: # branch merge if fparent2 == nullid or crev is None: # copied on remote side if cfname in manifest2: crev = manifest2[cfname] newfparent = fparent1 # Here, we used to search backwards through history to try to find # where the file copy came from if the source of a copy was not in # the parent directory. However, this doesn't actually make sense to # do (what does a copy from something not in your working copy even # mean?) and it causes bugs (eg, issue4476). Instead, we will warn # the user that copy information was dropped, so if they didn't # expect this outcome it can be fixed, but this is the correct # behavior in this circumstance. if crev: self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev))) meta["copy"] = cfname meta["copyrev"] = hex(crev) fparent1, fparent2 = nullid, newfparent else: self.ui.warn(_("warning: can't find ancestor for '%s' " "copied from '%s'!\n") % (fname, cfname)) elif fparent1 == nullid: fparent1, fparent2 = fparent2, nullid elif fparent2 != nullid: # is one parent an ancestor of the other? fparentancestors = flog.commonancestorsheads(fparent1, fparent2) if fparent1 in fparentancestors: fparent1, fparent2 = fparent2, nullid elif fparent2 in fparentancestors: fparent2 = nullid # is the file changed? 
text = fctx.data() if fparent2 != nullid or flog.cmp(fparent1, text) or meta: changelist.append(fname) return flog.add(text, meta, tr, linkrev, fparent1, fparent2) # are just the flags changed during merge? elif fname in manifest1 and manifest1.flags(fname) != fctx.flags(): changelist.append(fname) return fparent1 @unfilteredmethod def commit(self, text="", user=None, date=None, match=None, force=False, editor=False, extra=None): """Add a new revision to current repository. Revision information is gathered from the working directory, match can be used to filter the committed files. If editor is supplied, it is called to get a commit message. """ if extra is None: extra = {} def fail(f, msg): raise error.Abort('%s: %s' % (f, msg)) if not match: match = matchmod.always(self.root, '') if not force: vdirs = [] match.explicitdir = vdirs.append match.bad = fail wlock = lock = tr = None try: wlock = self.wlock() lock = self.lock() # for recent changelog (see issue4368) wctx = self[None] merge = len(wctx.parents()) > 1 if not force and merge and match.ispartial(): raise error.Abort(_('cannot partially commit a merge ' '(do not specify files or patterns)')) status = self.status(match=match, clean=force) if force: status.modified.extend(status.clean) # mq may commit clean files # check subrepos subs = [] commitsubs = set() newstate = wctx.substate.copy() # only manage subrepos and .hgsubstate if .hgsub is present if '.hgsub' in wctx: # we'll decide whether to track this ourselves, thanks for c in status.modified, status.added, status.removed: if '.hgsubstate' in c: c.remove('.hgsubstate') # compare current state to last committed state # build new substate based on last committed state oldstate = wctx.p1().substate for s in sorted(newstate.keys()): if not match(s): # ignore working copy, use old state if present if s in oldstate: newstate[s] = oldstate[s] continue if not force: raise error.Abort( _("commit with new subrepo %s excluded") % s) dirtyreason = 
wctx.sub(s).dirtyreason(True) if dirtyreason: if not self.ui.configbool('ui', 'commitsubrepos'): raise error.Abort(dirtyreason, hint=_("use --subrepos for recursive commit")) subs.append(s) commitsubs.add(s) else: bs = wctx.sub(s).basestate() newstate[s] = (newstate[s][0], bs, newstate[s][2]) if oldstate.get(s, (None, None, None))[1] != bs: subs.append(s) # check for removed subrepos for p in wctx.parents(): r = [s for s in p.substate if s not in newstate] subs += [s for s in r if match(s)] if subs: if (not match('.hgsub') and '.hgsub' in (wctx.modified() + wctx.added())): raise error.Abort( _("can't commit subrepos without .hgsub")) status.modified.insert(0, '.hgsubstate') elif '.hgsub' in status.removed: # clean up .hgsubstate when .hgsub is removed if ('.hgsubstate' in wctx and '.hgsubstate' not in (status.modified + status.added + status.removed)): status.removed.insert(0, '.hgsubstate') # make sure all explicit patterns are matched if not force and (match.isexact() or match.prefix()): matched = set(status.modified + status.added + status.removed) for f in match.files(): f = self.dirstate.normalize(f) if f == '.' 
or f in matched or f in wctx.substate: continue if f in status.deleted: fail(f, _('file not found!')) if f in vdirs: # visited directory d = f + '/' for mf in matched: if mf.startswith(d): break else: fail(f, _("no match under directory!")) elif f not in self.dirstate: fail(f, _("file not tracked!")) cctx = context.workingcommitctx(self, status, text, user, date, extra) # internal config: ui.allowemptycommit allowemptycommit = (wctx.branch() != wctx.p1().branch() or extra.get('close') or merge or cctx.files() or self.ui.configbool('ui', 'allowemptycommit')) if not allowemptycommit: return None if merge and cctx.deleted(): raise error.Abort(_("cannot commit merge with missing files")) ms = mergemod.mergestate.read(self) if list(ms.unresolved()): raise error.Abort(_('unresolved merge conflicts ' '(see "hg help resolve")')) if ms.mdstate() != 's' or list(ms.driverresolved()): raise error.Abort(_('driver-resolved merge conflicts'), hint=_('run "hg resolve --all" to resolve')) if editor: cctx._text = editor(self, cctx, subs) edited = (text != cctx._text) # Save commit message in case this transaction gets rolled back # (e.g. by a pretxncommit hook). Leave the content alone on # the assumption that the user will use the same editor again. 
msgfn = self.savecommitmessage(cctx._text) # commit subs and write new state if subs: for s in sorted(commitsubs): sub = wctx.sub(s) self.ui.status(_('committing subrepository %s\n') % subrepo.subrelpath(sub)) sr = sub.commit(cctx._text, user, date) newstate[s] = (newstate[s][0], sr) subrepo.writestate(self, newstate) p1, p2 = self.dirstate.parents() hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '') try: self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2) tr = self.transaction('commit') ret = self.commitctx(cctx, True) except: # re-raises if edited: self.ui.write( _('note: commit message saved in %s\n') % msgfn) raise # update bookmarks, dirstate and mergestate bookmarks.update(self, [p1, p2], ret) cctx.markcommitted(ret) ms.reset() tr.close() finally: lockmod.release(tr, lock, wlock) def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2): # hack for command that use a temporary commit (eg: histedit) # temporary commit got stripped before hook release if self.changelog.hasnode(ret): self.hook("commit", node=node, parent1=parent1, parent2=parent2) self._afterlock(commithook) return ret @unfilteredmethod def commitctx(self, ctx, error=False): """Add a new revision to current repository. Revision information is passed via the context argument. 
""" tr = None p1, p2 = ctx.p1(), ctx.p2() user = ctx.user() lock = self.lock() try: tr = self.transaction("commit") trp = weakref.proxy(tr) if ctx.files(): m1 = p1.manifest() m2 = p2.manifest() m = m1.copy() # check in files added = [] changed = [] removed = list(ctx.removed()) linkrev = len(self) self.ui.note(_("committing files:\n")) for f in sorted(ctx.modified() + ctx.added()): self.ui.note(f + "\n") try: fctx = ctx[f] if fctx is None: removed.append(f) else: added.append(f) m[f] = self._filecommit(fctx, m1, m2, linkrev, trp, changed) m.setflag(f, fctx.flags()) except OSError as inst: self.ui.warn(_("trouble committing %s!\n") % f) raise except IOError as inst: errcode = getattr(inst, 'errno', errno.ENOENT) if error or errcode and errcode != errno.ENOENT: self.ui.warn(_("trouble committing %s!\n") % f) raise # update manifest self.ui.note(_("committing manifest\n")) removed = [f for f in sorted(removed) if f in m1 or f in m2] drop = [f for f in removed if f in m] for f in drop: del m[f] mn = self.manifest.add(m, trp, linkrev, p1.manifestnode(), p2.manifestnode(), added, drop) files = changed + removed else: mn = p1.manifestnode() files = [] # update changelog self.ui.note(_("committing changelog\n")) self.changelog.delayupdate(tr) n = self.changelog.add(mn, files, ctx.description(), trp, p1.node(), p2.node(), user, ctx.date(), ctx.extra().copy()) xp1, xp2 = p1.hex(), p2 and p2.hex() or '' self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1, parent2=xp2) # set the new commit is proper phase targetphase = subrepo.newcommitphase(self.ui, ctx) if targetphase: # retract boundary do not alter parent changeset. 
# if a parent have higher the resulting phase will # be compliant anyway # # if minimal phase was 0 we don't need to retract anything phases.retractboundary(self, tr, targetphase, [n]) tr.close() branchmap.updatecache(self.filtered('served')) return n finally: if tr: tr.release() lock.release() @unfilteredmethod def destroying(self): '''Inform the repository that nodes are about to be destroyed. Intended for use by strip and rollback, so there's a common place for anything that has to be done before destroying history. This is mostly useful for saving state that is in memory and waiting to be flushed when the current lock is released. Because a call to destroyed is imminent, the repo will be invalidated causing those changes to stay in memory (waiting for the next unlock), or vanish completely. ''' # When using the same lock to commit and strip, the phasecache is left # dirty after committing. Then when we strip, the repo is invalidated, # causing those changes to disappear. if '_phasecache' in vars(self): self._phasecache.write() @unfilteredmethod def destroyed(self): '''Inform the repository that nodes have been destroyed. Intended for use by strip and rollback, so there's a common place for anything that has to be done after destroying history. ''' # When one tries to: # 1) destroy nodes thus calling this method (e.g. strip) # 2) use phasecache somewhere (e.g. commit) # # then 2) will fail because the phasecache contains nodes that were # removed. We can either remove phasecache from the filecache, # causing it to reload next time it is accessed, or simply filter # the removed nodes now and write the updated cache. self._phasecache.filterunknown(self) self._phasecache.write() # update the 'served' branch cache to help read only server process # Thanks to branchcache collaboration this is done from the nearest # filtered subset and it is expected to be fast. branchmap.updatecache(self.filtered('served')) # Ensure the persistent tag cache is updated. 
Doing it now # means that the tag cache only has to worry about destroyed # heads immediately after a strip/rollback. That in turn # guarantees that "cachetip == currenttip" (comparing both rev # and node) always means no nodes have been added or destroyed. # XXX this is suboptimal when qrefresh'ing: we strip the current # head, refresh the tag cache, then immediately add a new head. # But I think doing it this way is necessary for the "instant # tag cache retrieval" case to work. self.invalidate() def walk(self, match, node=None): ''' walk recursively through the directory tree or a given changeset, finding all files matched by the match function ''' return self[node].walk(match) def status(self, node1='.', node2=None, match=None, ignored=False, clean=False, unknown=False, listsubrepos=False): '''a convenience method that calls node1.status(node2)''' return self[node1].status(node2, match, ignored, clean, unknown, listsubrepos) def heads(self, start=None): heads = self.changelog.heads(start) # sort the output in rev descending order return sorted(heads, key=self.changelog.rev, reverse=True) def branchheads(self, branch=None, start=None, closed=False): '''return a (possibly filtered) list of heads for the given branch Heads are returned in topological order, from newest to oldest. If branch is None, use the dirstate branch. If start is not None, return only heads reachable from start. If closed is True, return heads that are marked as closed as well. 
''' if branch is None: branch = self[None].branch() branches = self.branchmap() if branch not in branches: return [] # the cache returns heads ordered lowest to highest bheads = list(reversed(branches.branchheads(branch, closed=closed))) if start is not None: # filter out the heads that cannot be reached from startrev fbheads = set(self.changelog.nodesbetween([start], bheads)[2]) bheads = [h for h in bheads if h in fbheads] return bheads def branches(self, nodes): if not nodes: nodes = [self.changelog.tip()] b = [] for n in nodes: t = n while True: p = self.changelog.parents(n) if p[1] != nullid or p[0] == nullid: b.append((t, n, p[0], p[1])) break n = p[0] return b def between(self, pairs): r = [] for top, bottom in pairs: n, l, i = top, [], 0 f = 1 while n != bottom and n != nullid: p = self.changelog.parents(n)[0] if i == f: l.append(n) f = f * 2 n = p i += 1 r.append(l) return r def checkpush(self, pushop): """Extensions can override this function if additional checks have to be performed before pushing, or call it if they override push command. """ pass @unfilteredpropertycache def prepushoutgoinghooks(self): """Return util.hooks consists of "(repo, remote, outgoing)" functions, which are called before pushing changesets. 
""" return util.hooks() def pushkey(self, namespace, key, old, new): try: tr = self.currenttransaction() hookargs = {} if tr is not None: hookargs.update(tr.hookargs) hookargs['namespace'] = namespace hookargs['key'] = key hookargs['old'] = old hookargs['new'] = new self.hook('prepushkey', throw=True, **hookargs) except error.HookAbort as exc: self.ui.write_err(_("pushkey-abort: %s\n") % exc) if exc.hint: self.ui.write_err(_("(%s)\n") % exc.hint) return False self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key)) ret = pushkey.push(self, namespace, key, old, new) def runhook(): self.hook('pushkey', namespace=namespace, key=key, old=old, new=new, ret=ret) self._afterlock(runhook) return ret def listkeys(self, namespace): self.hook('prelistkeys', throw=True, namespace=namespace) self.ui.debug('listing keys for "%s"\n' % namespace) values = pushkey.list(self, namespace) self.hook('listkeys', namespace=namespace, values=values) return values def debugwireargs(self, one, two, three=None, four=None, five=None): '''used to test argument passing over the wire''' return "%s %s %s %s %s" % (one, two, three, four, five) def savecommitmessage(self, text): fp = self.vfs('last-message.txt', 'wb') try: fp.write(text) finally: fp.close() return self.pathto(fp.name[len(self.root) + 1:]) # used to avoid circular references so destructors work def aftertrans(files): renamefiles = [tuple(t) for t in files] def a(): for vfs, src, dest in renamefiles: try: vfs.rename(src, dest) except OSError: # journal file does not yet exist pass return a def undoname(fn): base, name = os.path.split(fn) assert name.startswith('journal') return os.path.join(base, name.replace('journal', 'undo', 1)) def instance(ui, path, create): return localrepository(ui, util.urllocalpath(path), create) def islocal(path): return True mercurial-3.7.3/mercurial/ui.py0000644000175000017500000013124412676531525016106 0ustar mpmmpm00000000000000# ui.py - user interface bits for mercurial # # Copyright 2005-2007 
Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import errno import getpass import inspect import os import re import socket import sys import tempfile import traceback from .i18n import _ from .node import hex from . import ( config, error, formatter, progress, scmutil, util, ) samplehgrcs = { 'user': """# example user config (see "hg help config" for more info) [ui] # name and email, e.g. # username = Jane Doe username = [extensions] # uncomment these lines to enable some popular extensions # (see "hg help extensions" for more info) # # pager = # progress = # color =""", 'cloned': """# example repository config (see "hg help config" for more info) [paths] default = %s # path aliases to other clones of this repo in URLs or filesystem paths # (see "hg help config.paths" for more info) # # default-push = ssh://jdoe@example.net/hg/jdoes-fork # my-fork = ssh://jdoe@example.net/hg/jdoes-fork # my-clone = /home/jdoe/jdoes-clone [ui] # name and email (local to this repository, optional), e.g. # username = Jane Doe """, 'local': """# example repository config (see "hg help config" for more info) [paths] # path aliases to other clones of this repo in URLs or filesystem paths # (see "hg help config.paths" for more info) # # default = http://example.com/hg/example-repo # default-push = ssh://jdoe@example.net/hg/jdoes-fork # my-fork = ssh://jdoe@example.net/hg/jdoes-fork # my-clone = /home/jdoe/jdoes-clone [ui] # name and email (local to this repository, optional), e.g. 
# username = Jane Doe """, 'global': """# example system-wide hg config (see "hg help config" for more info) [extensions] # uncomment these lines to enable some popular extensions # (see "hg help extensions" for more info) # # blackbox = # progress = # color = # pager =""", } class ui(object): def __init__(self, src=None): # _buffers: used for temporary capture of output self._buffers = [] # 3-tuple describing how each buffer in the stack behaves. # Values are (capture stderr, capture subprocesses, apply labels). self._bufferstates = [] # When a buffer is active, defines whether we are expanding labels. # This exists to prevent an extra list lookup. self._bufferapplylabels = None self.quiet = self.verbose = self.debugflag = self.tracebackflag = False self._reportuntrusted = True self._ocfg = config.config() # overlay self._tcfg = config.config() # trusted self._ucfg = config.config() # untrusted self._trustusers = set() self._trustgroups = set() self.callhooks = True if src: self.fout = src.fout self.ferr = src.ferr self.fin = src.fin self._tcfg = src._tcfg.copy() self._ucfg = src._ucfg.copy() self._ocfg = src._ocfg.copy() self._trustusers = src._trustusers.copy() self._trustgroups = src._trustgroups.copy() self.environ = src.environ self.callhooks = src.callhooks self.fixconfig() else: self.fout = sys.stdout self.ferr = sys.stderr self.fin = sys.stdin # shared read-only environment self.environ = os.environ # we always trust global config files for f in scmutil.rcpath(): self.readconfig(f, trust=True) def copy(self): return self.__class__(self) def formatter(self, topic, opts): return formatter.formatter(self, topic, opts) def _trusted(self, fp, f): st = util.fstat(fp) if util.isowner(st): return True tusers, tgroups = self._trustusers, self._trustgroups if '*' in tusers or '*' in tgroups: return True user = util.username(st.st_uid) group = util.groupname(st.st_gid) if user in tusers or group in tgroups or user == util.username(): return True if 
self._reportuntrusted: self.warn(_('not trusting file %s from untrusted ' 'user %s, group %s\n') % (f, user, group)) return False def readconfig(self, filename, root=None, trust=False, sections=None, remap=None): try: fp = open(filename) except IOError: if not sections: # ignore unless we were looking for something return raise cfg = config.config() trusted = sections or trust or self._trusted(fp, filename) try: cfg.read(filename, fp, sections=sections, remap=remap) fp.close() except error.ConfigError as inst: if trusted: raise self.warn(_("ignored: %s\n") % str(inst)) if self.plain(): for k in ('debug', 'fallbackencoding', 'quiet', 'slash', 'logtemplate', 'statuscopies', 'style', 'traceback', 'verbose'): if k in cfg['ui']: del cfg['ui'][k] for k, v in cfg.items('defaults'): del cfg['defaults'][k] # Don't remove aliases from the configuration if in the exceptionlist if self.plain('alias'): for k, v in cfg.items('alias'): del cfg['alias'][k] if self.plain('revsetalias'): for k, v in cfg.items('revsetalias'): del cfg['revsetalias'][k] if trusted: self._tcfg.update(cfg) self._tcfg.update(self._ocfg) self._ucfg.update(cfg) self._ucfg.update(self._ocfg) if root is None: root = os.path.expanduser('~') self.fixconfig(root=root) def fixconfig(self, root=None, section=None): if section in (None, 'paths'): # expand vars and ~ # translate paths relative to root (or home) into absolute paths root = root or os.getcwd() for c in self._tcfg, self._ucfg, self._ocfg: for n, p in c.items('paths'): if not p: continue if '%%' in p: self.warn(_("(deprecated '%%' in path %s=%s from %s)\n") % (n, p, self.configsource('paths', n))) p = p.replace('%%', '%') p = util.expandpath(p) if not util.hasscheme(p) and not os.path.isabs(p): p = os.path.normpath(os.path.join(root, p)) c.set("paths", n, p) if section in (None, 'ui'): # update ui options self.debugflag = self.configbool('ui', 'debug') self.verbose = self.debugflag or self.configbool('ui', 'verbose') self.quiet = not self.debugflag and 
self.configbool('ui', 'quiet') if self.verbose and self.quiet: self.quiet = self.verbose = False self._reportuntrusted = self.debugflag or self.configbool("ui", "report_untrusted", True) self.tracebackflag = self.configbool('ui', 'traceback', False) if section in (None, 'trusted'): # update trust information self._trustusers.update(self.configlist('trusted', 'users')) self._trustgroups.update(self.configlist('trusted', 'groups')) def backupconfig(self, section, item): return (self._ocfg.backup(section, item), self._tcfg.backup(section, item), self._ucfg.backup(section, item),) def restoreconfig(self, data): self._ocfg.restore(data[0]) self._tcfg.restore(data[1]) self._ucfg.restore(data[2]) def setconfig(self, section, name, value, source=''): for cfg in (self._ocfg, self._tcfg, self._ucfg): cfg.set(section, name, value, source) self.fixconfig(section=section) def _data(self, untrusted): return untrusted and self._ucfg or self._tcfg def configsource(self, section, name, untrusted=False): return self._data(untrusted).source(section, name) or 'none' def config(self, section, name, default=None, untrusted=False): if isinstance(name, list): alternates = name else: alternates = [name] for n in alternates: value = self._data(untrusted).get(section, n, None) if value is not None: name = n break else: value = default if self.debugflag and not untrusted and self._reportuntrusted: for n in alternates: uvalue = self._ucfg.get(section, n) if uvalue is not None and uvalue != value: self.debug("ignoring untrusted configuration option " "%s.%s = %s\n" % (section, n, uvalue)) return value def configsuboptions(self, section, name, default=None, untrusted=False): """Get a config option and all sub-options. Some config options have sub-options that are declared with the format "key:opt = value". This method is used to return the main option and all its declared sub-options. 
Returns a 2-tuple of ``(option, sub-options)``, where `sub-options`` is a dict of defined sub-options where keys and values are strings. """ data = self._data(untrusted) main = data.get(section, name, default) if self.debugflag and not untrusted and self._reportuntrusted: uvalue = self._ucfg.get(section, name) if uvalue is not None and uvalue != main: self.debug('ignoring untrusted configuration option ' '%s.%s = %s\n' % (section, name, uvalue)) sub = {} prefix = '%s:' % name for k, v in data.items(section): if k.startswith(prefix): sub[k[len(prefix):]] = v if self.debugflag and not untrusted and self._reportuntrusted: for k, v in sub.items(): uvalue = self._ucfg.get(section, '%s:%s' % (name, k)) if uvalue is not None and uvalue != v: self.debug('ignoring untrusted configuration option ' '%s:%s.%s = %s\n' % (section, name, k, uvalue)) return main, sub def configpath(self, section, name, default=None, untrusted=False): 'get a path config item, expanded relative to repo root or config file' v = self.config(section, name, default, untrusted) if v is None: return None if not os.path.isabs(v) or "://" not in v: src = self.configsource(section, name, untrusted) if ':' in src: base = os.path.dirname(src.rsplit(':')[0]) v = os.path.join(base, os.path.expanduser(v)) return v def configbool(self, section, name, default=False, untrusted=False): """parse a configuration element as a boolean >>> u = ui(); s = 'foo' >>> u.setconfig(s, 'true', 'yes') >>> u.configbool(s, 'true') True >>> u.setconfig(s, 'false', 'no') >>> u.configbool(s, 'false') False >>> u.configbool(s, 'unknown') False >>> u.configbool(s, 'unknown', True) True >>> u.setconfig(s, 'invalid', 'somevalue') >>> u.configbool(s, 'invalid') Traceback (most recent call last): ... 
ConfigError: foo.invalid is not a boolean ('somevalue') """ v = self.config(section, name, None, untrusted) if v is None: return default if isinstance(v, bool): return v b = util.parsebool(v) if b is None: raise error.ConfigError(_("%s.%s is not a boolean ('%s')") % (section, name, v)) return b def configint(self, section, name, default=None, untrusted=False): """parse a configuration element as an integer >>> u = ui(); s = 'foo' >>> u.setconfig(s, 'int1', '42') >>> u.configint(s, 'int1') 42 >>> u.setconfig(s, 'int2', '-42') >>> u.configint(s, 'int2') -42 >>> u.configint(s, 'unknown', 7) 7 >>> u.setconfig(s, 'invalid', 'somevalue') >>> u.configint(s, 'invalid') Traceback (most recent call last): ... ConfigError: foo.invalid is not an integer ('somevalue') """ v = self.config(section, name, None, untrusted) if v is None: return default try: return int(v) except ValueError: raise error.ConfigError(_("%s.%s is not an integer ('%s')") % (section, name, v)) def configbytes(self, section, name, default=0, untrusted=False): """parse a configuration element as a quantity in bytes Units can be specified as b (bytes), k or kb (kilobytes), m or mb (megabytes), g or gb (gigabytes). >>> u = ui(); s = 'foo' >>> u.setconfig(s, 'val1', '42') >>> u.configbytes(s, 'val1') 42 >>> u.setconfig(s, 'val2', '42.5 kb') >>> u.configbytes(s, 'val2') 43520 >>> u.configbytes(s, 'unknown', '7 MB') 7340032 >>> u.setconfig(s, 'invalid', 'somevalue') >>> u.configbytes(s, 'invalid') Traceback (most recent call last): ... 
ConfigError: foo.invalid is not a byte quantity ('somevalue') """ value = self.config(section, name) if value is None: if not isinstance(default, str): return default value = default try: return util.sizetoint(value) except error.ParseError: raise error.ConfigError(_("%s.%s is not a byte quantity ('%s')") % (section, name, value)) def configlist(self, section, name, default=None, untrusted=False): """parse a configuration element as a list of comma/space separated strings >>> u = ui(); s = 'foo' >>> u.setconfig(s, 'list1', 'this,is "a small" ,test') >>> u.configlist(s, 'list1') ['this', 'is', 'a small', 'test'] """ def _parse_plain(parts, s, offset): whitespace = False while offset < len(s) and (s[offset].isspace() or s[offset] == ','): whitespace = True offset += 1 if offset >= len(s): return None, parts, offset if whitespace: parts.append('') if s[offset] == '"' and not parts[-1]: return _parse_quote, parts, offset + 1 elif s[offset] == '"' and parts[-1][-1] == '\\': parts[-1] = parts[-1][:-1] + s[offset] return _parse_plain, parts, offset + 1 parts[-1] += s[offset] return _parse_plain, parts, offset + 1 def _parse_quote(parts, s, offset): if offset < len(s) and s[offset] == '"': # "" parts.append('') offset += 1 while offset < len(s) and (s[offset].isspace() or s[offset] == ','): offset += 1 return _parse_plain, parts, offset while offset < len(s) and s[offset] != '"': if (s[offset] == '\\' and offset + 1 < len(s) and s[offset + 1] == '"'): offset += 1 parts[-1] += '"' else: parts[-1] += s[offset] offset += 1 if offset >= len(s): real_parts = _configlist(parts[-1]) if not real_parts: parts[-1] = '"' else: real_parts[0] = '"' + real_parts[0] parts = parts[:-1] parts.extend(real_parts) return None, parts, offset offset += 1 while offset < len(s) and s[offset] in [' ', ',']: offset += 1 if offset < len(s): if offset + 1 == len(s) and s[offset] == '"': parts[-1] += '"' offset += 1 else: parts.append('') else: return None, parts, offset return _parse_plain, parts, 
offset def _configlist(s): s = s.rstrip(' ,') if not s: return [] parser, parts, offset = _parse_plain, [''], 0 while parser: parser, parts, offset = parser(parts, s, offset) return parts result = self.config(section, name, untrusted=untrusted) if result is None: result = default or [] if isinstance(result, basestring): result = _configlist(result.lstrip(' ,\n')) if result is None: result = default or [] return result def hasconfig(self, section, name, untrusted=False): return self._data(untrusted).hasitem(section, name) def has_section(self, section, untrusted=False): '''tell whether section exists in config.''' return section in self._data(untrusted) def configitems(self, section, untrusted=False, ignoresub=False): items = self._data(untrusted).items(section) if ignoresub: newitems = {} for k, v in items: if ':' not in k: newitems[k] = v items = newitems.items() if self.debugflag and not untrusted and self._reportuntrusted: for k, v in self._ucfg.items(section): if self._tcfg.get(section, k) != v: self.debug("ignoring untrusted configuration option " "%s.%s = %s\n" % (section, k, v)) return items def walkconfig(self, untrusted=False): cfg = self._data(untrusted) for section in cfg.sections(): for name, value in self.configitems(section, untrusted): yield section, name, value def plain(self, feature=None): '''is plain mode active? Plain mode means that all configuration variables which affect the behavior and output of Mercurial should be ignored. Additionally, the output should be stable, reproducible and suitable for use in scripts or applications. The only way to trigger plain mode is by setting either the `HGPLAIN' or `HGPLAINEXCEPT' environment variables. 
The return value can either be - False if HGPLAIN is not set, or feature is in HGPLAINEXCEPT - True otherwise ''' if 'HGPLAIN' not in os.environ and 'HGPLAINEXCEPT' not in os.environ: return False exceptions = os.environ.get('HGPLAINEXCEPT', '').strip().split(',') if feature and exceptions: return feature not in exceptions return True def username(self): """Return default username to be used in commits. Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL and stop searching if one of these is set. If not found and ui.askusername is True, ask the user, else use ($LOGNAME or $USER or $LNAME or $USERNAME) + "@full.hostname". """ user = os.environ.get("HGUSER") if user is None: user = self.config("ui", ["username", "user"]) if user is not None: user = os.path.expandvars(user) if user is None: user = os.environ.get("EMAIL") if user is None and self.configbool("ui", "askusername"): user = self.prompt(_("enter a commit username:"), default=None) if user is None and not self.interactive(): try: user = '%s@%s' % (util.getuser(), socket.getfqdn()) self.warn(_("no username found, using '%s' instead\n") % user) except KeyError: pass if not user: raise error.Abort(_('no username supplied'), hint=_('use "hg config --edit" ' 'to set your username')) if "\n" in user: raise error.Abort(_("username %s contains a newline\n") % repr(user)) return user def shortuser(self, user): """Return a short representation of a user name or email address.""" if not self.verbose: user = util.shortuser(user) return user def expandpath(self, loc, default=None): """Return repository location relative to cwd or from [paths]""" try: p = self.paths.getpath(loc) if p: return p.rawloc except error.RepoError: pass if default: try: p = self.paths.getpath(default) if p: return p.rawloc except error.RepoError: pass return loc @util.propertycache def paths(self): return paths(self) def pushbuffer(self, error=False, subproc=False, labeled=False): """install a buffer to capture standard output of the ui 
object If error is True, the error output will be captured too. If subproc is True, output from subprocesses (typically hooks) will be captured too. If labeled is True, any labels associated with buffered output will be handled. By default, this has no effect on the output returned, but extensions and GUI tools may handle this argument and returned styled output. If output is being buffered so it can be captured and parsed or processed, labeled should not be set to True. """ self._buffers.append([]) self._bufferstates.append((error, subproc, labeled)) self._bufferapplylabels = labeled def popbuffer(self): '''pop the last buffer and return the buffered output''' self._bufferstates.pop() if self._bufferstates: self._bufferapplylabels = self._bufferstates[-1][2] else: self._bufferapplylabels = None return "".join(self._buffers.pop()) def write(self, *args, **opts): '''write args to output By default, this method simply writes to the buffer or stdout, but extensions or GUI tools may override this method, write_err(), popbuffer(), and label() to style output from various parts of hg. An optional keyword argument, "label", can be passed in. This should be a string containing label names separated by space. Label names take the form of "topic.type". For example, ui.debug() issues a label of "ui.debug". When labeling output for a specific command, a label of "cmdname.type" is recommended. For example, status issues a label of "status.modified" for modified files. ''' if self._buffers: self._buffers[-1].extend(a for a in args) else: self._progclear() for a in args: self.fout.write(a) def write_err(self, *args, **opts): self._progclear() try: if self._bufferstates and self._bufferstates[-1][0]: return self.write(*args, **opts) if not getattr(self.fout, 'closed', False): self.fout.flush() for a in args: self.ferr.write(a) # stderr may be buffered under win32 when redirected to files, # including stdout. 
if not getattr(self.ferr, 'closed', False): self.ferr.flush() except IOError as inst: if inst.errno not in (errno.EPIPE, errno.EIO, errno.EBADF): raise def flush(self): try: self.fout.flush() except (IOError, ValueError): pass try: self.ferr.flush() except (IOError, ValueError): pass def _isatty(self, fh): if self.configbool('ui', 'nontty', False): return False return util.isatty(fh) def interactive(self): '''is interactive input allowed? An interactive session is a session where input can be reasonably read from `sys.stdin'. If this function returns false, any attempt to read from stdin should fail with an error, unless a sensible default has been specified. Interactiveness is triggered by the value of the `ui.interactive' configuration variable or - if it is unset - when `sys.stdin' points to a terminal device. This function refers to input only; for output, see `ui.formatted()'. ''' i = self.configbool("ui", "interactive", None) if i is None: # some environments replace stdin without implementing isatty # usually those are non-interactive return self._isatty(self.fin) return i def termwidth(self): '''how wide is the terminal in columns? ''' if 'COLUMNS' in os.environ: try: return int(os.environ['COLUMNS']) except ValueError: pass return util.termwidth() def formatted(self): '''should formatted output be used? It is often desirable to format the output to suite the output medium. Examples of this are truncating long lines or colorizing messages. However, this is not often not desirable when piping output into other utilities, e.g. `grep'. Formatted output is triggered by the value of the `ui.formatted' configuration variable or - if it is unset - when `sys.stdout' points to a terminal device. Please note that `ui.formatted' should be considered an implementation detail; it is not intended for use outside Mercurial or its extensions. This function refers to output only; for input, see `ui.interactive()'. 
This function always returns false when in plain mode, see `ui.plain()'. ''' if self.plain(): return False i = self.configbool("ui", "formatted", None) if i is None: # some environments replace stdout without implementing isatty # usually those are non-interactive return self._isatty(self.fout) return i def _readline(self, prompt=''): if self._isatty(self.fin): try: # magically add command line editing support, where # available import readline # force demandimport to really load the module readline.read_history_file # windows sometimes raises something other than ImportError except Exception: pass # call write() so output goes through subclassed implementation # e.g. color extension on Windows self.write(prompt) # instead of trying to emulate raw_input, swap (self.fin, # self.fout) with (sys.stdin, sys.stdout) oldin = sys.stdin oldout = sys.stdout sys.stdin = self.fin sys.stdout = self.fout # prompt ' ' must exist; otherwise readline may delete entire line # - http://bugs.python.org/issue12833 line = raw_input(' ') sys.stdin = oldin sys.stdout = oldout # When stdin is in binary mode on Windows, it can cause # raw_input() to emit an extra trailing carriage return if os.linesep == '\r\n' and line and line[-1] == '\r': line = line[:-1] return line def prompt(self, msg, default="y"): """Prompt user with msg, read response. If ui is not interactive, the default is returned. """ if not self.interactive(): self.write(msg, ' ', default or '', "\n") return default try: r = self._readline(self.label(msg, 'ui.prompt')) if not r: r = default if self.configbool('ui', 'promptecho'): self.write(r, "\n") return r except EOFError: raise error.ResponseExpected() @staticmethod def extractchoices(prompt): """Extract prompt message and list of choices from specified prompt. This returns tuple "(message, choices)", and "choices" is the list of tuple "(response character, text without &)". >>> ui.extractchoices("awake? $$ &Yes $$ &No") ('awake? 
', [('y', 'Yes'), ('n', 'No')]) >>> ui.extractchoices("line\\nbreak? $$ &Yes $$ &No") ('line\\nbreak? ', [('y', 'Yes'), ('n', 'No')]) >>> ui.extractchoices("want lots of $$money$$?$$Ye&s$$N&o") ('want lots of $$money$$?', [('s', 'Yes'), ('o', 'No')]) """ # Sadly, the prompt string may have been built with a filename # containing "$$" so let's try to find the first valid-looking # prompt to start parsing. Sadly, we also can't rely on # choices containing spaces, ASCII, or basically anything # except an ampersand followed by a character. m = re.match(r'(?s)(.+?)\$\$([^\$]*&[^ \$].*)', prompt) msg = m.group(1) choices = [p.strip(' ') for p in m.group(2).split('$$')] return (msg, [(s[s.index('&') + 1].lower(), s.replace('&', '', 1)) for s in choices]) def promptchoice(self, prompt, default=0): """Prompt user with a message, read response, and ensure it matches one of the provided choices. The prompt is formatted as follows: "would you like fries with that (Yn)? $$ &Yes $$ &No" The index of the choice is returned. Responses are case insensitive. If ui is not interactive, the default is returned. """ msg, choices = self.extractchoices(prompt) resps = [r for r, t in choices] while True: r = self.prompt(msg, resps[default]) if r.lower() in resps: return resps.index(r.lower()) self.write(_("unrecognized response\n")) def getpass(self, prompt=None, default=None): if not self.interactive(): return default try: self.write_err(self.label(prompt or _('password: '), 'ui.prompt')) # disable getpass() only if explicitly specified. it's still valid # to interact with tty even if fin is not a tty. if self.configbool('ui', 'nontty'): return self.fin.readline().rstrip('\n') else: return getpass.getpass('') except EOFError: raise error.ResponseExpected() def status(self, *msg, **opts): '''write status message to output (if ui.quiet is False) This adds an output label of "ui.status". 
''' if not self.quiet: opts['label'] = opts.get('label', '') + ' ui.status' self.write(*msg, **opts) def warn(self, *msg, **opts): '''write warning message to output (stderr) This adds an output label of "ui.warning". ''' opts['label'] = opts.get('label', '') + ' ui.warning' self.write_err(*msg, **opts) def note(self, *msg, **opts): '''write note to output (if ui.verbose is True) This adds an output label of "ui.note". ''' if self.verbose: opts['label'] = opts.get('label', '') + ' ui.note' self.write(*msg, **opts) def debug(self, *msg, **opts): '''write debug message to output (if ui.debugflag is True) This adds an output label of "ui.debug". ''' if self.debugflag: opts['label'] = opts.get('label', '') + ' ui.debug' self.write(*msg, **opts) def edit(self, text, user, extra=None, editform=None, pending=None): extra_defaults = { 'prefix': 'editor' } if extra is not None: extra_defaults.update(extra) extra = extra_defaults (fd, name) = tempfile.mkstemp(prefix='hg-' + extra['prefix'] + '-', suffix=".txt", text=True) try: f = os.fdopen(fd, "w") f.write(text) f.close() environ = {'HGUSER': user} if 'transplant_source' in extra: environ.update({'HGREVISION': hex(extra['transplant_source'])}) for label in ('intermediate-source', 'source', 'rebase_source'): if label in extra: environ.update({'HGREVISION': extra[label]}) break if editform: environ.update({'HGEDITFORM': editform}) if pending: environ.update({'HG_PENDING': pending}) editor = self.geteditor() self.system("%s \"%s\"" % (editor, name), environ=environ, onerr=error.Abort, errprefix=_("edit failed")) f = open(name) t = f.read() f.close() finally: os.unlink(name) return t def system(self, cmd, environ=None, cwd=None, onerr=None, errprefix=None): '''execute shell command with appropriate output stream. command output will be redirected if fout is not stdout. 
''' out = self.fout if any(s[1] for s in self._bufferstates): out = self return util.system(cmd, environ=environ, cwd=cwd, onerr=onerr, errprefix=errprefix, out=out) def traceback(self, exc=None, force=False): '''print exception traceback if traceback printing enabled or forced. only to call in exception handler. returns true if traceback printed.''' if self.tracebackflag or force: if exc is None: exc = sys.exc_info() cause = getattr(exc[1], 'cause', None) if cause is not None: causetb = traceback.format_tb(cause[2]) exctb = traceback.format_tb(exc[2]) exconly = traceback.format_exception_only(cause[0], cause[1]) # exclude frame where 'exc' was chained and rethrown from exctb self.write_err('Traceback (most recent call last):\n', ''.join(exctb[:-1]), ''.join(causetb), ''.join(exconly)) else: output = traceback.format_exception(exc[0], exc[1], exc[2]) self.write_err(''.join(output)) return self.tracebackflag or force def geteditor(self): '''return editor to use''' if sys.platform == 'plan9': # vi is the MIPS instruction simulator on Plan 9. We # instead default to E to plumb commit messages to # avoid confusion. editor = 'E' else: editor = 'vi' return (os.environ.get("HGEDITOR") or self.config("ui", "editor") or os.environ.get("VISUAL") or os.environ.get("EDITOR", editor)) @util.propertycache def _progbar(self): """setup the progbar singleton to the ui object""" if (self.quiet or self.debugflag or self.configbool('progress', 'disable', False) or not progress.shouldprint(self)): return None return getprogbar(self) def _progclear(self): """clear progress bar output if any. use it before any output""" if '_progbar' not in vars(self): # nothing loaded yet return if self._progbar is not None and self._progbar.printed: self._progbar.clear() def progress(self, topic, pos, item="", unit="", total=None): '''show a progress message With stock hg, this is simply a debug message that is hidden by default, but with extensions or GUI tools it may be visible. 
'topic' is the current operation, 'item' is a non-numeric marker of the current position (i.e. the currently in-process file), 'pos' is the current numeric position (i.e. revision, bytes, etc.), unit is a corresponding unit label, and total is the highest expected pos. Multiple nested topics may be active at a time. All topics should be marked closed by setting pos to None at termination. ''' if self._progbar is not None: self._progbar.progress(topic, pos, item=item, unit=unit, total=total) if pos is None or not self.configbool('progress', 'debug'): return if unit: unit = ' ' + unit if item: item = ' ' + item if total: pct = 100.0 * pos / total self.debug('%s:%s %s/%s%s (%4.2f%%)\n' % (topic, item, pos, total, unit, pct)) else: self.debug('%s:%s %s%s\n' % (topic, item, pos, unit)) def log(self, service, *msg, **opts): '''hook for logging facility extensions service should be a readily-identifiable subsystem, which will allow filtering. *msg should be a newline-terminated format string to log, and then any values to %-format into that format string. **opts currently has no defined meanings. ''' def label(self, msg, label): '''style msg based on supplied label Like ui.write(), this just returns msg unchanged, but extensions and GUI tools can override it to allow styling output without writing it. ui.write(s, 'label') is equivalent to ui.write(ui.label(s, 'label')). ''' return msg def develwarn(self, msg, stacklevel=1): """issue a developer warning message Use 'stacklevel' to report the offender some layers further up in the stack. 
""" msg = 'devel-warn: ' + msg stacklevel += 1 # get in develwarn if self.tracebackflag: util.debugstacktrace(msg, stacklevel, self.ferr, self.fout) else: curframe = inspect.currentframe() calframe = inspect.getouterframes(curframe, 2) self.write_err('%s at: %s:%s (%s)\n' % ((msg,) + calframe[stacklevel][1:4])) def deprecwarn(self, msg, version): """issue a deprecation warning - msg: message explaining what is deprecated and how to upgrade, - version: last version where the API will be supported, """ msg += ("\n(compatibility will be dropped after Mercurial-%s," " update your code.)") % version self.develwarn(msg, stacklevel=2) class paths(dict): """Represents a collection of paths and their configs. Data is initially derived from ui instances and the config files they have loaded. """ def __init__(self, ui): dict.__init__(self) for name, loc in ui.configitems('paths', ignoresub=True): # No location is the same as not existing. if not loc: continue loc, sub = ui.configsuboptions('paths', name) self[name] = path(ui, name, rawloc=loc, suboptions=sub) def getpath(self, name, default=None): """Return a ``path`` from a string, falling back to default. ``name`` can be a named path or locations. Locations are filesystem paths or URIs. Returns None if ``name`` is not a registered path, a URI, or a local path to a repo. """ # Only fall back to default if no path was requested. if name is None: if not default: default = () elif not isinstance(default, (tuple, list)): default = (default,) for k in default: try: return self[k] except KeyError: continue return None # Most likely empty string. # This may need to raise in the future. if not name: return None try: return self[name] except KeyError: # Try to resolve as a local path or URI. try: # We don't pass sub-options in, so no need to pass ui instance. 
return path(None, None, rawloc=name) except ValueError: raise error.RepoError(_('repository %s does not exist') % name) _pathsuboptions = {} def pathsuboption(option, attr): """Decorator used to declare a path sub-option. Arguments are the sub-option name and the attribute it should set on ``path`` instances. The decorated function will receive as arguments a ``ui`` instance, ``path`` instance, and the string value of this option from the config. The function should return the value that will be set on the ``path`` instance. This decorator can be used to perform additional verification of sub-options and to change the type of sub-options. """ def register(func): _pathsuboptions[option] = (attr, func) return func return register @pathsuboption('pushurl', 'pushloc') def pushurlpathoption(ui, path, value): u = util.url(value) # Actually require a URL. if not u.scheme: ui.warn(_('(paths.%s:pushurl not a URL; ignoring)\n') % path.name) return None # Don't support the #foo syntax in the push URL to declare branch to # push. if u.fragment: ui.warn(_('("#fragment" in paths.%s:pushurl not supported; ' 'ignoring)\n') % path.name) u.fragment = None return str(u) class path(object): """Represents an individual path and its configuration.""" def __init__(self, ui, name, rawloc=None, suboptions=None): """Construct a path from its config options. ``ui`` is the ``ui`` instance the path is coming from. ``name`` is the symbolic name of the path. ``rawloc`` is the raw location, as defined in the config. ``pushloc`` is the raw locations pushes should be made to. If ``name`` is not defined, we require that the location be a) a local filesystem path with a .hg directory or b) a URL. If not, ``ValueError`` is raised. """ if not rawloc: raise ValueError('rawloc must be defined') # Locations may define branches via syntax #. 
u = util.url(rawloc) branch = None if u.fragment: branch = u.fragment u.fragment = None self.url = u self.branch = branch self.name = name self.rawloc = rawloc self.loc = str(u) # When given a raw location but not a symbolic name, validate the # location is valid. if not name and not u.scheme and not self._isvalidlocalpath(self.loc): raise ValueError('location is not a URL or path to a local ' 'repo: %s' % rawloc) suboptions = suboptions or {} # Now process the sub-options. If a sub-option is registered, its # attribute will always be present. The value will be None if there # was no valid sub-option. for suboption, (attr, func) in _pathsuboptions.iteritems(): if suboption not in suboptions: setattr(self, attr, None) continue value = func(ui, self, suboptions[suboption]) setattr(self, attr, value) def _isvalidlocalpath(self, path): """Returns True if the given path is a potentially valid repository. This is its own function so that extensions can change the definition of 'valid' in this case (like when pulling from a git repo into a hg one).""" return os.path.isdir(os.path.join(path, '.hg')) @property def suboptions(self): """Return sub-options and their values for this path. This is intended to be used for presentation purposes. """ d = {} for subopt, (attr, _func) in _pathsuboptions.iteritems(): value = getattr(self, attr) if value is not None: d[subopt] = value return d # we instantiate one globally shared progress bar to avoid # competing progress bars when multiple UI objects get created _progresssingleton = None def getprogbar(ui): global _progresssingleton if _progresssingleton is None: # passing 'ui' object to the singleton is fishy, # this is how the extension used to work but feel free to rework it. 
_progresssingleton = progress.progbar(ui) return _progresssingleton mercurial-3.7.3/mercurial/patch.py0000644000175000017500000025622212676531525016574 0ustar mpmmpm00000000000000# patch.py - patch file parsing routines # # Copyright 2006 Brendan Cully # Copyright 2007 Chris Mason # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import cStringIO import collections import copy import email import errno import os import posixpath import re import shutil import tempfile import zlib from .i18n import _ from .node import ( hex, short, ) from . import ( base85, copies, diffhelpers, encoding, error, mdiff, pathutil, scmutil, util, ) gitre = re.compile('diff --git a/(.*) b/(.*)') tabsplitter = re.compile(r'(\t+|[^\t]+)') class PatchError(Exception): pass # public functions def split(stream): '''return an iterator of individual patches from a stream''' def isheader(line, inheader): if inheader and line[0] in (' ', '\t'): # continuation return True if line[0] in (' ', '-', '+'): # diff line - don't check for header pattern in there return False l = line.split(': ', 1) return len(l) == 2 and ' ' not in l[0] def chunk(lines): return cStringIO.StringIO(''.join(lines)) def hgsplit(stream, cur): inheader = True for line in stream: if not line.strip(): inheader = False if not inheader and line.startswith('# HG changeset patch'): yield chunk(cur) cur = [] inheader = True cur.append(line) if cur: yield chunk(cur) def mboxsplit(stream, cur): for line in stream: if line.startswith('From '): for c in split(chunk(cur[1:])): yield c cur = [] cur.append(line) if cur: for c in split(chunk(cur[1:])): yield c def mimesplit(stream, cur): def msgfp(m): fp = cStringIO.StringIO() g = email.Generator.Generator(fp, mangle_from_=False) g.flatten(m) fp.seek(0) return fp for line in stream: cur.append(line) c = chunk(cur) m = email.Parser.Parser().parse(c) if not 
m.is_multipart(): yield msgfp(m) else: ok_types = ('text/plain', 'text/x-diff', 'text/x-patch') for part in m.walk(): ct = part.get_content_type() if ct not in ok_types: continue yield msgfp(part) def headersplit(stream, cur): inheader = False for line in stream: if not inheader and isheader(line, inheader): yield chunk(cur) cur = [] inheader = True if inheader and not isheader(line, inheader): inheader = False cur.append(line) if cur: yield chunk(cur) def remainder(cur): yield chunk(cur) class fiter(object): def __init__(self, fp): self.fp = fp def __iter__(self): return self def next(self): l = self.fp.readline() if not l: raise StopIteration return l inheader = False cur = [] mimeheaders = ['content-type'] if not util.safehasattr(stream, 'next'): # http responses, for example, have readline but not next stream = fiter(stream) for line in stream: cur.append(line) if line.startswith('# HG changeset patch'): return hgsplit(stream, cur) elif line.startswith('From '): return mboxsplit(stream, cur) elif isheader(line, inheader): inheader = True if line.split(':', 1)[0].lower() in mimeheaders: # let email parser handle this return mimesplit(stream, cur) elif line.startswith('--- ') and inheader: # No evil headers seen by diff start, split by hand return headersplit(stream, cur) # Not enough info, keep reading # if we are here, we have a very plain patch return remainder(cur) ## Some facility for extensible patch parsing: # list of pairs ("header to match", "data key") patchheadermap = [('Date', 'date'), ('Branch', 'branch'), ('Node ID', 'nodeid'), ] def extract(ui, fileobj): '''extract patch from data read from fileobj. patch can be a normal patch or contained in an email message. return a dictionary. Standard keys are: - filename, - message, - user, - date, - branch, - node, - p1, - p2. Any item can be missing from the dictionary. If filename is missing, fileobj did not contain a patch. 
Caller must unlink filename when done.''' # attempt to detect the start of a patch # (this heuristic is borrowed from quilt) diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' r'retrieving revision [0-9]+(\.[0-9]+)*$|' r'---[ \t].*?^\+\+\+[ \t]|' r'\*\*\*[ \t].*?^---[ \t])', re.MULTILINE|re.DOTALL) data = {} fd, tmpname = tempfile.mkstemp(prefix='hg-patch-') tmpfp = os.fdopen(fd, 'w') try: msg = email.Parser.Parser().parse(fileobj) subject = msg['Subject'] data['user'] = msg['From'] if not subject and not data['user']: # Not an email, restore parsed headers if any subject = '\n'.join(': '.join(h) for h in msg.items()) + '\n' # should try to parse msg['Date'] parents = [] if subject: if subject.startswith('[PATCH'): pend = subject.find(']') if pend >= 0: subject = subject[pend + 1:].lstrip() subject = re.sub(r'\n[ \t]+', ' ', subject) ui.debug('Subject: %s\n' % subject) if data['user']: ui.debug('From: %s\n' % data['user']) diffs_seen = 0 ok_types = ('text/plain', 'text/x-diff', 'text/x-patch') message = '' for part in msg.walk(): content_type = part.get_content_type() ui.debug('Content-Type: %s\n' % content_type) if content_type not in ok_types: continue payload = part.get_payload(decode=True) m = diffre.search(payload) if m: hgpatch = False hgpatchheader = False ignoretext = False ui.debug('found patch at byte %d\n' % m.start(0)) diffs_seen += 1 cfp = cStringIO.StringIO() for line in payload[:m.start(0)].splitlines(): if line.startswith('# HG changeset patch') and not hgpatch: ui.debug('patch generated by hg export\n') hgpatch = True hgpatchheader = True # drop earlier commit message content cfp.seek(0) cfp.truncate() subject = None elif hgpatchheader: if line.startswith('# User '): data['user'] = line[7:] ui.debug('From: %s\n' % data['user']) elif line.startswith("# Parent "): parents.append(line[9:].lstrip()) elif line.startswith("# "): for header, key in patchheadermap: prefix = '# %s ' % header if line.startswith(prefix): data[key] = 
line[len(prefix):] else: hgpatchheader = False elif line == '---': ignoretext = True if not hgpatchheader and not ignoretext: cfp.write(line) cfp.write('\n') message = cfp.getvalue() if tmpfp: tmpfp.write(payload) if not payload.endswith('\n'): tmpfp.write('\n') elif not diffs_seen and message and content_type == 'text/plain': message += '\n' + payload except: # re-raises tmpfp.close() os.unlink(tmpname) raise if subject and not message.startswith(subject): message = '%s\n%s' % (subject, message) data['message'] = message tmpfp.close() if parents: data['p1'] = parents.pop(0) if parents: data['p2'] = parents.pop(0) if diffs_seen: data['filename'] = tmpname else: os.unlink(tmpname) return data class patchmeta(object): """Patched file metadata 'op' is the performed operation within ADD, DELETE, RENAME, MODIFY or COPY. 'path' is patched file path. 'oldpath' is set to the origin file when 'op' is either COPY or RENAME, None otherwise. If file mode is changed, 'mode' is a tuple (islink, isexec) where 'islink' is True if the file is a symlink and 'isexec' is True if the file is executable. Otherwise, 'mode' is None. 
""" def __init__(self, path): self.path = path self.oldpath = None self.mode = None self.op = 'MODIFY' self.binary = False def setmode(self, mode): islink = mode & 0o20000 isexec = mode & 0o100 self.mode = (islink, isexec) def copy(self): other = patchmeta(self.path) other.oldpath = self.oldpath other.mode = self.mode other.op = self.op other.binary = self.binary return other def _ispatchinga(self, afile): if afile == '/dev/null': return self.op == 'ADD' return afile == 'a/' + (self.oldpath or self.path) def _ispatchingb(self, bfile): if bfile == '/dev/null': return self.op == 'DELETE' return bfile == 'b/' + self.path def ispatching(self, afile, bfile): return self._ispatchinga(afile) and self._ispatchingb(bfile) def __repr__(self): return "" % (self.op, self.path) def readgitpatch(lr): """extract git-style metadata about patches from """ # Filter patch for git information gp = None gitpatches = [] for line in lr: line = line.rstrip(' \r\n') if line.startswith('diff --git a/'): m = gitre.match(line) if m: if gp: gitpatches.append(gp) dst = m.group(2) gp = patchmeta(dst) elif gp: if line.startswith('--- '): gitpatches.append(gp) gp = None continue if line.startswith('rename from '): gp.op = 'RENAME' gp.oldpath = line[12:] elif line.startswith('rename to '): gp.path = line[10:] elif line.startswith('copy from '): gp.op = 'COPY' gp.oldpath = line[10:] elif line.startswith('copy to '): gp.path = line[8:] elif line.startswith('deleted file'): gp.op = 'DELETE' elif line.startswith('new file mode '): gp.op = 'ADD' gp.setmode(int(line[-6:], 8)) elif line.startswith('new mode '): gp.setmode(int(line[-6:], 8)) elif line.startswith('GIT binary patch'): gp.binary = True if gp: gitpatches.append(gp) return gitpatches class linereader(object): # simple class to allow pushing lines back into the input stream def __init__(self, fp): self.fp = fp self.buf = [] def push(self, line): if line is not None: self.buf.append(line) def readline(self): if self.buf: l = self.buf[0] del 
self.buf[0] return l return self.fp.readline() def __iter__(self): while True: l = self.readline() if not l: break yield l class abstractbackend(object): def __init__(self, ui): self.ui = ui def getfile(self, fname): """Return target file data and flags as a (data, (islink, isexec)) tuple. Data is None if file is missing/deleted. """ raise NotImplementedError def setfile(self, fname, data, mode, copysource): """Write data to target file fname and set its mode. mode is a (islink, isexec) tuple. If data is None, the file content should be left unchanged. If the file is modified after being copied, copysource is set to the original file name. """ raise NotImplementedError def unlink(self, fname): """Unlink target file.""" raise NotImplementedError def writerej(self, fname, failed, total, lines): """Write rejected lines for fname. total is the number of hunks which failed to apply and total the total number of hunks for this files. """ pass def exists(self, fname): raise NotImplementedError class fsbackend(abstractbackend): def __init__(self, ui, basedir): super(fsbackend, self).__init__(ui) self.opener = scmutil.opener(basedir) def _join(self, f): return os.path.join(self.opener.base, f) def getfile(self, fname): if self.opener.islink(fname): return (self.opener.readlink(fname), (True, False)) isexec = False try: isexec = self.opener.lstat(fname).st_mode & 0o100 != 0 except OSError as e: if e.errno != errno.ENOENT: raise try: return (self.opener.read(fname), (False, isexec)) except IOError as e: if e.errno != errno.ENOENT: raise return None, None def setfile(self, fname, data, mode, copysource): islink, isexec = mode if data is None: self.opener.setflags(fname, islink, isexec) return if islink: self.opener.symlink(data, fname) else: self.opener.write(fname, data) if isexec: self.opener.setflags(fname, False, True) def unlink(self, fname): self.opener.unlinkpath(fname, ignoremissing=True) def writerej(self, fname, failed, total, lines): fname = fname + ".rej" 
        self.ui.warn(
            _("%d out of %d hunks FAILED -- saving rejects to file %s\n") %
            (failed, total, fname))
        fp = self.opener(fname, 'w')
        fp.writelines(lines)
        fp.close()

    def exists(self, fname):
        # lexists: a dangling symlink still counts as existing.
        return self.opener.lexists(fname)

class workingbackend(fsbackend):
    """fsbackend specialization that also tracks dirstate changes so the
    working directory can be updated (adds/removes/copies) on close()."""
    def __init__(self, ui, repo, similarity):
        super(workingbackend, self).__init__(ui, repo.root)
        self.repo = repo
        # Similarity threshold forwarded to scmutil.marktouched().
        self.similarity = similarity
        self.removed = set()
        self.changed = set()
        self.copied = []

    def _checkknown(self, fname):
        # Refuse to touch files that exist on disk but are unknown ('?')
        # to the dirstate.
        if self.repo.dirstate[fname] == '?' and self.exists(fname):
            raise PatchError(_('cannot patch %s: file is not tracked') % fname)

    def setfile(self, fname, data, mode, copysource):
        self._checkknown(fname)
        super(workingbackend, self).setfile(fname, data, mode, copysource)
        if copysource is not None:
            self.copied.append((copysource, fname))
        self.changed.add(fname)

    def unlink(self, fname):
        self._checkknown(fname)
        super(workingbackend, self).unlink(fname)
        self.removed.add(fname)
        self.changed.add(fname)

    def close(self):
        """Flush accumulated copy/remove/change records into the dirstate.

        Returns the sorted list of all changed filenames.
        """
        wctx = self.repo[None]
        changed = set(self.changed)
        for src, dst in self.copied:
            scmutil.dirstatecopy(self.ui, self.repo, wctx, src, dst)
        if self.removed:
            wctx.forget(sorted(self.removed))
        for f in self.removed:
            if f not in self.repo.dirstate:
                # File was deleted and no longer belongs to the
                # dirstate, it was probably marked added then
                # deleted, and should not be considered by
                # marktouched().
                changed.discard(f)
        if changed:
            scmutil.marktouched(self.repo, changed, self.similarity)
        return sorted(self.changed)

class filestore(object):
    """Byte store keeping small files in memory and spilling larger ones
    to a temporary directory once 'maxsize' in-memory bytes is exceeded."""
    def __init__(self, maxsize=None):
        self.opener = None
        self.files = {}
        self.created = 0
        self.maxsize = maxsize
        if self.maxsize is None:
            # Default in-memory budget: 4 MiB.
            self.maxsize = 4*(2**20)
        self.size = 0
        self.data = {}

    def setfile(self, fname, data, mode, copied=None):
        # maxsize < 0 means "never spill to disk".
        if self.maxsize < 0 or (len(data) + self.size) <= self.maxsize:
            self.data[fname] = (data, mode, copied)
            self.size += len(data)
        else:
            if self.opener is None:
                root = tempfile.mkdtemp(prefix='hg-patch-')
                self.opener = scmutil.opener(root)
            # Avoid filename issues with these simple names
            fn = str(self.created)
            self.opener.write(fn, data)
            self.created += 1
            self.files[fname] = (fn, mode, copied)

    def getfile(self, fname):
        """Return (data, mode, copied) for fname, or (None, None, None)."""
        if fname in self.data:
            return self.data[fname]
        if not self.opener or fname not in self.files:
            return None, None, None
        fn, mode, copied = self.files[fname]
        return self.opener.read(fn), mode, copied

    def close(self):
        # Remove the spill directory, if one was created.
        if self.opener:
            shutil.rmtree(self.opener.base)

class repobackend(abstractbackend):
    """Backend reading from a changectx and writing into a filestore,
    used to apply patches without touching the working directory."""
    def __init__(self, ui, repo, ctx, store):
        super(repobackend, self).__init__(ui)
        self.repo = repo
        self.ctx = ctx
        self.store = store
        self.changed = set()
        self.removed = set()
        self.copied = {}

    def _checkknown(self, fname):
        if fname not in self.ctx:
            raise PatchError(_('cannot patch %s: file is not tracked') % fname)

    def getfile(self, fname):
        try:
            fctx = self.ctx[fname]
        except error.LookupError:
            return None, None
        flags = fctx.flags()
        # Mercurial flags string: 'l' = symlink, 'x' = executable.
        return fctx.data(), ('l' in flags, 'x' in flags)

    def setfile(self, fname, data, mode, copysource):
        if copysource:
            self._checkknown(copysource)
        if data is None:
            # Flags-only change: keep the current content.
            data = self.ctx[fname].data()
        self.store.setfile(fname, data, mode, copysource)
        self.changed.add(fname)
        if copysource:
            self.copied[fname] = copysource

    def unlink(self, fname):
        self._checkknown(fname)
        self.removed.add(fname)

    def exists(self, fname):
        return fname in self.ctx

    def close(self):
        # Everything that was touched, written or removed.
        # (expression continues on the following source line)
        return self.changed
| self.removed # @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1 unidesc = re.compile('@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@') contextdesc = re.compile('(?:---|\*\*\*) (\d+)(?:,(\d+))? (?:---|\*\*\*)') eolmodes = ['strict', 'crlf', 'lf', 'auto'] class patchfile(object): def __init__(self, ui, gp, backend, store, eolmode='strict'): self.fname = gp.path self.eolmode = eolmode self.eol = None self.backend = backend self.ui = ui self.lines = [] self.exists = False self.missing = True self.mode = gp.mode self.copysource = gp.oldpath self.create = gp.op in ('ADD', 'COPY', 'RENAME') self.remove = gp.op == 'DELETE' if self.copysource is None: data, mode = backend.getfile(self.fname) else: data, mode = store.getfile(self.copysource)[:2] if data is not None: self.exists = self.copysource is None or backend.exists(self.fname) self.missing = False if data: self.lines = mdiff.splitnewlines(data) if self.mode is None: self.mode = mode if self.lines: # Normalize line endings if self.lines[0].endswith('\r\n'): self.eol = '\r\n' elif self.lines[0].endswith('\n'): self.eol = '\n' if eolmode != 'strict': nlines = [] for l in self.lines: if l.endswith('\r\n'): l = l[:-2] + '\n' nlines.append(l) self.lines = nlines else: if self.create: self.missing = False if self.mode is None: self.mode = (False, False) if self.missing: self.ui.warn(_("unable to find '%s' for patching\n") % self.fname) self.hash = {} self.dirty = 0 self.offset = 0 self.skew = 0 self.rej = [] self.fileprinted = False self.printfile(False) self.hunks = 0 def writelines(self, fname, lines, mode): if self.eolmode == 'auto': eol = self.eol elif self.eolmode == 'crlf': eol = '\r\n' else: eol = '\n' if self.eolmode != 'strict' and eol and eol != '\n': rawlines = [] for l in lines: if l and l[-1] == '\n': l = l[:-1] + eol rawlines.append(l) lines = rawlines self.backend.setfile(fname, ''.join(lines), mode, self.copysource) def printfile(self, warn): if self.fileprinted: return if warn or self.ui.verbose: 
self.fileprinted = True s = _("patching file %s\n") % self.fname if warn: self.ui.warn(s) else: self.ui.note(s) def findlines(self, l, linenum): # looks through the hash and finds candidate lines. The # result is a list of line numbers sorted based on distance # from linenum cand = self.hash.get(l, []) if len(cand) > 1: # resort our list of potentials forward then back. cand.sort(key=lambda x: abs(x - linenum)) return cand def write_rej(self): # our rejects are a little different from patch(1). This always # creates rejects in the same form as the original patch. A file # header is inserted so that you can run the reject through patch again # without having to type the filename. if not self.rej: return base = os.path.basename(self.fname) lines = ["--- %s\n+++ %s\n" % (base, base)] for x in self.rej: for l in x.hunk: lines.append(l) if l[-1] != '\n': lines.append("\n\ No newline at end of file\n") self.backend.writerej(self.fname, len(self.rej), self.hunks, lines) def apply(self, h): if not h.complete(): raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") % (h.number, h.desc, len(h.a), h.lena, len(h.b), h.lenb)) self.hunks += 1 if self.missing: self.rej.append(h) return -1 if self.exists and self.create: if self.copysource: self.ui.warn(_("cannot create %s: destination already " "exists\n") % self.fname) else: self.ui.warn(_("file %s already exists\n") % self.fname) self.rej.append(h) return -1 if isinstance(h, binhunk): if self.remove: self.backend.unlink(self.fname) else: l = h.new(self.lines) self.lines[:] = l self.offset += len(l) self.dirty = True return 0 horig = h if (self.eolmode in ('crlf', 'lf') or self.eolmode == 'auto' and self.eol): # If new eols are going to be normalized, then normalize # hunk data before patching. Otherwise, preserve input # line-endings. 
h = h.getnormalized() # fast case first, no offsets, no fuzz old, oldstart, new, newstart = h.fuzzit(0, False) oldstart += self.offset orig_start = oldstart # if there's skew we want to emit the "(offset %d lines)" even # when the hunk cleanly applies at start + skew, so skip the # fast case code if (self.skew == 0 and diffhelpers.testhunk(old, self.lines, oldstart) == 0): if self.remove: self.backend.unlink(self.fname) else: self.lines[oldstart:oldstart + len(old)] = new self.offset += len(new) - len(old) self.dirty = True return 0 # ok, we couldn't match the hunk. Lets look for offsets and fuzz it self.hash = {} for x, s in enumerate(self.lines): self.hash.setdefault(s, []).append(x) for fuzzlen in xrange(self.ui.configint("patch", "fuzz", 2) + 1): for toponly in [True, False]: old, oldstart, new, newstart = h.fuzzit(fuzzlen, toponly) oldstart = oldstart + self.offset + self.skew oldstart = min(oldstart, len(self.lines)) if old: cand = self.findlines(old[0][1:], oldstart) else: # Only adding lines with no or fuzzed context, just # take the skew in account cand = [oldstart] for l in cand: if not old or diffhelpers.testhunk(old, self.lines, l) == 0: self.lines[l : l + len(old)] = new self.offset += len(new) - len(old) self.skew = l - orig_start self.dirty = True offset = l - orig_start - fuzzlen if fuzzlen: msg = _("Hunk #%d succeeded at %d " "with fuzz %d " "(offset %d lines).\n") self.printfile(True) self.ui.warn(msg % (h.number, l + 1, fuzzlen, offset)) else: msg = _("Hunk #%d succeeded at %d " "(offset %d lines).\n") self.ui.note(msg % (h.number, l + 1, offset)) return fuzzlen self.printfile(True) self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start)) self.rej.append(horig) return -1 def close(self): if self.dirty: self.writelines(self.fname, self.lines, self.mode) self.write_rej() return len(self.rej) class header(object): """patch header """ diffgit_re = re.compile('diff --git a/(.*) b/(.*)$') diff_re = re.compile('diff -r .* (.*)$') allhunks_re 
= re.compile('(?:index|deleted file) ') pretty_re = re.compile('(?:new file|deleted file) ') special_re = re.compile('(?:index|deleted|copy|rename) ') newfile_re = re.compile('(?:new file)') def __init__(self, header): self.header = header self.hunks = [] def binary(self): return any(h.startswith('index ') for h in self.header) def pretty(self, fp): for h in self.header: if h.startswith('index '): fp.write(_('this modifies a binary file (all or nothing)\n')) break if self.pretty_re.match(h): fp.write(h) if self.binary(): fp.write(_('this is a binary file\n')) break if h.startswith('---'): fp.write(_('%d hunks, %d lines changed\n') % (len(self.hunks), sum([max(h.added, h.removed) for h in self.hunks]))) break fp.write(h) def write(self, fp): fp.write(''.join(self.header)) def allhunks(self): return any(self.allhunks_re.match(h) for h in self.header) def files(self): match = self.diffgit_re.match(self.header[0]) if match: fromfile, tofile = match.groups() if fromfile == tofile: return [fromfile] return [fromfile, tofile] else: return self.diff_re.match(self.header[0]).groups() def filename(self): return self.files()[-1] def __repr__(self): return '
' % (' '.join(map(repr, self.files()))) def isnewfile(self): return any(self.newfile_re.match(h) for h in self.header) def special(self): # Special files are shown only at the header level and not at the hunk # level for example a file that has been deleted is a special file. # The user cannot change the content of the operation, in the case of # the deleted file he has to take the deletion or not take it, he # cannot take some of it. # Newly added files are special if they are empty, they are not special # if they have some content as we want to be able to change it nocontent = len(self.header) == 2 emptynewfile = self.isnewfile() and nocontent return emptynewfile or \ any(self.special_re.match(h) for h in self.header) class recordhunk(object): """patch hunk XXX shouldn't we merge this with the other hunk class? """ maxcontext = 3 def __init__(self, header, fromline, toline, proc, before, hunk, after): def trimcontext(number, lines): delta = len(lines) - self.maxcontext if False and delta > 0: return number + delta, lines[:self.maxcontext] return number, lines self.header = header self.fromline, self.before = trimcontext(fromline, before) self.toline, self.after = trimcontext(toline, after) self.proc = proc self.hunk = hunk self.added, self.removed = self.countchanges(self.hunk) def __eq__(self, v): if not isinstance(v, recordhunk): return False return ((v.hunk == self.hunk) and (v.proc == self.proc) and (self.fromline == v.fromline) and (self.header.files() == v.header.files())) def __hash__(self): return hash((tuple(self.hunk), tuple(self.header.files()), self.fromline, self.proc)) def countchanges(self, hunk): """hunk -> (n+,n-)""" add = len([h for h in hunk if h[0] == '+']) rem = len([h for h in hunk if h[0] == '-']) return add, rem def write(self, fp): delta = len(self.before) + len(self.after) if self.after and self.after[-1] == '\\ No newline at end of file\n': delta -= 1 fromlen = delta + self.removed tolen = delta + self.added fp.write('@@ -%d,%d +%d,%d 
@@%s\n' % (self.fromline, fromlen, self.toline, tolen, self.proc and (' ' + self.proc))) fp.write(''.join(self.before + self.hunk + self.after)) pretty = write def filename(self): return self.header.filename() def __repr__(self): return '' % (self.filename(), self.fromline) def filterpatch(ui, headers, operation=None): """Interactively filter patch chunks into applied-only chunks""" if operation is None: operation = _('record') def prompt(skipfile, skipall, query, chunk): """prompt query, and process base inputs - y/n for the rest of file - y/n for the rest - ? (help) - q (quit) Return True/False and possibly updated skipfile and skipall. """ newpatches = None if skipall is not None: return skipall, skipfile, skipall, newpatches if skipfile is not None: return skipfile, skipfile, skipall, newpatches while True: resps = _('[Ynesfdaq?]' '$$ &Yes, record this change' '$$ &No, skip this change' '$$ &Edit this change manually' '$$ &Skip remaining changes to this file' '$$ Record remaining changes to this &file' '$$ &Done, skip remaining changes and files' '$$ Record &all changes to all remaining files' '$$ &Quit, recording no changes' '$$ &? (display help)') r = ui.promptchoice("%s %s" % (query, resps)) ui.write("\n") if r == 8: # ? for c, t in ui.extractchoices(resps)[1]: ui.write('%s - %s\n' % (c, t.lower())) continue elif r == 0: # yes ret = True elif r == 1: # no ret = False elif r == 2: # Edit patch if chunk is None: ui.write(_('cannot edit patch for whole file')) ui.write("\n") continue if chunk.header.binary(): ui.write(_('cannot edit patch for binary file')) ui.write("\n") continue # Patch comment based on the Git one (based on comment at end of # https://mercurial-scm.org/wiki/RecordExtension) phelp = '---' + _(""" To remove '-' lines, make them ' ' lines (context). To remove '+' lines, delete them. Lines starting with # will be removed from the patch. If the patch applies cleanly, the edited hunk will immediately be added to the record list. 
If it does not apply cleanly, a rejects file will be generated: you can use that when you try again. If all lines of the hunk are removed, then the edit is aborted and the hunk is left unchanged. """) (patchfd, patchfn) = tempfile.mkstemp(prefix="hg-editor-", suffix=".diff", text=True) ncpatchfp = None try: # Write the initial patch f = os.fdopen(patchfd, "w") chunk.header.write(f) chunk.write(f) f.write('\n'.join(['# ' + i for i in phelp.splitlines()])) f.close() # Start the editor and wait for it to complete editor = ui.geteditor() ret = ui.system("%s \"%s\"" % (editor, patchfn), environ={'HGUSER': ui.username()}) if ret != 0: ui.warn(_("editor exited with exit code %d\n") % ret) continue # Remove comment lines patchfp = open(patchfn) ncpatchfp = cStringIO.StringIO() for line in patchfp: if not line.startswith('#'): ncpatchfp.write(line) patchfp.close() ncpatchfp.seek(0) newpatches = parsepatch(ncpatchfp) finally: os.unlink(patchfn) del ncpatchfp # Signal that the chunk shouldn't be applied as-is, but # provide the new patch to be used instead. 
ret = False elif r == 3: # Skip ret = skipfile = False elif r == 4: # file (Record remaining) ret = skipfile = True elif r == 5: # done, skip remaining ret = skipall = False elif r == 6: # all ret = skipall = True elif r == 7: # quit raise error.Abort(_('user quit')) return ret, skipfile, skipall, newpatches seen = set() applied = {} # 'filename' -> [] of chunks skipfile, skipall = None, None pos, total = 1, sum(len(h.hunks) for h in headers) for h in headers: pos += len(h.hunks) skipfile = None fixoffset = 0 hdr = ''.join(h.header) if hdr in seen: continue seen.add(hdr) if skipall is None: h.pretty(ui) msg = (_('examine changes to %s?') % _(' and ').join("'%s'" % f for f in h.files())) r, skipfile, skipall, np = prompt(skipfile, skipall, msg, None) if not r: continue applied[h.filename()] = [h] if h.allhunks(): applied[h.filename()] += h.hunks continue for i, chunk in enumerate(h.hunks): if skipfile is None and skipall is None: chunk.pretty(ui) if total == 1: msg = _("record this change to '%s'?") % chunk.filename() else: idx = pos - len(h.hunks) + i msg = _("record change %d/%d to '%s'?") % (idx, total, chunk.filename()) r, skipfile, skipall, newpatches = prompt(skipfile, skipall, msg, chunk) if r: if fixoffset: chunk = copy.copy(chunk) chunk.toline += fixoffset applied[chunk.filename()].append(chunk) elif newpatches is not None: for newpatch in newpatches: for newhunk in newpatch.hunks: if fixoffset: newhunk.toline += fixoffset applied[newhunk.filename()].append(newhunk) else: fixoffset += chunk.removed - chunk.added return (sum([h for h in applied.itervalues() if h[0].special() or len(h) > 1], []), {}) class hunk(object): def __init__(self, desc, num, lr, context): self.number = num self.desc = desc self.hunk = [desc] self.a = [] self.b = [] self.starta = self.lena = None self.startb = self.lenb = None if lr is not None: if context: self.read_context_hunk(lr) else: self.read_unified_hunk(lr) def getnormalized(self): """Return a copy with line endings normalized 
to LF.""" def normalize(lines): nlines = [] for line in lines: if line.endswith('\r\n'): line = line[:-2] + '\n' nlines.append(line) return nlines # Dummy object, it is rebuilt manually nh = hunk(self.desc, self.number, None, None) nh.number = self.number nh.desc = self.desc nh.hunk = self.hunk nh.a = normalize(self.a) nh.b = normalize(self.b) nh.starta = self.starta nh.startb = self.startb nh.lena = self.lena nh.lenb = self.lenb return nh def read_unified_hunk(self, lr): m = unidesc.match(self.desc) if not m: raise PatchError(_("bad hunk #%d") % self.number) self.starta, self.lena, self.startb, self.lenb = m.groups() if self.lena is None: self.lena = 1 else: self.lena = int(self.lena) if self.lenb is None: self.lenb = 1 else: self.lenb = int(self.lenb) self.starta = int(self.starta) self.startb = int(self.startb) diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a, self.b) # if we hit eof before finishing out the hunk, the last line will # be zero length. Lets try to fix it up. while len(self.hunk[-1]) == 0: del self.hunk[-1] del self.a[-1] del self.b[-1] self.lena -= 1 self.lenb -= 1 self._fixnewline(lr) def read_context_hunk(self, lr): self.desc = lr.readline() m = contextdesc.match(self.desc) if not m: raise PatchError(_("bad hunk #%d") % self.number) self.starta, aend = m.groups() self.starta = int(self.starta) if aend is None: aend = self.starta self.lena = int(aend) - self.starta if self.starta: self.lena += 1 for x in xrange(self.lena): l = lr.readline() if l.startswith('---'): # lines addition, old block is empty lr.push(l) break s = l[2:] if l.startswith('- ') or l.startswith('! 
'): u = '-' + s elif l.startswith(' '): u = ' ' + s else: raise PatchError(_("bad hunk #%d old text line %d") % (self.number, x)) self.a.append(u) self.hunk.append(u) l = lr.readline() if l.startswith('\ '): s = self.a[-1][:-1] self.a[-1] = s self.hunk[-1] = s l = lr.readline() m = contextdesc.match(l) if not m: raise PatchError(_("bad hunk #%d") % self.number) self.startb, bend = m.groups() self.startb = int(self.startb) if bend is None: bend = self.startb self.lenb = int(bend) - self.startb if self.startb: self.lenb += 1 hunki = 1 for x in xrange(self.lenb): l = lr.readline() if l.startswith('\ '): # XXX: the only way to hit this is with an invalid line range. # The no-eol marker is not counted in the line range, but I # guess there are diff(1) out there which behave differently. s = self.b[-1][:-1] self.b[-1] = s self.hunk[hunki - 1] = s continue if not l: # line deletions, new block is empty and we hit EOF lr.push(l) break s = l[2:] if l.startswith('+ ') or l.startswith('! '): u = '+' + s elif l.startswith(' '): u = ' ' + s elif len(self.b) == 0: # line deletions, new block is empty lr.push(l) break else: raise PatchError(_("bad hunk #%d old text line %d") % (self.number, x)) self.b.append(s) while True: if hunki >= len(self.hunk): h = "" else: h = self.hunk[hunki] hunki += 1 if h == u: break elif h.startswith('-'): continue else: self.hunk.insert(hunki - 1, u) break if not self.a: # this happens when lines were only added to the hunk for x in self.hunk: if x.startswith('-') or x.startswith(' '): self.a.append(x) if not self.b: # this happens when lines were only deleted from the hunk for x in self.hunk: if x.startswith('+') or x.startswith(' '): self.b.append(x[1:]) # @@ -start,len +start,len @@ self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena, self.startb, self.lenb) self.hunk[0] = self.desc self._fixnewline(lr) def _fixnewline(self, lr): l = lr.readline() if l.startswith('\ '): diffhelpers.fix_newline(self.hunk, self.a, self.b) else: lr.push(l) 
    def complete(self):
        # The hunk is fully read once we hold exactly the advertised number
        # of old and new lines.
        return len(self.a) == self.lena and len(self.b) == self.lenb

    def _fuzzit(self, old, new, fuzz, toponly):
        # this removes context lines from the top and bottom of list 'l'. It
        # checks the hunk to make sure only context lines are removed, and then
        # returns a new shortened list of lines.
        fuzz = min(fuzz, len(old))
        if fuzz:
            top = 0
            bot = 0
            hlen = len(self.hunk)
            for x in xrange(hlen - 1):
                # the hunk starts with the @@ line, so use x+1
                if self.hunk[x + 1][0] == ' ':
                    top += 1
                else:
                    break
            if not toponly:
                # Count trailing context lines the same way, from the bottom.
                for x in xrange(hlen - 1):
                    if self.hunk[hlen - bot - 1][0] == ' ':
                        bot += 1
                    else:
                        break
            # Never trim more than 'fuzz' lines, and never trim non-context.
            bot = min(fuzz, bot)
            top = min(fuzz, top)
            return old[top:len(old) - bot], new[top:len(new) - bot], top
        return old, new, 0

    def fuzzit(self, fuzz, toponly):
        """Return (old, oldstart, new, newstart) with up to 'fuzz' context
        lines trimmed; starts are 0-based and shifted by the trim amount."""
        old, new, top = self._fuzzit(self.a, self.b, fuzz, toponly)
        oldstart = self.starta + top
        newstart = self.startb + top
        # zero length hunk ranges already have their start decremented
        if self.lena and oldstart > 0:
            oldstart -= 1
        if self.lenb and newstart > 0:
            newstart -= 1
        return old, oldstart, new, newstart

class binhunk(object):
    'A binary patch file.'
    def __init__(self, lr, fname):
        # Decoded binary payload; stays None until _read() succeeds.
        self.text = None
        # True when the data is a delta against the old contents rather
        # than a full literal replacement (git 'delta ' vs 'literal ').
        self.delta = False
        self.hunk = ['GIT binary patch\n']
        self._fname = fname
        self._read(lr)

    def complete(self):
        # The hunk is usable only if the binary data was fully decoded.
        return self.text is not None

    def new(self, lines):
        # Produce the new file content; 'lines' is the old content and
        # is only consulted when this hunk is a delta.
        if self.delta:
            return [applybindelta(self.text, ''.join(lines))]
        return [self.text]

    def _read(self, lr):
        # Parse a git binary patch body: a 'literal N'/'delta N' header
        # followed by base85 lines, each prefixed with a length letter
        # (A-Z -> 1..26, a-z -> 27..52), then zlib-decompress the whole.
        def getline(lr, hunk):
            # Read one raw line, record it for reject-file output, and
            # return it stripped of the line ending.
            l = lr.readline()
            hunk.append(l)
            return l.rstrip('\r\n')

        size = 0
        while True:
            line = getline(lr, self.hunk)
            if not line:
                raise PatchError(_('could not extract "%s" binary data')
                                 % self._fname)
            if line.startswith('literal '):
                size = int(line[8:].rstrip())
                break
            if line.startswith('delta '):
                size = int(line[6:].rstrip())
                self.delta = True
                break
        dec = []
        line = getline(lr, self.hunk)
        while len(line) > 1:
            # First character encodes how many decoded bytes this line
            # carries; the base85 text may decode to slightly more.
            l = line[0]
            if l <= 'Z' and l >= 'A':
                l = ord(l) - ord('A') + 1
            else:
                l = ord(l) - ord('a') + 27
            try:
                dec.append(base85.b85decode(line[1:])[:l])
            except ValueError as e:
                raise PatchError(_('could not decode "%s" binary patch: %s')
                                 % (self._fname, str(e)))
            line = getline(lr, self.hunk)
        text = zlib.decompress(''.join(dec))
        if len(text) != size:
            # Size mismatch means a truncated or corrupt patch.
            raise PatchError(_('"%s" length is %d bytes, should be %d')
                             % (self._fname, len(text), size))
        self.text = text

def parsefilename(str):
    # --- filename \t|space stuff
    # Strip the '--- '/'+++ ' prefix and everything after the first tab
    # (or, failing that, the first space) — typically a timestamp.
    s = str[4:].rstrip('\r\n')
    i = s.find('\t')
    if i < 0:
        i = s.find(' ')
        if i < 0:
            return s
    return s[:i]

def reversehunks(hunks):
    '''reverse the signs in the hunks given as argument

    This function operates on hunks coming out of patch.filterpatch, that is
    a list of the form: [header1, hunk1, hunk2, header2...]. Example usage:

    >>> rawpatch = """diff --git a/folder1/g b/folder1/g
    ... --- a/folder1/g
    ... +++ b/folder1/g
    ... @@ -1,7 +1,7 @@
    ... +firstline
    ... c
    ... 1
    ... 2
    ... + 3
    ... -4
    ... 5
    ... d
    ... +lastline"""
    >>> hunks = parsepatch(rawpatch)
    >>> hunkscomingfromfilterpatch = []
    >>> for h in hunks:
    ...     hunkscomingfromfilterpatch.append(h)
    ...
hunkscomingfromfilterpatch.extend(h.hunks) >>> reversedhunks = reversehunks(hunkscomingfromfilterpatch) >>> fp = cStringIO.StringIO() >>> for c in reversedhunks: ... c.write(fp) >>> fp.seek(0) >>> reversedpatch = fp.read() >>> print reversedpatch diff --git a/folder1/g b/folder1/g --- a/folder1/g +++ b/folder1/g @@ -1,4 +1,3 @@ -firstline c 1 2 @@ -1,6 +2,6 @@ c 1 2 - 3 +4 5 d @@ -5,3 +6,2 @@ 5 d -lastline ''' from . import crecord as crecordmod newhunks = [] for c in hunks: if isinstance(c, crecordmod.uihunk): # curses hunks encapsulate the record hunk in _hunk c = c._hunk if isinstance(c, recordhunk): for j, line in enumerate(c.hunk): if line.startswith("-"): c.hunk[j] = "+" + c.hunk[j][1:] elif line.startswith("+"): c.hunk[j] = "-" + c.hunk[j][1:] c.added, c.removed = c.removed, c.added newhunks.append(c) return newhunks def parsepatch(originalchunks): """patch -> [] of headers -> [] of hunks """ class parser(object): """patch parsing state machine""" def __init__(self): self.fromline = 0 self.toline = 0 self.proc = '' self.header = None self.context = [] self.before = [] self.hunk = [] self.headers = [] def addrange(self, limits): fromstart, fromend, tostart, toend, proc = limits self.fromline = int(fromstart) self.toline = int(tostart) self.proc = proc def addcontext(self, context): if self.hunk: h = recordhunk(self.header, self.fromline, self.toline, self.proc, self.before, self.hunk, context) self.header.hunks.append(h) self.fromline += len(self.before) + h.removed self.toline += len(self.before) + h.added self.before = [] self.hunk = [] self.context = context def addhunk(self, hunk): if self.context: self.before = self.context self.context = [] self.hunk = hunk def newfile(self, hdr): self.addcontext([]) h = header(hdr) self.headers.append(h) self.header = h def addother(self, line): pass # 'other' lines are ignored def finished(self): self.addcontext([]) return self.headers transitions = { 'file': {'context': addcontext, 'file': newfile, 'hunk': addhunk, 
'range': addrange}, 'context': {'file': newfile, 'hunk': addhunk, 'range': addrange, 'other': addother}, 'hunk': {'context': addcontext, 'file': newfile, 'range': addrange}, 'range': {'context': addcontext, 'hunk': addhunk}, 'other': {'other': addother}, } p = parser() fp = cStringIO.StringIO() fp.write(''.join(originalchunks)) fp.seek(0) state = 'context' for newstate, data in scanpatch(fp): try: p.transitions[state][newstate](p, data) except KeyError: raise PatchError('unhandled transition: %s -> %s' % (state, newstate)) state = newstate del fp return p.finished() def pathtransform(path, strip, prefix): '''turn a path from a patch into a path suitable for the repository prefix, if not empty, is expected to be normalized with a / at the end. Returns (stripped components, path in repository). >>> pathtransform('a/b/c', 0, '') ('', 'a/b/c') >>> pathtransform(' a/b/c ', 0, '') ('', ' a/b/c') >>> pathtransform(' a/b/c ', 2, '') ('a/b/', 'c') >>> pathtransform('a/b/c', 0, 'd/e/') ('', 'd/e/a/b/c') >>> pathtransform(' a//b/c ', 2, 'd/e/') ('a//b/', 'd/e/c') >>> pathtransform('a/b/c', 3, '') Traceback (most recent call last): PatchError: unable to strip away 1 of 3 dirs from a/b/c ''' pathlen = len(path) i = 0 if strip == 0: return '', prefix + path.rstrip() count = strip while count > 0: i = path.find('/', i) if i == -1: raise PatchError(_("unable to strip away %d of %d dirs from %s") % (count, strip, path)) i += 1 # consume '//' in the path while i < pathlen - 1 and path[i] == '/': i += 1 count -= 1 return path[:i].lstrip(), prefix + path[i:].rstrip() def makepatchmeta(backend, afile_orig, bfile_orig, hunk, strip, prefix): nulla = afile_orig == "/dev/null" nullb = bfile_orig == "/dev/null" create = nulla and hunk.starta == 0 and hunk.lena == 0 remove = nullb and hunk.startb == 0 and hunk.lenb == 0 abase, afile = pathtransform(afile_orig, strip, prefix) gooda = not nulla and backend.exists(afile) bbase, bfile = pathtransform(bfile_orig, strip, prefix) if afile == bfile: 
goodb = gooda else: goodb = not nullb and backend.exists(bfile) missing = not goodb and not gooda and not create # some diff programs apparently produce patches where the afile is # not /dev/null, but afile starts with bfile abasedir = afile[:afile.rfind('/') + 1] bbasedir = bfile[:bfile.rfind('/') + 1] if (missing and abasedir == bbasedir and afile.startswith(bfile) and hunk.starta == 0 and hunk.lena == 0): create = True missing = False # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the # diff is between a file and its backup. In this case, the original # file should be patched (see original mpatch code). isbackup = (abase == bbase and bfile.startswith(afile)) fname = None if not missing: if gooda and goodb: if isbackup: fname = afile else: fname = bfile elif gooda: fname = afile if not fname: if not nullb: if isbackup: fname = afile else: fname = bfile elif not nulla: fname = afile else: raise PatchError(_("undefined source and destination files")) gp = patchmeta(fname) if create: gp.op = 'ADD' elif remove: gp.op = 'DELETE' return gp def scanpatch(fp): """like patch.iterhunks, but yield different events - ('file', [header_lines + fromfile + tofile]) - ('context', [context_lines]) - ('hunk', [hunk_lines]) - ('range', (-start,len, +start,len, proc)) """ lines_re = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)') lr = linereader(fp) def scanwhile(first, p): """scan lr while predicate holds""" lines = [first] while True: line = lr.readline() if not line: break if p(line): lines.append(line) else: lr.push(line) break return lines while True: line = lr.readline() if not line: break if line.startswith('diff --git a/') or line.startswith('diff -r '): def notheader(line): s = line.split(None, 1) return not s or s[0] not in ('---', 'diff') header = scanwhile(line, notheader) fromfile = lr.readline() if fromfile.startswith('---'): tofile = lr.readline() header += [fromfile, tofile] else: lr.push(fromfile) yield 'file', header elif line[0] == ' ': yield 
'context', scanwhile(line, lambda l: l[0] in ' \\') elif line[0] in '-+': yield 'hunk', scanwhile(line, lambda l: l[0] in '-+\\') else: m = lines_re.match(line) if m: yield 'range', m.groups() else: yield 'other', line def scangitpatch(lr, firstline): """ Git patches can emit: - rename a to b - change b - copy a to c - change c We cannot apply this sequence as-is, the renamed 'a' could not be found for it would have been renamed already. And we cannot copy from 'b' instead because 'b' would have been changed already. So we scan the git patch for copy and rename commands so we can perform the copies ahead of time. """ pos = 0 try: pos = lr.fp.tell() fp = lr.fp except IOError: fp = cStringIO.StringIO(lr.fp.read()) gitlr = linereader(fp) gitlr.push(firstline) gitpatches = readgitpatch(gitlr) fp.seek(pos) return gitpatches def iterhunks(fp): """Read a patch and yield the following events: - ("file", afile, bfile, firsthunk): select a new target file. - ("hunk", hunk): a new hunk is ready to be applied, follows a "file" event. - ("git", gitchanges): current diff is in git format, gitchanges maps filenames to gitpatch records. Unique event. 
""" afile = "" bfile = "" state = None hunknum = 0 emitfile = newfile = False gitpatches = None # our states BFILE = 1 context = None lr = linereader(fp) while True: x = lr.readline() if not x: break if state == BFILE and ( (not context and x[0] == '@') or (context is not False and x.startswith('***************')) or x.startswith('GIT binary patch')): gp = None if (gitpatches and gitpatches[-1].ispatching(afile, bfile)): gp = gitpatches.pop() if x.startswith('GIT binary patch'): h = binhunk(lr, gp.path) else: if context is None and x.startswith('***************'): context = True h = hunk(x, hunknum + 1, lr, context) hunknum += 1 if emitfile: emitfile = False yield 'file', (afile, bfile, h, gp and gp.copy() or None) yield 'hunk', h elif x.startswith('diff --git a/'): m = gitre.match(x.rstrip(' \r\n')) if not m: continue if gitpatches is None: # scan whole input for git metadata gitpatches = scangitpatch(lr, x) yield 'git', [g.copy() for g in gitpatches if g.op in ('COPY', 'RENAME')] gitpatches.reverse() afile = 'a/' + m.group(1) bfile = 'b/' + m.group(2) while gitpatches and not gitpatches[-1].ispatching(afile, bfile): gp = gitpatches.pop() yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy()) if not gitpatches: raise PatchError(_('failed to synchronize metadata for "%s"') % afile[2:]) gp = gitpatches[-1] newfile = True elif x.startswith('---'): # check for a unified diff l2 = lr.readline() if not l2.startswith('+++'): lr.push(l2) continue newfile = True context = False afile = parsefilename(x) bfile = parsefilename(l2) elif x.startswith('***'): # check for a context diff l2 = lr.readline() if not l2.startswith('---'): lr.push(l2) continue l3 = lr.readline() lr.push(l3) if not l3.startswith("***************"): lr.push(l2) continue newfile = True context = True afile = parsefilename(x) bfile = parsefilename(l2) if newfile: newfile = False emitfile = True state = BFILE hunknum = 0 while gitpatches: gp = gitpatches.pop() yield 'file', ('a/' + gp.path, 'b/' + 
                      gp.path, None, gp.copy())

def applybindelta(binchunk, data):
    """Apply a binary delta hunk
    The algorithm used is the algorithm from git's patch-delta.c

    'binchunk' is the decoded delta stream; 'data' is the old file
    content the delta copies from.  Returns the new file content.
    """
    def deltahead(binchunk):
        # Skip one little-endian base-128 varint (a set 0x80 bit means
        # "more bytes follow") and return how many bytes it occupied.
        i = 0
        for c in binchunk:
            i += 1
            if not (ord(c) & 0x80):
                return i
        return i
    out = ""
    # The stream opens with two varints: source size and target size.
    # Neither value is used here, so both headers are simply skipped.
    s = deltahead(binchunk)
    binchunk = binchunk[s:]
    s = deltahead(binchunk)
    binchunk = binchunk[s:]
    i = 0
    while i < len(binchunk):
        cmd = ord(binchunk[i])
        i += 1
        if (cmd & 0x80):
            # Copy opcode: low nibble bits select which offset bytes
            # follow, bits 0x10/0x20/0x40 select which size bytes follow.
            offset = 0
            size = 0
            if (cmd & 0x01):
                offset = ord(binchunk[i])
                i += 1
            if (cmd & 0x02):
                offset |= ord(binchunk[i]) << 8
                i += 1
            if (cmd & 0x04):
                offset |= ord(binchunk[i]) << 16
                i += 1
            if (cmd & 0x08):
                offset |= ord(binchunk[i]) << 24
                i += 1
            if (cmd & 0x10):
                size = ord(binchunk[i])
                i += 1
            if (cmd & 0x20):
                size |= ord(binchunk[i]) << 8
                i += 1
            if (cmd & 0x40):
                size |= ord(binchunk[i]) << 16
                i += 1
            if size == 0:
                # As in patch-delta.c: an explicit size of 0 means 0x10000.
                size = 0x10000
            offset_end = offset + size
            out += data[offset:offset_end]
        elif cmd != 0:
            # Literal opcode: 'cmd' bytes follow inline in the delta.
            offset_end = i + cmd
            out += binchunk[i:offset_end]
            i += cmd
        else:
            # Opcode 0 is reserved; a conforming delta never emits it.
            raise PatchError(_('unexpected delta opcode 0'))
    return out

def applydiff(ui, fp, backend, store, strip=1, prefix='', eolmode='strict'):
    """Reads a patch from fp and tries to apply it.

    Returns 0 for a clean patch, -1 if any rejects were found and 1 if
    there was any fuzz.

    If 'eolmode' is 'strict', the patch content and patched file are
    read in binary mode. Otherwise, line endings are ignored when
    patching then normalized according to 'eolmode'.
""" return _applydiff(ui, fp, patchfile, backend, store, strip=strip, prefix=prefix, eolmode=eolmode) def _applydiff(ui, fp, patcher, backend, store, strip=1, prefix='', eolmode='strict'): if prefix: prefix = pathutil.canonpath(backend.repo.root, backend.repo.getcwd(), prefix) if prefix != '': prefix += '/' def pstrip(p): return pathtransform(p, strip - 1, prefix)[1] rejects = 0 err = 0 current_file = None for state, values in iterhunks(fp): if state == 'hunk': if not current_file: continue ret = current_file.apply(values) if ret > 0: err = 1 elif state == 'file': if current_file: rejects += current_file.close() current_file = None afile, bfile, first_hunk, gp = values if gp: gp.path = pstrip(gp.path) if gp.oldpath: gp.oldpath = pstrip(gp.oldpath) else: gp = makepatchmeta(backend, afile, bfile, first_hunk, strip, prefix) if gp.op == 'RENAME': backend.unlink(gp.oldpath) if not first_hunk: if gp.op == 'DELETE': backend.unlink(gp.path) continue data, mode = None, None if gp.op in ('RENAME', 'COPY'): data, mode = store.getfile(gp.oldpath)[:2] # FIXME: failing getfile has never been handled here assert data is not None if gp.mode: mode = gp.mode if gp.op == 'ADD': # Added files without content have no hunk and # must be created data = '' if data or mode: if (gp.op in ('ADD', 'RENAME', 'COPY') and backend.exists(gp.path)): raise PatchError(_("cannot create %s: destination " "already exists") % gp.path) backend.setfile(gp.path, data, mode, gp.oldpath) continue try: current_file = patcher(ui, gp, backend, store, eolmode=eolmode) except PatchError as inst: ui.warn(str(inst) + '\n') current_file = None rejects += 1 continue elif state == 'git': for gp in values: path = pstrip(gp.oldpath) data, mode = backend.getfile(path) if data is None: # The error ignored here will trigger a getfile() # error in a place more appropriate for error # handling, and will not interrupt the patching # process. 
pass else: store.setfile(path, data, mode) else: raise error.Abort(_('unsupported parser state: %s') % state) if current_file: rejects += current_file.close() if rejects: return -1 return err def _externalpatch(ui, repo, patcher, patchname, strip, files, similarity): """use to apply to the working directory. returns whether patch was applied with fuzz factor.""" fuzz = False args = [] cwd = repo.root if cwd: args.append('-d %s' % util.shellquote(cwd)) fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip, util.shellquote(patchname))) try: for line in fp: line = line.rstrip() ui.note(line + '\n') if line.startswith('patching file '): pf = util.parsepatchoutput(line) printed_file = False files.add(pf) elif line.find('with fuzz') >= 0: fuzz = True if not printed_file: ui.warn(pf + '\n') printed_file = True ui.warn(line + '\n') elif line.find('saving rejects to file') >= 0: ui.warn(line + '\n') elif line.find('FAILED') >= 0: if not printed_file: ui.warn(pf + '\n') printed_file = True ui.warn(line + '\n') finally: if files: scmutil.marktouched(repo, files, similarity) code = fp.close() if code: raise PatchError(_("patch command failed: %s") % util.explainexit(code)[0]) return fuzz def patchbackend(ui, backend, patchobj, strip, prefix, files=None, eolmode='strict'): if files is None: files = set() if eolmode is None: eolmode = ui.config('patch', 'eol', 'strict') if eolmode.lower() not in eolmodes: raise error.Abort(_('unsupported line endings type: %s') % eolmode) eolmode = eolmode.lower() store = filestore() try: fp = open(patchobj, 'rb') except TypeError: fp = patchobj try: ret = applydiff(ui, fp, backend, store, strip=strip, prefix=prefix, eolmode=eolmode) finally: if fp != patchobj: fp.close() files.update(backend.close()) store.close() if ret < 0: raise PatchError(_('patch failed to apply')) return ret > 0 def internalpatch(ui, repo, patchobj, strip, prefix='', files=None, eolmode='strict', similarity=0): """use builtin patch to apply to the working 
directory. returns whether patch was applied with fuzz factor.""" backend = workingbackend(ui, repo, similarity) return patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode) def patchrepo(ui, repo, ctx, store, patchobj, strip, prefix, files=None, eolmode='strict'): backend = repobackend(ui, repo, ctx, store) return patchbackend(ui, backend, patchobj, strip, prefix, files, eolmode) def patch(ui, repo, patchname, strip=1, prefix='', files=None, eolmode='strict', similarity=0): """Apply to the working directory. 'eolmode' specifies how end of lines should be handled. It can be: - 'strict': inputs are read in binary mode, EOLs are preserved - 'crlf': EOLs are ignored when patching and reset to CRLF - 'lf': EOLs are ignored when patching and reset to LF - None: get it from user settings, default to 'strict' 'eolmode' is ignored when using an external patcher program. Returns whether patch was applied with fuzz factor. """ patcher = ui.config('ui', 'patch') if files is None: files = set() if patcher: return _externalpatch(ui, repo, patcher, patchname, strip, files, similarity) return internalpatch(ui, repo, patchname, strip, prefix, files, eolmode, similarity) def changedfiles(ui, repo, patchpath, strip=1): backend = fsbackend(ui, repo.root) with open(patchpath, 'rb') as fp: changed = set() for state, values in iterhunks(fp): if state == 'file': afile, bfile, first_hunk, gp = values if gp: gp.path = pathtransform(gp.path, strip - 1, '')[1] if gp.oldpath: gp.oldpath = pathtransform(gp.oldpath, strip - 1, '')[1] else: gp = makepatchmeta(backend, afile, bfile, first_hunk, strip, '') changed.add(gp.path) if gp.op == 'RENAME': changed.add(gp.oldpath) elif state not in ('hunk', 'git'): raise error.Abort(_('unsupported parser state: %s') % state) return changed class GitDiffRequired(Exception): pass def diffallopts(ui, opts=None, untrusted=False, section='diff'): '''return diffopts with all features supported and parsed''' return difffeatureopts(ui, opts=opts, 
untrusted=untrusted, section=section, git=True, whitespace=True, formatchanging=True) diffopts = diffallopts def difffeatureopts(ui, opts=None, untrusted=False, section='diff', git=False, whitespace=False, formatchanging=False): '''return diffopts with only opted-in features parsed Features: - git: git-style diffs - whitespace: whitespace options like ignoreblanklines and ignorews - formatchanging: options that will likely break or cause correctness issues with most diff parsers ''' def get(key, name=None, getter=ui.configbool, forceplain=None): if opts: v = opts.get(key) if v: return v if forceplain is not None and ui.plain(): return forceplain return getter(section, name or key, None, untrusted=untrusted) # core options, expected to be understood by every diff parser buildopts = { 'nodates': get('nodates'), 'showfunc': get('show_function', 'showfunc'), 'context': get('unified', getter=ui.config), } if git: buildopts['git'] = get('git') if whitespace: buildopts['ignorews'] = get('ignore_all_space', 'ignorews') buildopts['ignorewsamount'] = get('ignore_space_change', 'ignorewsamount') buildopts['ignoreblanklines'] = get('ignore_blank_lines', 'ignoreblanklines') if formatchanging: buildopts['text'] = opts and opts.get('text') buildopts['nobinary'] = get('nobinary', forceplain=False) buildopts['noprefix'] = get('noprefix', forceplain=False) return mdiff.diffopts(**buildopts) def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None, losedatafn=None, prefix='', relroot=''): '''yields diff of changes to files between two nodes, or node and working directory. if node1 is None, use first dirstate parent instead. if node2 is None, compare node1 with working directory. losedatafn(**kwarg) is a callable run when opts.upgrade=True and every time some change cannot be represented with the current patch format. Return False to upgrade to git patch format, True to accept the loss or raise an exception to abort the diff. 
It is called with the name of current file being diffed as 'fn'. If set to None, patches will always be upgraded to git format when necessary. prefix is a filename prefix that is prepended to all filenames on display (used for subrepos). relroot, if not empty, must be normalized with a trailing /. Any match patterns that fall outside it will be ignored.''' if opts is None: opts = mdiff.defaultopts if not node1 and not node2: node1 = repo.dirstate.p1() def lrugetfilectx(): cache = {} order = collections.deque() def getfilectx(f, ctx): fctx = ctx.filectx(f, filelog=cache.get(f)) if f not in cache: if len(cache) > 20: del cache[order.popleft()] cache[f] = fctx.filelog() else: order.remove(f) order.append(f) return fctx return getfilectx getfilectx = lrugetfilectx() ctx1 = repo[node1] ctx2 = repo[node2] relfiltered = False if relroot != '' and match.always(): # as a special case, create a new matcher with just the relroot pats = [relroot] match = scmutil.match(ctx2, pats, default='path') relfiltered = True if not changes: changes = repo.status(ctx1, ctx2, match=match) modified, added, removed = changes[:3] if not modified and not added and not removed: return [] if repo.ui.debugflag: hexfunc = hex else: hexfunc = short revs = [hexfunc(node) for node in [ctx1.node(), ctx2.node()] if node] copy = {} if opts.git or opts.upgrade: copy = copies.pathcopies(ctx1, ctx2, match=match) if relroot is not None: if not relfiltered: # XXX this would ideally be done in the matcher, but that is # generally meant to 'or' patterns, not 'and' them. In this case we # need to 'and' all the patterns from the matcher with relroot. 
def filterrel(l): return [f for f in l if f.startswith(relroot)] modified = filterrel(modified) added = filterrel(added) removed = filterrel(removed) relfiltered = True # filter out copies where either side isn't inside the relative root copy = dict(((dst, src) for (dst, src) in copy.iteritems() if dst.startswith(relroot) and src.startswith(relroot))) modifiedset = set(modified) addedset = set(added) removedset = set(removed) for f in modified: if f not in ctx1: # Fix up added, since merged-in additions appear as # modifications during merges modifiedset.remove(f) addedset.add(f) for f in removed: if f not in ctx1: # Merged-in additions that are then removed are reported as removed. # They are not in ctx1, so We don't want to show them in the diff. removedset.remove(f) modified = sorted(modifiedset) added = sorted(addedset) removed = sorted(removedset) for dst, src in copy.items(): if src not in ctx1: # Files merged in during a merge and then copied/renamed are # reported as copies. We want to show them in the diff as additions. 
del copy[dst] def difffn(opts, losedata): return trydiff(repo, revs, ctx1, ctx2, modified, added, removed, copy, getfilectx, opts, losedata, prefix, relroot) if opts.upgrade and not opts.git: try: def losedata(fn): if not losedatafn or not losedatafn(fn=fn): raise GitDiffRequired # Buffer the whole output until we are sure it can be generated return list(difffn(opts.copy(git=False), losedata)) except GitDiffRequired: return difffn(opts.copy(git=True), None) else: return difffn(opts, None) def difflabel(func, *args, **kw): '''yields 2-tuples of (output, label) based on the output of func()''' headprefixes = [('diff', 'diff.diffline'), ('copy', 'diff.extended'), ('rename', 'diff.extended'), ('old', 'diff.extended'), ('new', 'diff.extended'), ('deleted', 'diff.extended'), ('---', 'diff.file_a'), ('+++', 'diff.file_b')] textprefixes = [('@', 'diff.hunk'), ('-', 'diff.deleted'), ('+', 'diff.inserted')] head = False for chunk in func(*args, **kw): lines = chunk.split('\n') for i, line in enumerate(lines): if i != 0: yield ('\n', '') if head: if line.startswith('@'): head = False else: if line and line[0] not in ' +-@\\': head = True stripline = line diffline = False if not head and line and line[0] in '+-': # highlight tabs and trailing whitespace, but only in # changed lines stripline = line.rstrip() diffline = True prefixes = textprefixes if head: prefixes = headprefixes for prefix, label in prefixes: if stripline.startswith(prefix): if diffline: for token in tabsplitter.findall(stripline): if '\t' == token[0]: yield (token, 'diff.tab') else: yield (token, label) else: yield (stripline, label) break else: yield (line, '') if line != stripline: yield (line[len(stripline):], 'diff.trailingwhitespace') def diffui(*args, **kw): '''like diff(), but yields 2-tuples of (output, label) for ui.write()''' return difflabel(diff, *args, **kw) def _filepairs(modified, added, removed, copy, opts): '''generates tuples (f1, f2, copyop), where f1 is the name of the file before and f2 
is the the name after. For added files, f1 will be None, and for removed files, f2 will be None. copyop may be set to None, 'copy' or 'rename' (the latter two only if opts.git is set).''' gone = set() copyto = dict([(v, k) for k, v in copy.items()]) addedset, removedset = set(added), set(removed) for f in sorted(modified + added + removed): copyop = None f1, f2 = f, f if f in addedset: f1 = None if f in copy: if opts.git: f1 = copy[f] if f1 in removedset and f1 not in gone: copyop = 'rename' gone.add(f1) else: copyop = 'copy' elif f in removedset: f2 = None if opts.git: # have we already reported a copy above? if (f in copyto and copyto[f] in addedset and copy[copyto[f]] == f): continue yield f1, f2, copyop def trydiff(repo, revs, ctx1, ctx2, modified, added, removed, copy, getfilectx, opts, losedatafn, prefix, relroot): '''given input data, generate a diff and yield it in blocks If generating a diff would lose data like flags or binary data and losedatafn is not None, it will be called. relroot is removed and prefix is added to every path in the diff output. 
If relroot is not empty, this function expects every path in modified, added, removed and copy to start with it.''' def gitindex(text): if not text: text = "" l = len(text) s = util.sha1('blob %d\0' % l) s.update(text) return s.hexdigest() if opts.noprefix: aprefix = bprefix = '' else: aprefix = 'a/' bprefix = 'b/' def diffline(f, revs): revinfo = ' '.join(["-r %s" % rev for rev in revs]) return 'diff %s %s' % (revinfo, f) date1 = util.datestr(ctx1.date()) date2 = util.datestr(ctx2.date()) gitmode = {'l': '120000', 'x': '100755', '': '100644'} if relroot != '' and (repo.ui.configbool('devel', 'all') or repo.ui.configbool('devel', 'check-relroot')): for f in modified + added + removed + copy.keys() + copy.values(): if f is not None and not f.startswith(relroot): raise AssertionError( "file %s doesn't start with relroot %s" % (f, relroot)) for f1, f2, copyop in _filepairs(modified, added, removed, copy, opts): content1 = None content2 = None flag1 = None flag2 = None if f1: content1 = getfilectx(f1, ctx1).data() if opts.git or losedatafn: flag1 = ctx1.flags(f1) if f2: content2 = getfilectx(f2, ctx2).data() if opts.git or losedatafn: flag2 = ctx2.flags(f2) binary = False if opts.git or losedatafn: binary = util.binary(content1) or util.binary(content2) if losedatafn and not opts.git: if (binary or # copy/rename f2 in copy or # empty file creation (not f1 and not content2) or # empty file deletion (not content1 and not f2) or # create with flags (not f1 and flag2) or # change flags (f1 and f2 and flag1 != flag2)): losedatafn(f2 or f1) path1 = f1 or f2 path2 = f2 or f1 path1 = posixpath.join(prefix, path1[len(relroot):]) path2 = posixpath.join(prefix, path2[len(relroot):]) header = [] if opts.git: header.append('diff --git %s%s %s%s' % (aprefix, path1, bprefix, path2)) if not f1: # added header.append('new file mode %s' % gitmode[flag2]) elif not f2: # removed header.append('deleted file mode %s' % gitmode[flag1]) else: # modified/copied/renamed mode1, mode2 = 
                gitmode[flag1], gitmode[flag2]
                if mode1 != mode2:
                    header.append('old mode %s' % mode1)
                    header.append('new mode %s' % mode2)
                if copyop is not None:
                    # copyop is 'copy' or 'rename' (git diffs only).
                    header.append('%s from %s' % (copyop, path1))
                    header.append('%s to %s' % (copyop, path2))
        elif revs and not repo.ui.quiet:
            # Plain (non-git) header: 'diff -r REV1 -r REV2 path'.
            header.append(diffline(path1, revs))

        if binary and opts.git and not opts.nobinary:
            text = mdiff.b85diff(content1, content2)
            if text:
                header.append('index %s..%s' %
                              (gitindex(content1), gitindex(content2)))
        else:
            text = mdiff.unidiff(content1, date1,
                                 content2, date2,
                                 path1, path2, opts=opts)
        # Emit the header unless it would be a lone, redundant diff line
        # with no hunks following it.
        if header and (text or len(header) > 1):
            yield '\n'.join(header) + '\n'
        if text:
            yield text

def diffstatsum(stats):
    # Fold per-file diffstat tuples (filename, adds, removes, isbinary)
    # into the column widths and totals used to lay out the table:
    # (widest filename, largest adds+removes, total adds, total removes,
    # whether any file was binary).
    maxfile, maxtotal, addtotal, removetotal, binary = 0, 0, 0, 0, False
    for f, a, r, b in stats:
        maxfile = max(maxfile, encoding.colwidth(f))
        maxtotal = max(maxtotal, a + r)
        addtotal += a
        removetotal += r
        binary = binary or b

    return maxfile, maxtotal, addtotal, removetotal, binary

def diffstatdata(lines):
    # Scan diff text and return [(filename, adds, removes, isbinary)],
    # one tuple per file section encountered.
    diffre = re.compile('^diff .*-r [a-z0-9]+\s(.*)$')

    results = []
    filename, adds, removes, isbinary = None, 0, 0, False

    def addresult():
        # Flush the counters for the file currently being scanned.
        if filename:
            results.append((filename, adds, removes, isbinary))

    for line in lines:
        if line.startswith('diff'):
            addresult()
            # set numbers to 0 anyway when starting new file
            adds, removes, isbinary = 0, 0, False
            if line.startswith('diff --git a/'):
                filename = gitre.search(line).group(2)
            elif line.startswith('diff -r'):
                # format: "diff -r ... -r ...
filename" filename = diffre.search(line).group(1) elif line.startswith('+') and not line.startswith('+++ '): adds += 1 elif line.startswith('-') and not line.startswith('--- '): removes += 1 elif (line.startswith('GIT binary patch') or line.startswith('Binary file')): isbinary = True addresult() return results def diffstat(lines, width=80, git=False): output = [] stats = diffstatdata(lines) maxname, maxtotal, totaladds, totalremoves, hasbinary = diffstatsum(stats) countwidth = len(str(maxtotal)) if hasbinary and countwidth < 3: countwidth = 3 graphwidth = width - countwidth - maxname - 6 if graphwidth < 10: graphwidth = 10 def scale(i): if maxtotal <= graphwidth: return i # If diffstat runs out of room it doesn't print anything, # which isn't very useful, so always print at least one + or - # if there were at least some changes. return max(i * graphwidth // maxtotal, int(bool(i))) for filename, adds, removes, isbinary in stats: if isbinary: count = 'Bin' else: count = adds + removes pluses = '+' * scale(adds) minuses = '-' * scale(removes) output.append(' %s%s | %*s %s%s\n' % (filename, ' ' * (maxname - encoding.colwidth(filename)), countwidth, count, pluses, minuses)) if stats: output.append(_(' %d files changed, %d insertions(+), ' '%d deletions(-)\n') % (len(stats), totaladds, totalremoves)) return ''.join(output) def diffstatui(*args, **kw): '''like diffstat(), but yields 2-tuples of (output, label) for ui.write() ''' for line in diffstat(*args, **kw).splitlines(): if line and line[-1] in '+-': name, graph = line.rsplit(' ', 1) yield (name + ' ', '') m = re.search(r'\++', graph) if m: yield (m.group(0), 'diffstat.inserted') m = re.search(r'-+', graph) if m: yield (m.group(0), 'diffstat.deleted') else: yield (line, '') yield ('\n', '') mercurial-3.7.3/mercurial/simplemerge.py0000644000175000017500000003256012676531525020003 0ustar mpmmpm00000000000000# Copyright (C) 2004, 2005 Canonical Ltd # # This program is free software; you can redistribute it and/or modify 
# it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, see . # mbp: "you know that thing where cvs gives you conflict markers?" # s: "i hate that." from __future__ import absolute_import import os import sys from .i18n import _ from . import ( error, mdiff, scmutil, util, ) class CantReprocessAndShowBase(Exception): pass def intersect(ra, rb): """Given two ranges return the range where they intersect or None. >>> intersect((0, 10), (0, 6)) (0, 6) >>> intersect((0, 10), (5, 15)) (5, 10) >>> intersect((0, 10), (10, 15)) >>> intersect((0, 9), (10, 15)) >>> intersect((0, 9), (7, 15)) (7, 9) """ assert ra[0] <= ra[1] assert rb[0] <= rb[1] sa = max(ra[0], rb[0]) sb = min(ra[1], rb[1]) if sa < sb: return sa, sb else: return None def compare_range(a, astart, aend, b, bstart, bend): """Compare a[astart:aend] == b[bstart:bend], without slicing. """ if (aend - astart) != (bend - bstart): return False for ia, ib in zip(xrange(astart, aend), xrange(bstart, bend)): if a[ia] != b[ib]: return False else: return True class Merge3Text(object): """3-way merge of texts. 
Given strings BASE, OTHER, THIS, tries to produce a combined text incorporating the changes from both BASE->OTHER and BASE->THIS.""" def __init__(self, basetext, atext, btext, base=None, a=None, b=None): self.basetext = basetext self.atext = atext self.btext = btext if base is None: base = mdiff.splitnewlines(basetext) if a is None: a = mdiff.splitnewlines(atext) if b is None: b = mdiff.splitnewlines(btext) self.base = base self.a = a self.b = b def merge_lines(self, name_a=None, name_b=None, name_base=None, start_marker='<<<<<<<', mid_marker='=======', end_marker='>>>>>>>', base_marker=None, localorother=None): """Return merge in cvs-like form. """ self.conflicts = False newline = '\n' if len(self.a) > 0: if self.a[0].endswith('\r\n'): newline = '\r\n' elif self.a[0].endswith('\r'): newline = '\r' if name_a and start_marker: start_marker = start_marker + ' ' + name_a if name_b and end_marker: end_marker = end_marker + ' ' + name_b if name_base and base_marker: base_marker = base_marker + ' ' + name_base merge_regions = self.merge_regions() for t in merge_regions: what = t[0] if what == 'unchanged': for i in range(t[1], t[2]): yield self.base[i] elif what == 'a' or what == 'same': for i in range(t[1], t[2]): yield self.a[i] elif what == 'b': for i in range(t[1], t[2]): yield self.b[i] elif what == 'conflict': if localorother == 'local': for i in range(t[3], t[4]): yield self.a[i] elif localorother == 'other': for i in range(t[5], t[6]): yield self.b[i] else: self.conflicts = True if start_marker is not None: yield start_marker + newline for i in range(t[3], t[4]): yield self.a[i] if base_marker is not None: yield base_marker + newline for i in range(t[1], t[2]): yield self.base[i] if mid_marker is not None: yield mid_marker + newline for i in range(t[5], t[6]): yield self.b[i] if end_marker is not None: yield end_marker + newline else: raise ValueError(what) def merge_groups(self): """Yield sequence of line groups. 
Each one is a tuple: 'unchanged', lines Lines unchanged from base 'a', lines Lines taken from a 'same', lines Lines taken from a (and equal to b) 'b', lines Lines taken from b 'conflict', base_lines, a_lines, b_lines Lines from base were changed to either a or b and conflict. """ for t in self.merge_regions(): what = t[0] if what == 'unchanged': yield what, self.base[t[1]:t[2]] elif what == 'a' or what == 'same': yield what, self.a[t[1]:t[2]] elif what == 'b': yield what, self.b[t[1]:t[2]] elif what == 'conflict': yield (what, self.base[t[1]:t[2]], self.a[t[3]:t[4]], self.b[t[5]:t[6]]) else: raise ValueError(what) def merge_regions(self): """Return sequences of matching and conflicting regions. This returns tuples, where the first value says what kind we have: 'unchanged', start, end Take a region of base[start:end] 'same', astart, aend b and a are different from base but give the same result 'a', start, end Non-clashing insertion from a[start:end] Method is as follows: The two sequences align only on regions which match the base and both descendants. These are found by doing a two-way diff of each one against the base, and then finding the intersections between those regions. These "sync regions" are by definition unchanged in both and easily dealt with. The regions in between can be in any of three cases: conflicted, or changed on only one side. 
""" # section a[0:ia] has been disposed of, etc iz = ia = ib = 0 for region in self.find_sync_regions(): zmatch, zend, amatch, aend, bmatch, bend = region #print 'match base [%d:%d]' % (zmatch, zend) matchlen = zend - zmatch assert matchlen >= 0 assert matchlen == (aend - amatch) assert matchlen == (bend - bmatch) len_a = amatch - ia len_b = bmatch - ib len_base = zmatch - iz assert len_a >= 0 assert len_b >= 0 assert len_base >= 0 #print 'unmatched a=%d, b=%d' % (len_a, len_b) if len_a or len_b: # try to avoid actually slicing the lists equal_a = compare_range(self.a, ia, amatch, self.base, iz, zmatch) equal_b = compare_range(self.b, ib, bmatch, self.base, iz, zmatch) same = compare_range(self.a, ia, amatch, self.b, ib, bmatch) if same: yield 'same', ia, amatch elif equal_a and not equal_b: yield 'b', ib, bmatch elif equal_b and not equal_a: yield 'a', ia, amatch elif not equal_a and not equal_b: yield 'conflict', iz, zmatch, ia, amatch, ib, bmatch else: raise AssertionError("can't handle a=b=base but unmatched") ia = amatch ib = bmatch iz = zmatch # if the same part of the base was deleted on both sides # that's OK, we can just skip it. if matchlen > 0: assert ia == amatch assert ib == bmatch assert iz == zmatch yield 'unchanged', zmatch, zend iz = zend ia = aend ib = bend def find_sync_regions(self): """Return a list of sync regions, where both descendants match the base. Generates a list of (base1, base2, a1, a2, b1, b2). There is always a zero-length sync region at the end of all the files. """ ia = ib = 0 amatches = mdiff.get_matching_blocks(self.basetext, self.atext) bmatches = mdiff.get_matching_blocks(self.basetext, self.btext) len_a = len(amatches) len_b = len(bmatches) sl = [] while ia < len_a and ib < len_b: abase, amatch, alen = amatches[ia] bbase, bmatch, blen = bmatches[ib] # there is an unconflicted block at i; how long does it # extend? until whichever one ends earlier. 
i = intersect((abase, abase + alen), (bbase, bbase + blen)) if i: intbase = i[0] intend = i[1] intlen = intend - intbase # found a match of base[i[0], i[1]]; this may be less than # the region that matches in either one assert intlen <= alen assert intlen <= blen assert abase <= intbase assert bbase <= intbase asub = amatch + (intbase - abase) bsub = bmatch + (intbase - bbase) aend = asub + intlen bend = bsub + intlen assert self.base[intbase:intend] == self.a[asub:aend], \ (self.base[intbase:intend], self.a[asub:aend]) assert self.base[intbase:intend] == self.b[bsub:bend] sl.append((intbase, intend, asub, aend, bsub, bend)) # advance whichever one ends first in the base text if (abase + alen) < (bbase + blen): ia += 1 else: ib += 1 intbase = len(self.base) abase = len(self.a) bbase = len(self.b) sl.append((intbase, intbase, abase, abase, bbase, bbase)) return sl def find_unconflicted(self): """Return a list of ranges in base that are not conflicted.""" am = mdiff.get_matching_blocks(self.basetext, self.atext) bm = mdiff.get_matching_blocks(self.basetext, self.btext) unc = [] while am and bm: # there is an unconflicted block at i; how long does it # extend? until whichever one ends earlier. 
a1 = am[0][0] a2 = a1 + am[0][2] b1 = bm[0][0] b2 = b1 + bm[0][2] i = intersect((a1, a2), (b1, b2)) if i: unc.append(i) if a2 < b2: del am[0] else: del bm[0] return unc def simplemerge(ui, local, base, other, **opts): def readfile(filename): f = open(filename, "rb") text = f.read() f.close() if util.binary(text): msg = _("%s looks like a binary file.") % filename if not opts.get('quiet'): ui.warn(_('warning: %s\n') % msg) if not opts.get('text'): raise error.Abort(msg) return text mode = opts.get('mode','merge') if mode == 'union': name_a = None name_b = None name_base = None else: name_a = local name_b = other name_base = None labels = opts.get('label', []) if len(labels) > 0: name_a = labels[0] if len(labels) > 1: name_b = labels[1] if len(labels) > 2: name_base = labels[2] if len(labels) > 3: raise error.Abort(_("can only specify three labels.")) try: localtext = readfile(local) basetext = readfile(base) othertext = readfile(other) except error.Abort: return 1 local = os.path.realpath(local) if not opts.get('print'): opener = scmutil.opener(os.path.dirname(local)) out = opener(os.path.basename(local), "w", atomictemp=True) else: out = sys.stdout m3 = Merge3Text(basetext, localtext, othertext) extrakwargs = {"localorother": opts.get("localorother", None)} if mode == 'union': extrakwargs['start_marker'] = None extrakwargs['mid_marker'] = None extrakwargs['end_marker'] = None elif name_base is not None: extrakwargs['base_marker'] = '|||||||' extrakwargs['name_base'] = name_base for line in m3.merge_lines(name_a=name_a, name_b=name_b, **extrakwargs): out.write(line) if not opts.get('print'): out.close() if m3.conflicts and not mode == 'union': return 1 mercurial-3.7.3/mercurial/exewrapper.c0000644000175000017500000001056012676531525017442 0ustar mpmmpm00000000000000/* exewrapper.c - wrapper for calling a python script on Windows Copyright 2012 Adrian Buehlmann and others This software may be used and distributed according to the terms of the GNU General Public 
License version 2 or any later version. */ #include #include #include "hgpythonlib.h" #ifdef __GNUC__ int strcat_s(char *d, size_t n, const char *s) { return !strncat(d, s, n); } int strcpy_s(char *d, size_t n, const char *s) { return !strncpy(d, s, n); } #endif static char pyscript[MAX_PATH + 10]; static char pyhome[MAX_PATH + 10]; static char envpyhome[MAX_PATH + 10]; static char pydllfile[MAX_PATH + 10]; int main(int argc, char *argv[]) { char *p; int ret; int i; int n; char **pyargv; WIN32_FIND_DATA fdata; HANDLE hfind; const char *err; HMODULE pydll; void (__cdecl *Py_SetPythonHome)(char *home); int (__cdecl *Py_Main)(int argc, char *argv[]); if (GetModuleFileName(NULL, pyscript, sizeof(pyscript)) == 0) { err = "GetModuleFileName failed"; goto bail; } p = strrchr(pyscript, '.'); if (p == NULL) { err = "malformed module filename"; goto bail; } *p = 0; /* cut trailing ".exe" */ strcpy_s(pyhome, sizeof(pyhome), pyscript); hfind = FindFirstFile(pyscript, &fdata); if (hfind != INVALID_HANDLE_VALUE) { /* pyscript exists, close handle */ FindClose(hfind); } else { /* file pyscript isn't there, take exe.py */ strcat_s(pyscript, sizeof(pyscript), "exe.py"); } pydll = NULL; /* We first check, that environment variable PYTHONHOME is *not* set. This just mimicks the behavior of the regular python.exe, which uses PYTHONHOME to find its installation directory (if it has been set). Note: Users of HackableMercurial are expected to *not* set PYTHONHOME! */ if (GetEnvironmentVariable("PYTHONHOME", envpyhome, sizeof(envpyhome)) == 0) { /* Environment var PYTHONHOME is *not* set. Let's see if we are running inside a HackableMercurial. 
*/ p = strrchr(pyhome, '\\'); if (p == NULL) { err = "can't find backslash in module filename"; goto bail; } *p = 0; /* cut at directory */ /* check for private Python of HackableMercurial */ strcat_s(pyhome, sizeof(pyhome), "\\hg-python"); hfind = FindFirstFile(pyhome, &fdata); if (hfind != INVALID_HANDLE_VALUE) { /* path pyhome exists, let's use it */ FindClose(hfind); strcpy_s(pydllfile, sizeof(pydllfile), pyhome); strcat_s(pydllfile, sizeof(pydllfile), "\\" HGPYTHONLIB); pydll = LoadLibrary(pydllfile); if (pydll == NULL) { err = "failed to load private Python DLL " HGPYTHONLIB ".dll"; goto bail; } Py_SetPythonHome = (void*)GetProcAddress(pydll, "Py_SetPythonHome"); if (Py_SetPythonHome == NULL) { err = "failed to get Py_SetPythonHome"; goto bail; } Py_SetPythonHome(pyhome); } } if (pydll == NULL) { pydll = LoadLibrary(HGPYTHONLIB); if (pydll == NULL) { err = "failed to load Python DLL " HGPYTHONLIB ".dll"; goto bail; } } Py_Main = (void*)GetProcAddress(pydll, "Py_Main"); if (Py_Main == NULL) { err = "failed to get Py_Main"; goto bail; } /* Only add the pyscript to the args, if it's not already there. It may already be there, if the script spawned a child process of itself, in the same way as it got called, that is, with the pyscript already in place. So we optionally accept the pyscript as the first argument (argv[1]), letting our exe taking the role of the python interpreter. */ if (argc >= 2 && strcmp(argv[1], pyscript) == 0) { /* pyscript is already in the args, so there is no need to copy the args and we can directly call the python interpreter with the original args. */ return Py_Main(argc, argv); } /* Start assembling the args for the Python interpreter call. We put the name of our exe (argv[0]) in the position where the python.exe canonically is, and insert the pyscript next. 
*/ pyargv = malloc((argc + 5) * sizeof(char*)); if (pyargv == NULL) { err = "not enough memory"; goto bail; } n = 0; pyargv[n++] = argv[0]; pyargv[n++] = pyscript; /* copy remaining args from the command line */ for (i = 1; i < argc; i++) pyargv[n++] = argv[i]; /* argv[argc] is guaranteed to be NULL, so we forward that guarantee */ pyargv[n] = NULL; ret = Py_Main(n, pyargv); /* The Python interpreter call */ free(pyargv); return ret; bail: fprintf(stderr, "abort: %s\n", err); return 255; } mercurial-3.7.3/mercurial/crecord.py0000644000175000017500000017561612676531525017125 0ustar mpmmpm00000000000000# stuff related specifically to patch manipulation / parsing # # Copyright 2008 Mark Edgington # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. # # This code is based on the Mark Edgington's crecord extension. # (Itself based on Bryan O'Sullivan's record extension.) from __future__ import absolute_import import cStringIO import locale import os import re import signal import struct import sys import tempfile from .i18n import _ from . import ( encoding, error, patch as patchmod, util, ) # This is required for ncurses to display non-ASCII characters in default user # locale encoding correctly. --immerrr locale.setlocale(locale.LC_ALL, '') try: import curses import fcntl import termios curses.error fcntl.ioctl termios.TIOCGWINSZ except ImportError: # I have no idea if wcurses works with crecord... try: import wcurses as curses curses.error except ImportError: # wcurses is not shipped on Windows by default, or python is not # compiled with curses curses = False def checkcurses(ui): """Return True if the user wants to use curses This method returns True if curses is found (and that python is built with it) and that the user has the correct flag for the ui. 
""" return curses and ui.configbool('experimental', 'crecord', False) _origstdout = sys.__stdout__ # used by gethw() class patchnode(object): """abstract class for patch graph nodes (i.e. patchroot, header, hunk, hunkline) """ def firstchild(self): raise NotImplementedError("method must be implemented by subclass") def lastchild(self): raise NotImplementedError("method must be implemented by subclass") def allchildren(self): "Return a list of all of the direct children of this node" raise NotImplementedError("method must be implemented by subclass") def nextsibling(self): """ Return the closest next item of the same type where there are no items of different types between the current item and this closest item. If no such item exists, return None. """ raise NotImplementedError("method must be implemented by subclass") def prevsibling(self): """ Return the closest previous item of the same type where there are no items of different types between the current item and this closest item. If no such item exists, return None. """ raise NotImplementedError("method must be implemented by subclass") def parentitem(self): raise NotImplementedError("method must be implemented by subclass") def nextitem(self, constrainlevel=True, skipfolded=True): """ If constrainLevel == True, return the closest next item of the same type where there are no items of different types between the current item and this closest item. If constrainLevel == False, then try to return the next item closest to this item, regardless of item's type (header, hunk, or HunkLine). If skipFolded == True, and the current item is folded, then the child items that are hidden due to folding will be skipped when determining the next item. If it is not possible to get the next item, return None. 
""" try: itemfolded = self.folded except AttributeError: itemfolded = False if constrainlevel: return self.nextsibling() elif skipfolded and itemfolded: nextitem = self.nextsibling() if nextitem is None: try: nextitem = self.parentitem().nextsibling() except AttributeError: nextitem = None return nextitem else: # try child item = self.firstchild() if item is not None: return item # else try next sibling item = self.nextsibling() if item is not None: return item try: # else try parent's next sibling item = self.parentitem().nextsibling() if item is not None: return item # else return grandparent's next sibling (or None) return self.parentitem().parentitem().nextsibling() except AttributeError: # parent and/or grandparent was None return None def previtem(self, constrainlevel=True, skipfolded=True): """ If constrainLevel == True, return the closest previous item of the same type where there are no items of different types between the current item and this closest item. If constrainLevel == False, then try to return the previous item closest to this item, regardless of item's type (header, hunk, or HunkLine). If skipFolded == True, and the current item is folded, then the items that are hidden due to folding will be skipped when determining the next item. If it is not possible to get the previous item, return None. 
""" if constrainlevel: return self.prevsibling() else: # try previous sibling's last child's last child, # else try previous sibling's last child, else try previous sibling prevsibling = self.prevsibling() if prevsibling is not None: prevsiblinglastchild = prevsibling.lastchild() if ((prevsiblinglastchild is not None) and not prevsibling.folded): prevsiblinglclc = prevsiblinglastchild.lastchild() if ((prevsiblinglclc is not None) and not prevsiblinglastchild.folded): return prevsiblinglclc else: return prevsiblinglastchild else: return prevsibling # try parent (or None) return self.parentitem() class patch(patchnode, list): # todo: rename patchroot """ list of header objects representing the patch. """ def __init__(self, headerlist): self.extend(headerlist) # add parent patch object reference to each header for header in self: header.patch = self class uiheader(patchnode): """patch header xxx shouldn't we move this to mercurial/patch.py ? """ def __init__(self, header): self.nonuiheader = header # flag to indicate whether to apply this chunk self.applied = True # flag which only affects the status display indicating if a node's # children are partially applied (i.e. some applied, some not). 
self.partial = False # flag to indicate whether to display as folded/unfolded to user self.folded = True # list of all headers in patch self.patch = None # flag is False if this header was ever unfolded from initial state self.neverunfolded = True self.hunks = [uihunk(h, self) for h in self.hunks] def prettystr(self): x = cStringIO.StringIO() self.pretty(x) return x.getvalue() def nextsibling(self): numheadersinpatch = len(self.patch) indexofthisheader = self.patch.index(self) if indexofthisheader < numheadersinpatch - 1: nextheader = self.patch[indexofthisheader + 1] return nextheader else: return None def prevsibling(self): indexofthisheader = self.patch.index(self) if indexofthisheader > 0: previousheader = self.patch[indexofthisheader - 1] return previousheader else: return None def parentitem(self): """ there is no 'real' parent item of a header that can be selected, so return None. """ return None def firstchild(self): "return the first child of this item, if one exists. otherwise None." if len(self.hunks) > 0: return self.hunks[0] else: return None def lastchild(self): "return the last child of this item, if one exists. otherwise None." if len(self.hunks) > 0: return self.hunks[-1] else: return None def allchildren(self): "return a list of all of the direct children of this node" return self.hunks def __getattr__(self, name): return getattr(self.nonuiheader, name) class uihunkline(patchnode): "represents a changed line in a hunk" def __init__(self, linetext, hunk): self.linetext = linetext self.applied = True # the parent hunk to which this line belongs self.hunk = hunk # folding lines currently is not used/needed, but this flag is needed # in the previtem method. 
self.folded = False def prettystr(self): return self.linetext def nextsibling(self): numlinesinhunk = len(self.hunk.changedlines) indexofthisline = self.hunk.changedlines.index(self) if (indexofthisline < numlinesinhunk - 1): nextline = self.hunk.changedlines[indexofthisline + 1] return nextline else: return None def prevsibling(self): indexofthisline = self.hunk.changedlines.index(self) if indexofthisline > 0: previousline = self.hunk.changedlines[indexofthisline - 1] return previousline else: return None def parentitem(self): "return the parent to the current item" return self.hunk def firstchild(self): "return the first child of this item, if one exists. otherwise None." # hunk-lines don't have children return None def lastchild(self): "return the last child of this item, if one exists. otherwise None." # hunk-lines don't have children return None class uihunk(patchnode): """ui patch hunk, wraps a hunk and keep track of ui behavior """ maxcontext = 3 def __init__(self, hunk, header): self._hunk = hunk self.changedlines = [uihunkline(line, self) for line in hunk.hunk] self.header = header # used at end for detecting how many removed lines were un-applied self.originalremoved = self.removed # flag to indicate whether to display as folded/unfolded to user self.folded = True # flag to indicate whether to apply this chunk self.applied = True # flag which only affects the status display indicating if a node's # children are partially applied (i.e. some applied, some not). 
self.partial = False def nextsibling(self): numhunksinheader = len(self.header.hunks) indexofthishunk = self.header.hunks.index(self) if (indexofthishunk < numhunksinheader - 1): nexthunk = self.header.hunks[indexofthishunk + 1] return nexthunk else: return None def prevsibling(self): indexofthishunk = self.header.hunks.index(self) if indexofthishunk > 0: previoushunk = self.header.hunks[indexofthishunk - 1] return previoushunk else: return None def parentitem(self): "return the parent to the current item" return self.header def firstchild(self): "return the first child of this item, if one exists. otherwise None." if len(self.changedlines) > 0: return self.changedlines[0] else: return None def lastchild(self): "return the last child of this item, if one exists. otherwise None." if len(self.changedlines) > 0: return self.changedlines[-1] else: return None def allchildren(self): "return a list of all of the direct children of this node" return self.changedlines def countchanges(self): """changedlines -> (n+,n-)""" add = len([l for l in self.changedlines if l.applied and l.prettystr()[0] == '+']) rem = len([l for l in self.changedlines if l.applied and l.prettystr()[0] == '-']) return add, rem def getfromtoline(self): # calculate the number of removed lines converted to context lines removedconvertedtocontext = self.originalremoved - self.removed contextlen = (len(self.before) + len(self.after) + removedconvertedtocontext) if self.after and self.after[-1] == '\\ no newline at end of file\n': contextlen -= 1 fromlen = contextlen + self.removed tolen = contextlen + self.added # diffutils manual, section "2.2.2.2 detailed description of unified # format": "an empty hunk is considered to end at the line that # precedes the hunk." # # so, if either of hunks is empty, decrease its line start. --immerrr # but only do this if fromline > 0, to avoid having, e.g fromline=-1. 
fromline, toline = self.fromline, self.toline if fromline != 0: if fromlen == 0: fromline -= 1 if tolen == 0: toline -= 1 fromtoline = '@@ -%d,%d +%d,%d @@%s\n' % ( fromline, fromlen, toline, tolen, self.proc and (' ' + self.proc)) return fromtoline def write(self, fp): # updated self.added/removed, which are used by getfromtoline() self.added, self.removed = self.countchanges() fp.write(self.getfromtoline()) hunklinelist = [] # add the following to the list: (1) all applied lines, and # (2) all unapplied removal lines (convert these to context lines) for changedline in self.changedlines: changedlinestr = changedline.prettystr() if changedline.applied: hunklinelist.append(changedlinestr) elif changedlinestr[0] == "-": hunklinelist.append(" " + changedlinestr[1:]) fp.write(''.join(self.before + hunklinelist + self.after)) pretty = write def prettystr(self): x = cStringIO.StringIO() self.pretty(x) return x.getvalue() def __getattr__(self, name): return getattr(self._hunk, name) def __repr__(self): return '' % (self.filename(), self.fromline) def filterpatch(ui, chunks, chunkselector, operation=None): """interactively filter patch chunks into applied-only chunks""" if operation is None: operation = _('confirm') chunks = list(chunks) # convert chunks list into structure suitable for displaying/modifying # with curses. create a list of headers only. 
headers = [c for c in chunks if isinstance(c, patchmod.header)] # if there are no changed files if len(headers) == 0: return [], {} uiheaders = [uiheader(h) for h in headers] # let user choose headers/hunks/lines, and mark their applied flags # accordingly ret = chunkselector(ui, uiheaders) appliedhunklist = [] for hdr in uiheaders: if (hdr.applied and (hdr.special() or len([h for h in hdr.hunks if h.applied]) > 0)): appliedhunklist.append(hdr) fixoffset = 0 for hnk in hdr.hunks: if hnk.applied: appliedhunklist.append(hnk) # adjust the 'to'-line offset of the hunk to be correct # after de-activating some of the other hunks for this file if fixoffset: #hnk = copy.copy(hnk) # necessary?? hnk.toline += fixoffset else: fixoffset += hnk.removed - hnk.added return (appliedhunklist, ret) def gethw(): """ magically get the current height and width of the window (without initscr) this is a rip-off of a rip-off - taken from the bpython code. it is useful / necessary because otherwise curses.initscr() must be called, which can leave the terminal in a nasty state after exiting. """ h, w = struct.unpack( "hhhh", fcntl.ioctl(_origstdout, termios.TIOCGWINSZ, "\000"*8))[0:2] return h, w def chunkselector(ui, headerlist): """ curses interface to get selection of chunks, and mark the applied flags of the chosen chunks. """ ui.write(_('starting interactive selection\n')) chunkselector = curseschunkselector(headerlist, ui) f = signal.getsignal(signal.SIGTSTP) curses.wrapper(chunkselector.main) if chunkselector.initerr is not None: raise error.Abort(chunkselector.initerr) # ncurses does not restore signal handler for SIGTSTP signal.signal(signal.SIGTSTP, f) return chunkselector.opts def testdecorator(testfn, f): def u(*args, **kwargs): return f(testfn, *args, **kwargs) return u def testchunkselector(testfn, ui, headerlist): """ test interface to get selection of chunks, and mark the applied flags of the chosen chunks. 
""" chunkselector = curseschunkselector(headerlist, ui) if testfn and os.path.exists(testfn): testf = open(testfn) testcommands = map(lambda x: x.rstrip('\n'), testf.readlines()) testf.close() while True: if chunkselector.handlekeypressed(testcommands.pop(0), test=True): break return chunkselector.opts class curseschunkselector(object): def __init__(self, headerlist, ui): # put the headers into a patch object self.headerlist = patch(headerlist) self.ui = ui self.opts = {} self.errorstr = None # list of all chunks self.chunklist = [] for h in headerlist: self.chunklist.append(h) self.chunklist.extend(h.hunks) # dictionary mapping (fgcolor, bgcolor) pairs to the # corresponding curses color-pair value. self.colorpairs = {} # maps custom nicknames of color-pairs to curses color-pair values self.colorpairnames = {} # the currently selected header, hunk, or hunk-line self.currentselecteditem = self.headerlist[0] # updated when printing out patch-display -- the 'lines' here are the # line positions *in the pad*, not on the screen. self.selecteditemstartline = 0 self.selecteditemendline = None # define indentation levels self.headerindentnumchars = 0 self.hunkindentnumchars = 3 self.hunklineindentnumchars = 6 # the first line of the pad to print to the screen self.firstlineofpadtoprint = 0 # keeps track of the number of lines in the pad self.numpadlines = None self.numstatuslines = 2 # keep a running count of the number of lines printed to the pad # (used for determining when the selected item begins/ends) self.linesprintedtopadsofar = 0 # the first line of the pad which is visible on the screen self.firstlineofpadtoprint = 0 # stores optional text for a commit comment provided by the user self.commenttext = "" # if the last 'toggle all' command caused all changes to be applied self.waslasttoggleallapplied = True def uparrowevent(self): """ try to select the previous item to the current item that has the most-indented level. 
for example, if a hunk is selected, try to select the last hunkline of the hunk prior to the selected hunk. or, if the first hunkline of a hunk is currently selected, then select the hunk itself. if the currently selected item is already at the top of the screen, scroll the screen down to show the new-selected item. """ currentitem = self.currentselecteditem nextitem = currentitem.previtem(constrainlevel=False) if nextitem is None: # if no parent item (i.e. currentitem is the first header), then # no change... nextitem = currentitem self.currentselecteditem = nextitem def uparrowshiftevent(self): """ select (if possible) the previous item on the same level as the currently selected item. otherwise, select (if possible) the parent-item of the currently selected item. if the currently selected item is already at the top of the screen, scroll the screen down to show the new-selected item. """ currentitem = self.currentselecteditem nextitem = currentitem.previtem() # if there's no previous item on this level, try choosing the parent if nextitem is None: nextitem = currentitem.parentitem() if nextitem is None: # if no parent item (i.e. currentitem is the first header), then # no change... nextitem = currentitem self.currentselecteditem = nextitem def downarrowevent(self): """ try to select the next item to the current item that has the most-indented level. for example, if a hunk is selected, select the first hunkline of the selected hunk. or, if the last hunkline of a hunk is currently selected, then select the next hunk, if one exists, or if not, the next header if one exists. if the currently selected item is already at the bottom of the screen, scroll the screen up to show the new-selected item. 
""" #self.startprintline += 1 #debug currentitem = self.currentselecteditem nextitem = currentitem.nextitem(constrainlevel=False) # if there's no next item, keep the selection as-is if nextitem is None: nextitem = currentitem self.currentselecteditem = nextitem def downarrowshiftevent(self): """ if the cursor is already at the bottom chunk, scroll the screen up and move the cursor-position to the subsequent chunk. otherwise, only move the cursor position down one chunk. """ # todo: update docstring currentitem = self.currentselecteditem nextitem = currentitem.nextitem() # if there's no previous item on this level, try choosing the parent's # nextitem. if nextitem is None: try: nextitem = currentitem.parentitem().nextitem() except AttributeError: # parentitem returned None, so nextitem() can't be called nextitem = None if nextitem is None: # if no next item on parent-level, then no change... nextitem = currentitem self.currentselecteditem = nextitem def rightarrowevent(self): """ select (if possible) the first of this item's child-items. """ currentitem = self.currentselecteditem nextitem = currentitem.firstchild() # turn off folding if we want to show a child-item if currentitem.folded: self.togglefolded(currentitem) if nextitem is None: # if no next item on parent-level, then no change... nextitem = currentitem self.currentselecteditem = nextitem def leftarrowevent(self): """ if the current item can be folded (i.e. it is an unfolded header or hunk), then fold it. otherwise try select (if possible) the parent of this item. """ currentitem = self.currentselecteditem # try to fold the item if not isinstance(currentitem, uihunkline): if not currentitem.folded: self.togglefolded(item=currentitem) return # if it can't be folded, try to select the parent item nextitem = currentitem.parentitem() if nextitem is None: # if no item on parent-level, then no change... 
nextitem = currentitem if not nextitem.folded: self.togglefolded(item=nextitem) self.currentselecteditem = nextitem def leftarrowshiftevent(self): """ select the header of the current item (or fold current item if the current item is already a header). """ currentitem = self.currentselecteditem if isinstance(currentitem, uiheader): if not currentitem.folded: self.togglefolded(item=currentitem) return # select the parent item recursively until we're at a header while True: nextitem = currentitem.parentitem() if nextitem is None: break else: currentitem = nextitem self.currentselecteditem = currentitem def updatescroll(self): "scroll the screen to fully show the currently-selected" selstart = self.selecteditemstartline selend = self.selecteditemendline #selnumlines = selend - selstart padstart = self.firstlineofpadtoprint padend = padstart + self.yscreensize - self.numstatuslines - 1 # 'buffered' pad start/end values which scroll with a certain # top/bottom context margin padstartbuffered = padstart + 3 padendbuffered = padend - 3 if selend > padendbuffered: self.scrolllines(selend - padendbuffered) elif selstart < padstartbuffered: # negative values scroll in pgup direction self.scrolllines(selstart - padstartbuffered) def scrolllines(self, numlines): "scroll the screen up (down) by numlines when numlines >0 (<0)." self.firstlineofpadtoprint += numlines if self.firstlineofpadtoprint < 0: self.firstlineofpadtoprint = 0 if self.firstlineofpadtoprint > self.numpadlines - 1: self.firstlineofpadtoprint = self.numpadlines - 1 def toggleapply(self, item=None): """ toggle the applied flag of the specified item. if no item is specified, toggle the flag of the currently selected item. 
""" if item is None: item = self.currentselecteditem item.applied = not item.applied if isinstance(item, uiheader): item.partial = False if item.applied: # apply all its hunks for hnk in item.hunks: hnk.applied = True # apply all their hunklines for hunkline in hnk.changedlines: hunkline.applied = True else: # un-apply all its hunks for hnk in item.hunks: hnk.applied = False hnk.partial = False # un-apply all their hunklines for hunkline in hnk.changedlines: hunkline.applied = False elif isinstance(item, uihunk): item.partial = False # apply all it's hunklines for hunkline in item.changedlines: hunkline.applied = item.applied siblingappliedstatus = [hnk.applied for hnk in item.header.hunks] allsiblingsapplied = not (False in siblingappliedstatus) nosiblingsapplied = not (True in siblingappliedstatus) siblingspartialstatus = [hnk.partial for hnk in item.header.hunks] somesiblingspartial = (True in siblingspartialstatus) #cases where applied or partial should be removed from header # if no 'sibling' hunks are applied (including this hunk) if nosiblingsapplied: if not item.header.special(): item.header.applied = False item.header.partial = False else: # some/all parent siblings are applied item.header.applied = True item.header.partial = (somesiblingspartial or not allsiblingsapplied) elif isinstance(item, uihunkline): siblingappliedstatus = [ln.applied for ln in item.hunk.changedlines] allsiblingsapplied = not (False in siblingappliedstatus) nosiblingsapplied = not (True in siblingappliedstatus) # if no 'sibling' lines are applied if nosiblingsapplied: item.hunk.applied = False item.hunk.partial = False elif allsiblingsapplied: item.hunk.applied = True item.hunk.partial = False else: # some siblings applied item.hunk.applied = True item.hunk.partial = True parentsiblingsapplied = [hnk.applied for hnk in item.hunk.header.hunks] noparentsiblingsapplied = not (True in parentsiblingsapplied) allparentsiblingsapplied = not (False in parentsiblingsapplied) 
parentsiblingspartial = [hnk.partial for hnk in item.hunk.header.hunks] someparentsiblingspartial = (True in parentsiblingspartial) # if all parent hunks are not applied, un-apply header if noparentsiblingsapplied: if not item.hunk.header.special(): item.hunk.header.applied = False item.hunk.header.partial = False # set the applied and partial status of the header if needed else: # some/all parent siblings are applied item.hunk.header.applied = True item.hunk.header.partial = (someparentsiblingspartial or not allparentsiblingsapplied) def toggleall(self): "toggle the applied flag of all items." if self.waslasttoggleallapplied: # then unapply them this time for item in self.headerlist: if item.applied: self.toggleapply(item) else: for item in self.headerlist: if not item.applied: self.toggleapply(item) self.waslasttoggleallapplied = not self.waslasttoggleallapplied def togglefolded(self, item=None, foldparent=False): "toggle folded flag of specified item (defaults to currently selected)" if item is None: item = self.currentselecteditem if foldparent or (isinstance(item, uiheader) and item.neverunfolded): if not isinstance(item, uiheader): # we need to select the parent item in this case self.currentselecteditem = item = item.parentitem() elif item.neverunfolded: item.neverunfolded = False # also fold any foldable children of the parent/current item if isinstance(item, uiheader): # the original or 'new' item for child in item.allchildren(): child.folded = not item.folded if isinstance(item, (uiheader, uihunk)): item.folded = not item.folded def alignstring(self, instr, window): """ add whitespace to the end of a string in order to make it fill the screen in the x direction. the current cursor position is taken into account when making this calculation. the string can span multiple lines. 
""" y, xstart = window.getyx() width = self.xscreensize # turn tabs into spaces instr = instr.expandtabs(4) strwidth = encoding.colwidth(instr) numspaces = (width - ((strwidth + xstart) % width) - 1) return instr + " " * numspaces + "\n" def printstring(self, window, text, fgcolor=None, bgcolor=None, pair=None, pairname=None, attrlist=None, towin=True, align=True, showwhtspc=False): """ print the string, text, with the specified colors and attributes, to the specified curses window object. the foreground and background colors are of the form curses.color_xxxx, where xxxx is one of: [black, blue, cyan, green, magenta, red, white, yellow]. if pairname is provided, a color pair will be looked up in the self.colorpairnames dictionary. attrlist is a list containing text attributes in the form of curses.a_xxxx, where xxxx can be: [bold, dim, normal, standout, underline]. if align == True, whitespace is added to the printed string such that the string stretches to the right border of the window. if showwhtspc == True, trailing whitespace of a string is highlighted. """ # preprocess the text, converting tabs to spaces text = text.expandtabs(4) # strip \n, and convert control characters to ^[char] representation text = re.sub(r'[\x00-\x08\x0a-\x1f]', lambda m:'^' + chr(ord(m.group()) + 64), text.strip('\n')) if pair is not None: colorpair = pair elif pairname is not None: colorpair = self.colorpairnames[pairname] else: if fgcolor is None: fgcolor = -1 if bgcolor is None: bgcolor = -1 if (fgcolor, bgcolor) in self.colorpairs: colorpair = self.colorpairs[(fgcolor, bgcolor)] else: colorpair = self.getcolorpair(fgcolor, bgcolor) # add attributes if possible if attrlist is None: attrlist = [] if colorpair < 256: # then it is safe to apply all attributes for textattr in attrlist: colorpair |= textattr else: # just apply a select few (safe?) 
attributes for textattr in (curses.A_UNDERLINE, curses.A_BOLD): if textattr in attrlist: colorpair |= textattr y, xstart = self.chunkpad.getyx() t = "" # variable for counting lines printed # if requested, show trailing whitespace if showwhtspc: origlen = len(text) text = text.rstrip(' \n') # tabs have already been expanded strippedlen = len(text) numtrailingspaces = origlen - strippedlen if towin: window.addstr(text, colorpair) t += text if showwhtspc: wscolorpair = colorpair | curses.A_REVERSE if towin: for i in range(numtrailingspaces): window.addch(curses.ACS_CKBOARD, wscolorpair) t += " " * numtrailingspaces if align: if towin: extrawhitespace = self.alignstring("", window) window.addstr(extrawhitespace, colorpair) else: # need to use t, since the x position hasn't incremented extrawhitespace = self.alignstring(t, window) t += extrawhitespace # is reset to 0 at the beginning of printitem() linesprinted = (xstart + len(t)) / self.xscreensize self.linesprintedtopadsofar += linesprinted return t def updatescreen(self): self.statuswin.erase() self.chunkpad.erase() printstring = self.printstring # print out the status lines at the top try: if self.errorstr is not None: printstring(self.statuswin, self.errorstr, pairname='legend') printstring(self.statuswin, 'Press any key to continue', pairname='legend') self.statuswin.refresh() return line1 = ("SELECT CHUNKS: (j/k/up/dn/pgup/pgdn) move cursor; " "(space/A) toggle hunk/all; (e)dit hunk;") line2 = (" (f)old/unfold; (c)onfirm applied; (q)uit; (?) 
help " "| [X]=hunk applied **=folded, toggle [a]mend mode") printstring(self.statuswin, util.ellipsis(line1, self.xscreensize - 1), pairname="legend") printstring(self.statuswin, util.ellipsis(line2, self.xscreensize - 1), pairname="legend") except curses.error: pass # print out the patch in the remaining part of the window try: self.printitem() self.updatescroll() self.chunkpad.refresh(self.firstlineofpadtoprint, 0, self.numstatuslines, 0, self.yscreensize + 1 - self.numstatuslines, self.xscreensize) except curses.error: pass # refresh([pminrow, pmincol, sminrow, smincol, smaxrow, smaxcol]) self.statuswin.refresh() def getstatusprefixstring(self, item): """ create a string to prefix a line with which indicates whether 'item' is applied and/or folded. """ # create checkbox string if item.applied: if not isinstance(item, uihunkline) and item.partial: checkbox = "[~]" else: checkbox = "[x]" else: checkbox = "[ ]" try: if item.folded: checkbox += "**" if isinstance(item, uiheader): # one of "m", "a", or "d" (modified, added, deleted) filestatus = item.changetype checkbox += filestatus + " " else: checkbox += " " if isinstance(item, uiheader): # add two more spaces for headers checkbox += " " except AttributeError: # not foldable checkbox += " " return checkbox def printheader(self, header, selected=False, towin=True, ignorefolding=False): """ print the header to the pad. if countlines is True, don't print anything, but just count the number of lines which would be printed. 
""" outstr = "" text = header.prettystr() chunkindex = self.chunklist.index(header) if chunkindex != 0 and not header.folded: # add separating line before headers outstr += self.printstring(self.chunkpad, '_' * self.xscreensize, towin=towin, align=False) # select color-pair based on if the header is selected colorpair = self.getcolorpair(name=selected and "selected" or "normal", attrlist=[curses.A_BOLD]) # print out each line of the chunk, expanding it to screen width # number of characters to indent lines on this level by indentnumchars = 0 checkbox = self.getstatusprefixstring(header) if not header.folded or ignorefolding: textlist = text.split("\n") linestr = checkbox + textlist[0] else: linestr = checkbox + header.filename() outstr += self.printstring(self.chunkpad, linestr, pair=colorpair, towin=towin) if not header.folded or ignorefolding: if len(textlist) > 1: for line in textlist[1:]: linestr = " "*(indentnumchars + len(checkbox)) + line outstr += self.printstring(self.chunkpad, linestr, pair=colorpair, towin=towin) return outstr def printhunklinesbefore(self, hunk, selected=False, towin=True, ignorefolding=False): "includes start/end line indicator" outstr = "" # where hunk is in list of siblings hunkindex = hunk.header.hunks.index(hunk) if hunkindex != 0: # add separating line before headers outstr += self.printstring(self.chunkpad, ' '*self.xscreensize, towin=towin, align=False) colorpair = self.getcolorpair(name=selected and "selected" or "normal", attrlist=[curses.A_BOLD]) # print out from-to line with checkbox checkbox = self.getstatusprefixstring(hunk) lineprefix = " "*self.hunkindentnumchars + checkbox frtoline = " " + hunk.getfromtoline().strip("\n") outstr += self.printstring(self.chunkpad, lineprefix, towin=towin, align=False) # add uncolored checkbox/indent outstr += self.printstring(self.chunkpad, frtoline, pair=colorpair, towin=towin) if hunk.folded and not ignorefolding: # skip remainder of output return outstr # print out lines of the chunk 
preceeding changed-lines for line in hunk.before: linestr = " "*(self.hunklineindentnumchars + len(checkbox)) + line outstr += self.printstring(self.chunkpad, linestr, towin=towin) return outstr def printhunklinesafter(self, hunk, towin=True, ignorefolding=False): outstr = "" if hunk.folded and not ignorefolding: return outstr # a bit superfluous, but to avoid hard-coding indent amount checkbox = self.getstatusprefixstring(hunk) for line in hunk.after: linestr = " "*(self.hunklineindentnumchars + len(checkbox)) + line outstr += self.printstring(self.chunkpad, linestr, towin=towin) return outstr def printhunkchangedline(self, hunkline, selected=False, towin=True): outstr = "" checkbox = self.getstatusprefixstring(hunkline) linestr = hunkline.prettystr().strip("\n") # select color-pair based on whether line is an addition/removal if selected: colorpair = self.getcolorpair(name="selected") elif linestr.startswith("+"): colorpair = self.getcolorpair(name="addition") elif linestr.startswith("-"): colorpair = self.getcolorpair(name="deletion") elif linestr.startswith("\\"): colorpair = self.getcolorpair(name="normal") lineprefix = " "*self.hunklineindentnumchars + checkbox outstr += self.printstring(self.chunkpad, lineprefix, towin=towin, align=False) # add uncolored checkbox/indent outstr += self.printstring(self.chunkpad, linestr, pair=colorpair, towin=towin, showwhtspc=True) return outstr def printitem(self, item=None, ignorefolding=False, recursechildren=True, towin=True): """ use __printitem() to print the the specified item.applied. if item is not specified, then print the entire patch. (hiding folded elements, etc. 
-- see __printitem() docstring) """ if item is None: item = self.headerlist if recursechildren: self.linesprintedtopadsofar = 0 outstr = [] self.__printitem(item, ignorefolding, recursechildren, outstr, towin=towin) return ''.join(outstr) def outofdisplayedarea(self): y, _ = self.chunkpad.getyx() # cursor location # * 2 here works but an optimization would be the max number of # consecutive non selectable lines # i.e the max number of context line for any hunk in the patch miny = min(0, self.firstlineofpadtoprint - self.yscreensize) maxy = self.firstlineofpadtoprint + self.yscreensize * 2 return y < miny or y > maxy def handleselection(self, item, recursechildren): selected = (item is self.currentselecteditem) if selected and recursechildren: # assumes line numbering starting from line 0 self.selecteditemstartline = self.linesprintedtopadsofar selecteditemlines = self.getnumlinesdisplayed(item, recursechildren=False) self.selecteditemendline = (self.selecteditemstartline + selecteditemlines - 1) return selected def __printitem(self, item, ignorefolding, recursechildren, outstr, towin=True): """ recursive method for printing out patch/header/hunk/hunk-line data to screen. also returns a string with all of the content of the displayed patch (not including coloring, etc.). if ignorefolding is True, then folded items are printed out. if recursechildren is False, then only print the item without its child items. 
""" if towin and self.outofdisplayedarea(): return selected = self.handleselection(item, recursechildren) # patch object is a list of headers if isinstance(item, patch): if recursechildren: for hdr in item: self.__printitem(hdr, ignorefolding, recursechildren, outstr, towin) # todo: eliminate all isinstance() calls if isinstance(item, uiheader): outstr.append(self.printheader(item, selected, towin=towin, ignorefolding=ignorefolding)) if recursechildren: for hnk in item.hunks: self.__printitem(hnk, ignorefolding, recursechildren, outstr, towin) elif (isinstance(item, uihunk) and ((not item.header.folded) or ignorefolding)): # print the hunk data which comes before the changed-lines outstr.append(self.printhunklinesbefore(item, selected, towin=towin, ignorefolding=ignorefolding)) if recursechildren: for l in item.changedlines: self.__printitem(l, ignorefolding, recursechildren, outstr, towin) outstr.append(self.printhunklinesafter(item, towin=towin, ignorefolding=ignorefolding)) elif (isinstance(item, uihunkline) and ((not item.hunk.folded) or ignorefolding)): outstr.append(self.printhunkchangedline(item, selected, towin=towin)) return outstr def getnumlinesdisplayed(self, item=None, ignorefolding=False, recursechildren=True): """ return the number of lines which would be displayed if the item were to be printed to the display. the item will not be printed to the display (pad). if no item is given, assume the entire patch. if ignorefolding is True, folded items will be unfolded when counting the number of lines. 
""" # temporarily disable printing to windows by printstring patchdisplaystring = self.printitem(item, ignorefolding, recursechildren, towin=False) numlines = len(patchdisplaystring) / self.xscreensize return numlines def sigwinchhandler(self, n, frame): "handle window resizing" try: curses.endwin() self.yscreensize, self.xscreensize = gethw() self.statuswin.resize(self.numstatuslines, self.xscreensize) self.numpadlines = self.getnumlinesdisplayed(ignorefolding=True) + 1 self.chunkpad = curses.newpad(self.numpadlines, self.xscreensize) # todo: try to resize commit message window if possible except curses.error: pass def getcolorpair(self, fgcolor=None, bgcolor=None, name=None, attrlist=None): """ get a curses color pair, adding it to self.colorpairs if it is not already defined. an optional string, name, can be passed as a shortcut for referring to the color-pair. by default, if no arguments are specified, the white foreground / black background color-pair is returned. it is expected that this function will be used exclusively for initializing color pairs, and not curses.init_pair(). attrlist is used to 'flavor' the returned color-pair. this information is not stored in self.colorpairs. it contains attribute values like curses.A_BOLD. 
""" if (name is not None) and name in self.colorpairnames: # then get the associated color pair and return it colorpair = self.colorpairnames[name] else: if fgcolor is None: fgcolor = -1 if bgcolor is None: bgcolor = -1 if (fgcolor, bgcolor) in self.colorpairs: colorpair = self.colorpairs[(fgcolor, bgcolor)] else: pairindex = len(self.colorpairs) + 1 curses.init_pair(pairindex, fgcolor, bgcolor) colorpair = self.colorpairs[(fgcolor, bgcolor)] = ( curses.color_pair(pairindex)) if name is not None: self.colorpairnames[name] = curses.color_pair(pairindex) # add attributes if possible if attrlist is None: attrlist = [] if colorpair < 256: # then it is safe to apply all attributes for textattr in attrlist: colorpair |= textattr else: # just apply a select few (safe?) attributes for textattrib in (curses.A_UNDERLINE, curses.A_BOLD): if textattrib in attrlist: colorpair |= textattrib return colorpair def initcolorpair(self, *args, **kwargs): "same as getcolorpair." self.getcolorpair(*args, **kwargs) def helpwindow(self): "print a help window to the screen. exit after any keypress." helptext = """ [press any key to return to the patch-display] crecord allows you to interactively choose among the changes you have made, and confirm only those changes you select for further processing by the command you are running (commit/shelve/revert), after confirming the selected changes, the unselected changes are still present in your working copy, so you can use crecord multiple times to split large changes into smaller changesets. 
the following are valid keystrokes: [space] : (un-)select item ([~]/[x] = partly/fully applied) A : (un-)select all items up/down-arrow [k/j] : go to previous/next unfolded item pgup/pgdn [K/J] : go to previous/next item of same type right/left-arrow [l/h] : go to child item / parent item shift-left-arrow [H] : go to parent header / fold selected header f : fold / unfold item, hiding/revealing its children F : fold / unfold parent item and all of its ancestors m : edit / resume editing the commit message e : edit the currently selected hunk a : toggle amend mode (hg rev >= 2.2), only with commit -i c : confirm selected changes r : review/edit and confirm selected changes q : quit without confirming (no changes will be made) ? : help (what you're currently reading)""" helpwin = curses.newwin(self.yscreensize, 0, 0, 0) helplines = helptext.split("\n") helplines = helplines + [" "]*( self.yscreensize - self.numstatuslines - len(helplines) - 1) try: for line in helplines: self.printstring(helpwin, line, pairname="legend") except curses.error: pass helpwin.refresh() try: helpwin.getkey() except curses.error: pass def confirmationwindow(self, windowtext): "display an informational window, then wait for and return a keypress." confirmwin = curses.newwin(self.yscreensize, 0, 0, 0) try: lines = windowtext.split("\n") for line in lines: self.printstring(confirmwin, line, pairname="selected") except curses.error: pass self.stdscr.refresh() confirmwin.refresh() try: response = chr(self.stdscr.getch()) except ValueError: response = None return response def confirmcommit(self, review=False): """ask for 'y' to be pressed to confirm selected. return True if confirmed.""" if review: confirmtext = ( """if you answer yes to the following, the your currently chosen patch chunks will be loaded into an editor. you may modify the patch from the editor, and save the changes if you wish to change the patch. 
otherwise, you can just close the editor without saving to accept the current patch as-is. note: don't add/remove lines unless you also modify the range information. failing to follow this rule will result in the commit aborting. are you sure you want to review/edit and confirm the selected changes [yn]? """) else: confirmtext = ( "are you sure you want to confirm the selected changes [yn]? ") response = self.confirmationwindow(confirmtext) if response is None: response = "n" if response.lower().startswith("y"): return True else: return False def toggleamend(self, opts, test): """Toggle the amend flag. When the amend flag is set, a commit will modify the most recently committed changeset, instead of creating a new changeset. Otherwise, a new changeset will be created (the normal commit behavior). """ try: ver = float(util.version()[:3]) except ValueError: ver = 1 if ver < 2.19: msg = ("The amend option is unavailable with hg versions < 2.2\n\n" "Press any key to continue.") elif opts.get('amend') is None: opts['amend'] = True msg = ("Amend option is turned on -- commiting the currently " "selected changes will not create a new changeset, but " "instead update the most recently committed changeset.\n\n" "Press any key to continue.") elif opts.get('amend') is True: opts['amend'] = None msg = ("Amend option is turned off -- commiting the currently " "selected changes will create a new changeset.\n\n" "Press any key to continue.") if not test: self.confirmationwindow(msg) def recenterdisplayedarea(self): """ once we scrolled with pg up pg down we can be pointing outside of the display zone. we print the patch with towin=False to compute the location of the selected item even though it is outside of the displayed zone and then update the scroll. 
""" self.printitem(towin=False) self.updatescroll() def toggleedit(self, item=None, test=False): """ edit the currently selected chunk """ def updateui(self): self.numpadlines = self.getnumlinesdisplayed(ignorefolding=True) + 1 self.chunkpad = curses.newpad(self.numpadlines, self.xscreensize) self.updatescroll() self.stdscr.refresh() self.statuswin.refresh() self.stdscr.keypad(1) def editpatchwitheditor(self, chunk): if chunk is None: self.ui.write(_('cannot edit patch for whole file')) self.ui.write("\n") return None if chunk.header.binary(): self.ui.write(_('cannot edit patch for binary file')) self.ui.write("\n") return None # patch comment based on the git one (based on comment at end of # https://mercurial-scm.org/wiki/recordextension) phelp = '---' + _(""" to remove '-' lines, make them ' ' lines (context). to remove '+' lines, delete them. lines starting with # will be removed from the patch. if the patch applies cleanly, the edited hunk will immediately be added to the record list. if it does not apply cleanly, a rejects file will be generated: you can use that when you try again. if all lines of the hunk are removed, then the edit is aborted and the hunk is left unchanged. 
""") (patchfd, patchfn) = tempfile.mkstemp(prefix="hg-editor-", suffix=".diff", text=True) ncpatchfp = None try: # write the initial patch f = os.fdopen(patchfd, "w") chunk.header.write(f) chunk.write(f) f.write('\n'.join(['# ' + i for i in phelp.splitlines()])) f.close() # start the editor and wait for it to complete editor = self.ui.geteditor() ret = self.ui.system("%s \"%s\"" % (editor, patchfn), environ={'hguser': self.ui.username()}) if ret != 0: self.errorstr = "Editor exited with status %d" % ret return None # remove comment lines patchfp = open(patchfn) ncpatchfp = cStringIO.StringIO() for line in patchfp: if not line.startswith('#'): ncpatchfp.write(line) patchfp.close() ncpatchfp.seek(0) newpatches = patchmod.parsepatch(ncpatchfp) finally: os.unlink(patchfn) del ncpatchfp return newpatches if item is None: item = self.currentselecteditem if isinstance(item, uiheader): return if isinstance(item, uihunkline): item = item.parentitem() if not isinstance(item, uihunk): return # To go back to that hunk or its replacement at the end of the edit itemindex = item.parentitem().hunks.index(item) beforeadded, beforeremoved = item.added, item.removed newpatches = editpatchwitheditor(self, item) if newpatches is None: if not test: updateui(self) return header = item.header editedhunkindex = header.hunks.index(item) hunksbefore = header.hunks[:editedhunkindex] hunksafter = header.hunks[editedhunkindex + 1:] newpatchheader = newpatches[0] newhunks = [uihunk(h, header) for h in newpatchheader.hunks] newadded = sum([h.added for h in newhunks]) newremoved = sum([h.removed for h in newhunks]) offset = (newadded - beforeadded) - (newremoved - beforeremoved) for h in hunksafter: h.toline += offset for h in newhunks: h.folded = False header.hunks = hunksbefore + newhunks + hunksafter if self.emptypatch(): header.hunks = hunksbefore + [item] + hunksafter self.currentselecteditem = header if len(header.hunks) > itemindex: self.currentselecteditem = header.hunks[itemindex] if not 
test: updateui(self) def emptypatch(self): item = self.headerlist if not item: return True for header in item: if header.hunks: return False return True def handlekeypressed(self, keypressed, test=False): if keypressed in ["k", "KEY_UP"]: self.uparrowevent() if keypressed in ["K", "KEY_PPAGE"]: self.uparrowshiftevent() elif keypressed in ["j", "KEY_DOWN"]: self.downarrowevent() elif keypressed in ["J", "KEY_NPAGE"]: self.downarrowshiftevent() elif keypressed in ["l", "KEY_RIGHT"]: self.rightarrowevent() elif keypressed in ["h", "KEY_LEFT"]: self.leftarrowevent() elif keypressed in ["H", "KEY_SLEFT"]: self.leftarrowshiftevent() elif keypressed in ["q"]: raise error.Abort(_('user quit')) elif keypressed in ['a']: self.toggleamend(self.opts, test) elif keypressed in ["c"]: if self.confirmcommit(): return True elif keypressed in ["r"]: if self.confirmcommit(review=True): return True elif test and keypressed in ['X']: return True elif keypressed in [' '] or (test and keypressed in ["TOGGLE"]): self.toggleapply() elif keypressed in ['A']: self.toggleall() elif keypressed in ['e']: self.toggleedit(test=test) elif keypressed in ["f"]: self.togglefolded() elif keypressed in ["F"]: self.togglefolded(foldparent=True) elif keypressed in ["?"]: self.helpwindow() self.stdscr.clear() self.stdscr.refresh() def main(self, stdscr): """ method to be wrapped by curses.wrapper() for selecting chunks. 
""" signal.signal(signal.SIGWINCH, self.sigwinchhandler) self.stdscr = stdscr # error during initialization, cannot be printed in the curses # interface, it should be printed by the calling code self.initerr = None self.yscreensize, self.xscreensize = self.stdscr.getmaxyx() curses.start_color() curses.use_default_colors() # available colors: black, blue, cyan, green, magenta, white, yellow # init_pair(color_id, foreground_color, background_color) self.initcolorpair(None, None, name="normal") self.initcolorpair(curses.COLOR_WHITE, curses.COLOR_MAGENTA, name="selected") self.initcolorpair(curses.COLOR_RED, None, name="deletion") self.initcolorpair(curses.COLOR_GREEN, None, name="addition") self.initcolorpair(curses.COLOR_WHITE, curses.COLOR_BLUE, name="legend") # newwin([height, width,] begin_y, begin_x) self.statuswin = curses.newwin(self.numstatuslines, 0, 0, 0) self.statuswin.keypad(1) # interpret arrow-key, etc. esc sequences # figure out how much space to allocate for the chunk-pad which is # used for displaying the patch # stupid hack to prevent getnumlinesdisplayed from failing self.chunkpad = curses.newpad(1, self.xscreensize) # add 1 so to account for last line text reaching end of line self.numpadlines = self.getnumlinesdisplayed(ignorefolding=True) + 1 try: self.chunkpad = curses.newpad(self.numpadlines, self.xscreensize) except curses.error: self.initerr = _('this diff is too large to be displayed') return # initialize selecteitemendline (initial start-line is 0) self.selecteditemendline = self.getnumlinesdisplayed( self.currentselecteditem, recursechildren=False) while True: self.updatescreen() try: keypressed = self.statuswin.getkey() if self.errorstr is not None: self.errorstr = None continue except curses.error: keypressed = "foobar" if self.handlekeypressed(keypressed): break mercurial-3.7.3/mercurial/commands.py0000644000175000017500000077106012676531525017300 0ustar mpmmpm00000000000000# commands.py - command processing for mercurial # # Copyright 
2005-2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from node import hex, bin, nullhex, nullid, nullrev, short from lock import release from i18n import _ import os, re, difflib, time, tempfile, errno, shlex import sys, socket import hg, scmutil, util, revlog, copies, error, bookmarks import patch, help, encoding, templatekw, discovery import archival, changegroup, cmdutil, hbisect import sshserver, hgweb import extensions import merge as mergemod import minirst, revset, fileset import dagparser, context, simplemerge, graphmod, copies import random, operator import setdiscovery, treediscovery, dagutil, pvec, localrepo, destutil import phases, obsolete, exchange, bundle2, repair, lock as lockmod import ui as uimod import streamclone import commandserver table = {} command = cmdutil.command(table) # Space delimited list of commands that don't require local repositories. # This should be populated by passing norepo=True into the @command decorator. norepo = '' # Space delimited list of commands that optionally require local repositories. # This should be populated by passing optionalrepo=True into the @command # decorator. optionalrepo = '' # Space delimited list of commands that will examine arguments looking for # a repository. This should be populated by passing inferrepo=True into the # @command decorator. 
inferrepo = ''

# label constants
# until 3.5, bookmarks.current was the advertised name, not
# bookmarks.active, so we must use both to avoid breaking old
# custom styles
activebookmarklabel = 'bookmarks.active bookmarks.current'

# common command options

globalopts = [
    ('R', 'repository', '',
     _('repository root directory or name of overlay bundle file'),
     _('REPO')),
    ('', 'cwd', '',
     _('change working directory'), _('DIR')),
    ('y', 'noninteractive', None,
     _('do not prompt, automatically pick the first choice for all prompts')),
    ('q', 'quiet', None, _('suppress output')),
    ('v', 'verbose', None, _('enable additional output')),
    ('', 'config', [],
     _('set/override config option (use \'section.name=value\')'),
     _('CONFIG')),
    ('', 'debug', None, _('enable debugging output')),
    ('', 'debugger', None, _('start debugger')),
    ('', 'encoding', encoding.encoding, _('set the charset encoding'),
     _('ENCODE')),
    ('', 'encodingmode', encoding.encodingmode,
     _('set the charset encoding mode'), _('MODE')),
    ('', 'traceback', None, _('always print a traceback on exception')),
    ('', 'time', None, _('time how long the command takes')),
    ('', 'profile', None, _('print command execution profile')),
    ('', 'version', None, _('output version information and exit')),
    ('h', 'help', None, _('display help and exit')),
    ('', 'hidden', False, _('consider hidden changesets')),
]

dryrunopts = [('n', 'dry-run', None,
               _('do not perform actions, just print output'))]

remoteopts = [
    ('e', 'ssh', '', _('specify ssh command to use'), _('CMD')),
    ('', 'remotecmd', '', _('specify hg command to run on the remote side'),
     _('CMD')),
    ('', 'insecure', None,
     _('do not verify server certificate (ignoring web.cacerts config)')),
]

walkopts = [
    ('I', 'include', [],
     _('include names matching the given patterns'), _('PATTERN')),
    ('X', 'exclude', [],
     _('exclude names matching the given patterns'), _('PATTERN')),
]

commitopts = [
    ('m', 'message', '',
     _('use text as commit message'), _('TEXT')),
    ('l', 'logfile', '',
     _('read commit message from file'), _('FILE')),
]

commitopts2 = [
    ('d', 'date', '',
     _('record the specified date as commit date'), _('DATE')),
    ('u', 'user', '',
     _('record the specified user as committer'), _('USER')),
]

# hidden for now
formatteropts = [
    ('T', 'template', '',
     _('display with template (EXPERIMENTAL)'), _('TEMPLATE')),
]

templateopts = [
    ('', 'style', '',
     _('display using template map file (DEPRECATED)'), _('STYLE')),
    ('T', 'template', '',
     _('display with template'), _('TEMPLATE')),
]

logopts = [
    ('p', 'patch', None, _('show patch')),
    ('g', 'git', None, _('use git extended diff format')),
    ('l', 'limit', '',
     _('limit number of changes displayed'), _('NUM')),
    ('M', 'no-merges', None, _('do not show merges')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('G', 'graph', None, _("show the revision DAG")),
] + templateopts

diffopts = [
    ('a', 'text', None, _('treat all files as text')),
    ('g', 'git', None, _('use git extended diff format')),
    ('', 'nodates', None, _('omit dates from diff headers'))
]

diffwsopts = [
    ('w', 'ignore-all-space', None,
     _('ignore white space when comparing lines')),
    ('b', 'ignore-space-change', None,
     _('ignore changes in the amount of white space')),
    ('B', 'ignore-blank-lines', None,
     _('ignore changes whose lines are all blank')),
]

diffopts2 = [
    ('', 'noprefix', None, _('omit a/ and b/ prefixes from filenames')),
    ('p', 'show-function', None, _('show which function each change is in')),
    ('', 'reverse', None, _('produce a diff that undoes the changes')),
] + diffwsopts + [
    ('U', 'unified', '',
     _('number of lines of context to show'), _('NUM')),
    ('', 'stat', None, _('output diffstat-style summary of changes')),
    ('', 'root', '', _('produce diffs relative to subdirectory'), _('DIR')),
]

mergetoolopts = [
    ('t', 'tool', '', _('specify merge tool')),
]

similarityopts = [
    ('s', 'similarity', '',
     _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
]

subrepoopts = [
    ('S', 'subrepos', None,
     _('recurse into subrepositories'))
]

debugrevlogopts = [
    ('c', 'changelog', False, _('open changelog')),
    ('m', 'manifest', False, _('open manifest')),
    ('', 'dir', False, _('open directory manifest')),
]

# Commands start here, listed alphabetically

@command('^add',
    walkopts + subrepoopts + dryrunopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def add(ui, repo, *pats, **opts):
    """add the specified files on the next commit

    Schedule files to be version controlled and added to the
    repository.

    The files will be added to the repository at the next commit. To
    undo an add before that, see :hg:`forget`.

    If no names are given, add all files to the repository (except
    files matching ``.hgignore``).

    .. container:: verbose

       Examples:

         - New (unknown) files are added
           automatically by :hg:`add`::

             $ ls
             foo.c
             $ hg status
             ? foo.c
             $ hg add
             adding foo.c
             $ hg status
             A foo.c

         - Specific files to be added can be specified::

             $ ls
             bar.c foo.c
             $ hg status
             ? bar.c
             ? foo.c
             $ hg add bar.c
             $ hg status
             A bar.c
             ? foo.c

    Returns 0 if all files are successfully added.
    """

    m = scmutil.match(repo[None], pats, opts)
    rejected = cmdutil.add(ui, repo, m, "", False, **opts)
    return rejected and 1 or 0
@command('addremove',
    similarityopts + subrepoopts + walkopts + dryrunopts,
    _('[OPTION]... [FILE]...'),
    inferrepo=True)
def addremove(ui, repo, *pats, **opts):
    """add all new files, delete all missing files

    Add all new files and remove all missing files from the
    repository.

    Unless names are given, new files are ignored if they match any of
    the patterns in ``.hgignore``. As with add, these changes take
    effect at the next commit.

    Use the -s/--similarity option to detect renamed files. This
    option takes a percentage between 0 (disabled) and 100 (files must
    be identical) as its parameter. With a parameter greater than 0,
    this compares every removed file with every added file and records
    those similar enough as renames. Detecting renamed files this way
    can be expensive. After using this option, :hg:`status -C` can be
    used to check which files were identified as moved or renamed. If
    not specified, -s/--similarity defaults to 100 and only renames of
    identical files are detected.

    .. container:: verbose

       Examples:

         - A number of files (bar.c and foo.c) are new,
           while foobar.c has been removed (without using :hg:`remove`)
           from the repository::

             $ ls
             bar.c foo.c
             $ hg status
             ! foobar.c
             ? bar.c
             ? foo.c
             $ hg addremove
             adding bar.c
             adding foo.c
             removing foobar.c
             $ hg status
             A bar.c
             A foo.c
             R foobar.c

         - A file foobar.c was moved to foo.c without using :hg:`rename`.
           Afterwards, it was edited slightly::

             $ ls
             foo.c
             $ hg status
             ! foobar.c
             ? foo.c
             $ hg addremove --similarity 90
             removing foobar.c
             adding foo.c
             recording removal of foobar.c as rename to foo.c (94% similar)
             $ hg status -C
             A foo.c
               foobar.c
             R foobar.c

    Returns 0 if all files are successfully added.
    """
    try:
        sim = float(opts.get('similarity') or 100)
    except ValueError:
        raise error.Abort(_('similarity must be a number'))
    if sim < 0 or sim > 100:
        raise error.Abort(_('similarity must be between 0 and 100'))
    matcher = scmutil.match(repo[None], pats, opts)
    return scmutil.addremove(repo, matcher, "", opts, similarity=sim / 100.0)

@command('^annotate|blame',
    [('r', 'rev', '', _('annotate the specified revision'), _('REV')),
    ('', 'follow', None,
     _('follow copies/renames and list the filename (DEPRECATED)')),
    ('', 'no-follow', None, _("don't follow copies and renames")),
    ('a', 'text', None, _('treat all files as text')),
    ('u', 'user', None, _('list the author (long with -v)')),
    ('f', 'file', None, _('list the filename')),
    ('d', 'date', None, _('list the date (short with -q)')),
    ('n', 'number', None, _('list the revision number (default)')),
    ('c', 'changeset', None, _('list the changeset')),
    ('l', 'line-number', None, _('show line number at the first appearance'))
    ] + diffwsopts + walkopts + formatteropts,
    _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'),
    inferrepo=True)
def annotate(ui, repo, *pats, **opts):
    """show changeset information by line for each file

    List changes in files, showing the revision id responsible for
    each line.

    This command is useful for discovering when a change was made and
    by whom.

    If you include --file, --user, or --date, the revision number is
    suppressed unless you also include --number.

    Without the -a/--text option, annotate will avoid processing files
    it detects as binary. With -a, annotate will annotate the file
    anyway, although the results will probably be neither useful
    nor desirable.

    Returns 0 on success.
    """
    if not pats:
        raise error.Abort(_('at least one filename or pattern is required'))

    if opts.get('follow'):
        # --follow is deprecated and now just an alias for -f/--file
        # to mimic the behavior of Mercurial before version 1.5
        opts['file'] = True

    ctx = scmutil.revsingle(repo, opts.get('rev'))

    fm = ui.formatter('annotate', opts)
    if ui.quiet:
        datefunc = util.shortdate
    else:
        datefunc = util.datestr
    if ctx.rev() is None:
        def hexfn(node):
            if node is None:
                return None
            else:
                return fm.hexfunc(node)
        if opts.get('changeset'):
            # omit "+" suffix which is appended to node hex
            def formatrev(rev):
                if rev is None:
                    return '%d' % ctx.p1().rev()
                else:
                    return '%d' % rev
        else:
            def formatrev(rev):
                if rev is None:
                    return '%d+' % ctx.p1().rev()
                else:
                    return '%d ' % rev
        def formathex(hex):
            if hex is None:
                return '%s+' % fm.hexfunc(ctx.p1().node())
            else:
                return '%s ' % hex
    else:
        hexfn = fm.hexfunc
        formatrev = formathex = str

    opmap = [('user', ' ', lambda x: x[0].user(), ui.shortuser),
             ('number', ' ', lambda x: x[0].rev(), formatrev),
             ('changeset', ' ', lambda x: hexfn(x[0].node()), formathex),
             ('date', ' ', lambda x: x[0].date(), util.cachefunc(datefunc)),
             ('file', ' ', lambda x: x[0].path(), str),
             ('line_number', ':', lambda x: x[1], str),
            ]
    fieldnamemap = {'number': 'rev', 'changeset': 'node'}

    if (not opts.get('user') and not opts.get('changeset')
        and not opts.get('date') and not opts.get('file')):
        opts['number'] = True

    linenumber = opts.get('line_number') is not None
    if linenumber and (not opts.get('changeset')) and (not opts.get('number')):
        raise error.Abort(_('at least one of -n/-c is required for -l'))

    if fm:
        def makefunc(get, fmt):
            return get
    else:
        def makefunc(get, fmt):
            return lambda x: fmt(get(x))
    funcmap = [(makefunc(get, fmt), sep) for op, sep, get, fmt in opmap
               if opts.get(op)]
    funcmap[0] = (funcmap[0][0], '') # no separator in front of first column
    fields = ' '.join(fieldnamemap.get(op, op) for op, sep, get, fmt in opmap
                      if opts.get(op))

    def bad(x, y):
        raise error.Abort("%s: %s" % (x, y))

    m = scmutil.match(ctx, pats, opts, badfn=bad)

    follow = not opts.get('no_follow')
    diffopts = patch.difffeatureopts(ui, opts, section='annotate',
                                     whitespace=True)
    for abs in ctx.walk(m):
        fctx = ctx[abs]
        if not opts.get('text') and util.binary(fctx.data()):
            fm.plain(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
            continue

        lines = fctx.annotate(follow=follow, linenumber=linenumber,
                              diffopts=diffopts)
        formats = []
        pieces = []

        for f, sep in funcmap:
            l = [f(n) for n, dummy in lines]
            if l:
                if fm:
                    formats.append(['%s' for x in l])
                else:
                    sizes = [encoding.colwidth(x) for x in l]
                    ml = max(sizes)
                    formats.append([sep + ' ' * (ml - w) + '%s' for w in sizes])
                pieces.append(l)

        for f, p, l in zip(zip(*formats), zip(*pieces), lines):
            fm.startitem()
            fm.write(fields, "".join(f), *p)
            fm.write('line', ": %s", l[1])

        if lines and not lines[-1][1].endswith('\n'):
            fm.plain('\n')

    fm.end()
@command('archive',
    [('', 'no-decode', None, _('do not pass files through decoders')),
    ('p', 'prefix', '', _('directory prefix for files in archive'),
     _('PREFIX')),
    ('r', 'rev', '', _('revision to distribute'), _('REV')),
    ('t', 'type', '', _('type of distribution to create'), _('TYPE')),
    ] + subrepoopts + walkopts,
    _('[OPTION]... DEST'))
def archive(ui, repo, dest, **opts):
    '''create an unversioned archive of a repository revision

    By default, the revision used is the parent of the working
    directory; use -r/--rev to specify a different revision.

    The archive type is automatically detected based on file
    extension (to override, use -t/--type).

    .. container:: verbose

      Examples:

      - create a zip file containing the 1.0 release::

          hg archive -r 1.0 project-1.0.zip

      - create a tarball excluding .hg files::

          hg archive project.tar.gz -X ".hg*"

    Valid types are:

    :``files``: a directory full of files (default)
    :``tar``:   tar archive, uncompressed
    :``tbz2``:  tar archive, compressed using bzip2
    :``tgz``:   tar archive, compressed using gzip
    :``uzip``:  zip archive, uncompressed
    :``zip``:   zip archive, compressed using deflate

    The exact name of the destination archive or directory is given
    using a format string; see :hg:`help export` for details.

    Each member added to an archive file has a directory prefix
    prepended. Use -p/--prefix to specify a format string for the
    prefix. The default is the basename of the archive, with suffixes
    removed.

    Returns 0 on success.
    '''

    ctx = scmutil.revsingle(repo, opts.get('rev'))
    if not ctx:
        raise error.Abort(_('no working directory: please specify a revision'))
    node = ctx.node()
    dest = cmdutil.makefilename(repo, dest, node)
    if os.path.realpath(dest) == repo.root:
        raise error.Abort(_('repository root cannot be destination'))

    kind = opts.get('type') or archival.guesskind(dest) or 'files'
    prefix = opts.get('prefix')

    if dest == '-':
        if kind == 'files':
            raise error.Abort(_('cannot archive plain files to stdout'))
        dest = cmdutil.makefileobj(repo, dest)
        if not prefix:
            prefix = os.path.basename(repo.root) + '-%h'

    prefix = cmdutil.makefilename(repo, prefix, node)
    matchfn = scmutil.match(ctx, [], opts)
    archival.archive(repo, dest, node, kind, not opts.get('no_decode'),
                     matchfn, prefix, subrepos=opts.get('subrepos'))

@command('backout',
    [('', 'merge', None, _('merge with old dirstate parent after backout')),
    ('', 'commit', None,
     _('commit if no conflicts were encountered (DEPRECATED)')),
    ('', 'no-commit', None, _('do not commit')),
    ('', 'parent', '',
     _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
    ('r', 'rev', '', _('revision to backout'), _('REV')),
    ('e', 'edit', False, _('invoke editor on commit messages')),
    ] + mergetoolopts + walkopts + commitopts + commitopts2,
    _('[OPTION]... [-r] REV'))
def backout(ui, repo, node=None, rev=None, **opts):
    '''reverse effect of earlier changeset

    Prepare a new changeset with the effect of REV undone in the
    current working directory. If no conflicts were encountered, it
    will be committed immediately.

    If REV is the parent of the working directory, then this new changeset
    is committed automatically (unless --no-commit is specified).

    .. note::

       :hg:`backout` cannot be used to fix either an unwanted or
       incorrect merge.

    .. container:: verbose

      Examples:

      - Reverse the effect of the parent of the working directory.
        This backout will be committed immediately::

          hg backout -r .

      - Reverse the effect of previous bad revision 23::

          hg backout -r 23

      - Reverse the effect of previous bad revision 23 and
        leave changes uncommitted::

          hg backout -r 23 --no-commit
          hg commit -m "Backout revision 23"

      By default, the pending changeset will have one parent,
      maintaining a linear history. With --merge, the pending
      changeset will instead have two parents: the old parent of the
      working directory and a new child of REV that simply undoes REV.

      Before version 1.7, the behavior without --merge was equivalent
      to specifying --merge followed by :hg:`update --clean .` to
      cancel the merge and leave the child of REV as a head to be
      merged separately.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    See :hg:`help revert` for a way to restore files to the state
    of another revision.

    Returns 0 on success, 1 if nothing to backout or there are unresolved
    files.
    '''
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        return _dobackout(ui, repo, node, rev, **opts)
    finally:
        release(lock, wlock)
def _dobackout(ui, repo, node=None, rev=None, **opts):
    if opts.get('commit') and opts.get('no_commit'):
        raise error.Abort(_("cannot use --commit with --no-commit"))
    if opts.get('merge') and opts.get('no_commit'):
        raise error.Abort(_("cannot use --merge with --no-commit"))

    if rev and node:
        raise error.Abort(_("please specify just one revision"))

    if not rev:
        rev = node

    if not rev:
        raise error.Abort(_("please specify a revision to backout"))

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    cmdutil.checkunfinished(repo)
    cmdutil.bailifchanged(repo)
    node = scmutil.revsingle(repo, rev).node()

    op1, op2 = repo.dirstate.parents()
    if not repo.changelog.isancestor(node, op1):
        raise error.Abort(_('cannot backout change that is not an ancestor'))

    p1, p2 = repo.changelog.parents(node)
    if p1 == nullid:
        raise error.Abort(_('cannot backout a change with no parents'))
    if p2 != nullid:
        if not opts.get('parent'):
            raise error.Abort(_('cannot backout a merge changeset'))
        p = repo.lookup(opts['parent'])
        if p not in (p1, p2):
            raise error.Abort(_('%s is not a parent of %s') %
                             (short(p), short(node)))
        parent = p
    else:
        if opts.get('parent'):
            raise error.Abort(_('cannot use --parent on non-merge changeset'))
        parent = p1

    # the backout should appear on the same branch
    branch = repo.dirstate.branch()
    bheads = repo.branchheads(branch)
    rctx = scmutil.revsingle(repo, hex(parent))
    if not opts.get('merge') and op1 != node:
        dsguard = cmdutil.dirstateguard(repo, 'backout')
        try:
            ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'backout')
            stats = mergemod.update(repo, parent, True, True, node, False)
            repo.setparents(op1, op2)
            dsguard.close()
            hg._showstats(repo, stats)
            if stats[3]:
                repo.ui.status(_("use 'hg resolve' to retry unresolved "
                                 "file merges\n"))
                return 1
        finally:
            ui.setconfig('ui', 'forcemerge', '', '')
            lockmod.release(dsguard)
    else:
        hg.clean(repo, node, show_stats=False)
        repo.dirstate.setbranch(branch)
        cmdutil.revert(ui, repo, rctx, repo.dirstate.parents())

    if opts.get('no_commit'):
        msg = _("changeset %s backed out, "
                "don't forget to commit.\n")
        ui.status(msg % short(node))
        return 0

    def commitfunc(ui, repo, message, match, opts):
        editform = 'backout'
        e = cmdutil.getcommiteditor(editform=editform, **opts)
        if not message:
            # we don't translate commit messages
            message = "Backed out changeset %s" % short(node)
            e = cmdutil.getcommiteditor(edit=True, editform=editform)
        return repo.commit(message, opts.get('user'), opts.get('date'),
                           match, editor=e)
    newnode = cmdutil.commit(ui, repo, commitfunc, [], opts)
    if not newnode:
        ui.status(_("nothing changed\n"))
        return 1
    cmdutil.commitstatus(repo, newnode, branch, bheads)

    def nice(node):
        return '%d:%s' % (repo.changelog.rev(node), short(node))
    ui.status(_('changeset %s backs out changeset %s\n') %
              (nice(repo.changelog.tip()), nice(node)))
    if opts.get('merge') and op1 != node:
        hg.clean(repo, op1, show_stats=False)
        ui.status(_('merging with changeset %s\n') %
                  nice(repo.changelog.tip()))
        try:
            ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'backout')
            return hg.merge(repo, hex(repo.changelog.tip()))
        finally:
            ui.setconfig('ui', 'forcemerge', '', '')
    return 0
@command('bisect',
    [('r', 'reset', False, _('reset bisect state')),
    ('g', 'good', False, _('mark changeset good')),
    ('b', 'bad', False, _('mark changeset bad')),
    ('s', 'skip', False, _('skip testing changeset')),
    ('e', 'extend', False, _('extend the bisect range')),
    ('c', 'command', '', _('use command to check changeset state'), _('CMD')),
    ('U', 'noupdate', False, _('do not update to target'))],
    _("[-gbsr] [-U] [-c CMD] [REV]"))
def bisect(ui, repo, rev=None, extra=None, command=None,
           reset=None, good=None, bad=None, skip=None, extend=None,
           noupdate=None):
    """subdivision search of changesets

    This command helps to find changesets which introduce problems. To
    use, mark the earliest changeset you know exhibits the problem as
    bad, then mark the latest changeset which is free from the problem
    as good. Bisect will update your working directory to a revision
    for testing (unless the -U/--noupdate option is specified). Once
    you have performed tests, mark the working directory as good or
    bad, and bisect will either update to another candidate changeset
    or announce that it has found the bad revision.

    As a shortcut, you can also use the revision argument to mark a
    revision as good or bad without checking it out first.

    If you supply a command, it will be used for automatic bisection.
    The environment variable HG_NODE will contain the ID of the
    changeset being tested. The exit status of the command will be
    used to mark revisions as good or bad: status 0 means good, 125
    means to skip the revision, 127 (command not found) will abort the
    bisection, and any other non-zero exit status means the revision
    is bad.

    .. container:: verbose

      Some examples:

      - start a bisection with known bad revision 34, and good revision 12::

          hg bisect --bad 34
          hg bisect --good 12

      - advance the current bisection by marking current revision as good or
        bad::

          hg bisect --good
          hg bisect --bad

      - mark the current revision, or a known revision, to be skipped (e.g. if
        that revision is not usable because of another issue)::

          hg bisect --skip
          hg bisect --skip 23

      - skip all revisions that do not touch directories ``foo`` or ``bar``::

          hg bisect --skip "!( file('path:foo') & file('path:bar') )"

      - forget the current bisection::

          hg bisect --reset

      - use 'make && make tests' to automatically find the first broken
        revision::

          hg bisect --reset
          hg bisect --bad 34
          hg bisect --good 12
          hg bisect --command "make && make tests"

      - see all changesets whose states are already known in the current
        bisection::

          hg log -r "bisect(pruned)"

      - see the changeset currently being bisected (especially useful
        if running with -U/--noupdate)::

          hg log -r "bisect(current)"

      - see all changesets that took part in the current bisection::

          hg log -r "bisect(range)"

      - you can even get a nice graph::

          hg log --graph -r "bisect(range)"

      See :hg:`help revsets` for more about the `bisect()` keyword.

    Returns 0 on success.
    """
    def extendbisectrange(nodes, good):
        # bisect is incomplete when it ends on a merge node and
        # one of the parent was not checked.
        parents = repo[nodes[0]].parents()
        if len(parents) > 1:
            if good:
                side = state['bad']
            else:
                side = state['good']
            num = len(set(i.node() for i in parents) & set(side))
            if num == 1:
                return parents[0].ancestor(parents[1])
        return None

    def print_result(nodes, good):
        displayer = cmdutil.show_changeset(ui, repo, {})
        if len(nodes) == 1:
            # narrowed it down to a single revision
            if good:
                ui.write(_("The first good revision is:\n"))
            else:
                ui.write(_("The first bad revision is:\n"))
            displayer.show(repo[nodes[0]])
            extendnode = extendbisectrange(nodes, good)
            if extendnode is not None:
                ui.write(_('Not all ancestors of this changeset have been'
                           ' checked.\nUse bisect --extend to continue the '
                           'bisection from\nthe common ancestor, %s.\n')
                         % extendnode)
        else:
            # multiple possible revisions
            if good:
                ui.write(_("Due to skipped revisions, the first "
                        "good revision could be any of:\n"))
            else:
                ui.write(_("Due to skipped revisions, the first "
                        "bad revision could be any of:\n"))
            for n in nodes:
                displayer.show(repo[n])
        displayer.close()

    def check_state(state, interactive=True):
        if not state['good'] or not state['bad']:
            if (good or bad or skip or reset) and interactive:
                return
            if not state['good']:
                raise error.Abort(_('cannot bisect (no known good revisions)'))
            else:
                raise error.Abort(_('cannot bisect (no known bad revisions)'))
        return True

    # backward compatibility
    if rev in "good bad reset init".split():
        ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n"))
        cmd, rev, extra = rev, extra, None
        if cmd == "good":
            good = True
        elif cmd == "bad":
            bad = True
        else:
            reset = True
    elif extra or good + bad + skip + reset + extend + bool(command) > 1:
        raise error.Abort(_('incompatible arguments'))

    cmdutil.checkunfinished(repo)

    if reset:
        p = repo.join("bisect.state")
        if os.path.exists(p):
            os.unlink(p)
        return

    state = hbisect.load_state(repo)

    if command:
        changesets = 1
        if noupdate:
            try:
                node = state['current'][0]
            except LookupError:
                raise error.Abort(_('current bisect revision is unknown - '
                                   'start a new bisect to fix'))
        else:
            node, p2 = repo.dirstate.parents()
            if p2 != nullid:
                raise error.Abort(_('current bisect revision is a merge'))
        try:
            while changesets:
                # update state
                state['current'] = [node]
                hbisect.save_state(repo, state)
                status = ui.system(command, environ={'HG_NODE': hex(node)})
                if status == 125:
                    transition = "skip"
                elif status == 0:
                    transition = "good"
                # status < 0 means process was killed
                elif status == 127:
                    raise error.Abort(_("failed to execute %s") % command)
                elif status < 0:
                    raise error.Abort(_("%s killed") % command)
                else:
                    transition = "bad"
                ctx = scmutil.revsingle(repo, rev, node)
                rev = None # clear for future iterations
                state[transition].append(ctx.node())
                ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
                check_state(state, interactive=False)
                # bisect
                nodes, changesets, bgood = hbisect.bisect(repo.changelog, state)
                # update to next check
                node = nodes[0]
                if not noupdate:
                    cmdutil.bailifchanged(repo)
                    hg.clean(repo, node, show_stats=False)
        finally:
            state['current'] = [node]
            hbisect.save_state(repo, state)
        print_result(nodes, bgood)
        return

    # update state

    if rev:
        nodes = [repo.lookup(i) for i in scmutil.revrange(repo, [rev])]
    else:
        nodes = [repo.lookup('.')]

    if good or bad or skip:
        if good:
            state['good'] += nodes
        elif bad:
            state['bad'] += nodes
        elif skip:
            state['skip'] += nodes
        hbisect.save_state(repo, state)

    if not check_state(state):
        return

    # actually bisect
    nodes, changesets, good = hbisect.bisect(repo.changelog, state)
    if extend:
        if not changesets:
            extendnode = extendbisectrange(nodes, good)
            if extendnode is not None:
                ui.write(_("Extending search to changeset %d:%s\n")
                         % (extendnode.rev(), extendnode))
                state['current'] = [extendnode.node()]
                hbisect.save_state(repo, state)
                if noupdate:
                    return
                cmdutil.bailifchanged(repo)
                return hg.clean(repo, extendnode.node())
        raise error.Abort(_("nothing to extend"))

    if changesets == 0:
        print_result(nodes, good)
    else:
        assert len(nodes) == 1 # only a single node can be tested next
        node = nodes[0]
        # compute the approximate number of remaining tests
        tests, size = 0, 2
        while size <= changesets:
            tests, size = tests + 1, size * 2
        rev = repo.changelog.rev(node)
        ui.write(_("Testing changeset %d:%s "
                   "(%d changesets remaining, ~%d tests)\n")
                 % (rev, short(node), changesets, tests))
        state['current'] = [node]
        hbisect.save_state(repo, state)
        if not noupdate:
            cmdutil.bailifchanged(repo)
            return hg.clean(repo, node)
@command('bookmarks|bookmark',
    [('f', 'force', False, _('force')),
    ('r', 'rev', '', _('revision for bookmark action'), _('REV')),
    ('d', 'delete', False, _('delete a given bookmark')),
    ('m', 'rename', '', _('rename a given bookmark'), _('OLD')),
    ('i', 'inactive', False, _('mark a bookmark inactive')),
    ] + formatteropts,
    _('hg bookmarks [OPTIONS]... [NAME]...'))
def bookmark(ui, repo, *names, **opts):
    '''create a new bookmark or list existing bookmarks

    Bookmarks are labels on changesets to help track lines of development.
    Bookmarks are unversioned and can be moved, renamed and deleted.
    Deleting or moving a bookmark has no effect on the associated changesets.

    Creating or updating to a bookmark causes it to be marked as 'active'.
    The active bookmark is indicated with a '*'.
    When a commit is made, the active bookmark will advance to the new commit.
    A plain :hg:`update` will also advance an active bookmark, if possible.
    Updating away from a bookmark will cause it to be deactivated.

    Bookmarks can be pushed and pulled between repositories (see
    :hg:`help push` and :hg:`help pull`). If a shared bookmark has
    diverged, a new 'divergent bookmark' of the form 'name@path' will
    be created. Using :hg:`merge` will resolve the divergence.

    A bookmark named '@' has the special property that :hg:`clone` will
    check it out by default if it exists.

    .. container:: verbose

      Examples:

      - create an active bookmark for a new line of development::

          hg book new-feature

      - create an inactive bookmark as a place marker::

          hg book -i reviewed

      - create an inactive bookmark on another changeset::

          hg book -r .^ tested

      - rename bookmark turkey to dinner::

          hg book -m turkey dinner

      - move the '@' bookmark from another branch::

          hg book -f @
    '''
    force = opts.get('force')
    rev = opts.get('rev')
    delete = opts.get('delete')
    rename = opts.get('rename')
    inactive = opts.get('inactive')

    def checkformat(mark):
        mark = mark.strip()
        if not mark:
            raise error.Abort(_("bookmark names cannot consist entirely of "
                               "whitespace"))
        scmutil.checknewlabel(repo, mark, 'bookmark')
        return mark

    def checkconflict(repo, mark, cur, force=False, target=None):
        if mark in marks and not force:
            if target:
                if marks[mark] == target and target == cur:
                    # re-activating a bookmark
                    return
                anc = repo.changelog.ancestors([repo[target].rev()])
                bmctx = repo[marks[mark]]
                divs = [repo[b].node() for b in marks
                        if b.split('@', 1)[0] == mark.split('@', 1)[0]]

                # allow resolving a single divergent bookmark even if moving
                # the bookmark across branches when a revision is specified
                # that contains a divergent bookmark
                if bmctx.rev() not in anc and target in divs:
                    bookmarks.deletedivergent(repo, [target], mark)
                    return

                deletefrom = [b for b in divs
                              if repo[b].rev() in anc or b == target]
                bookmarks.deletedivergent(repo, deletefrom, mark)
                if bookmarks.validdest(repo, bmctx, repo[target]):
                    ui.status(_("moving bookmark '%s' forward from %s\n") %
                              (mark, short(bmctx.node())))
                    return
            raise error.Abort(_("bookmark '%s' already exists "
                               "(use -f to force)") % mark)
        if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
            and not force):
            raise error.Abort(
                _("a bookmark cannot have the name of an existing branch"))

    if delete and rename:
        raise error.Abort(_("--delete and --rename are incompatible"))
    if delete and rev:
        raise error.Abort(_("--rev is incompatible with --delete"))
    if rename and rev:
        raise error.Abort(_("--rev is incompatible with --rename"))
    if not names and (delete or rev):
        raise error.Abort(_("bookmark name required"))

    if delete or rename or names or inactive:
        wlock = lock = tr = None
        try:
            wlock = repo.wlock()
            lock = repo.lock()
            cur = repo.changectx('.').node()
            marks = repo._bookmarks
            if delete:
                tr = repo.transaction('bookmark')
                for mark in names:
                    if mark not in marks:
                        raise error.Abort(_("bookmark '%s' does not exist") %
                                         mark)
                    if mark == repo._activebookmark:
                        bookmarks.deactivate(repo)
                    del marks[mark]

            elif rename:
                tr = repo.transaction('bookmark')
                if not names:
                    raise error.Abort(_("new bookmark name required"))
                elif len(names) > 1:
                    raise error.Abort(_("only one new bookmark name allowed"))
                mark = checkformat(names[0])
                if rename not in marks:
                    raise error.Abort(_("bookmark '%s' does not exist")
                                     % rename)
                checkconflict(repo, mark, cur, force)
                marks[mark] = marks[rename]
                if repo._activebookmark == rename and not inactive:
                    bookmarks.activate(repo, mark)
                del marks[rename]
            elif names:
                tr = repo.transaction('bookmark')
                newact = None
                for mark in names:
                    mark = checkformat(mark)
                    if newact is None:
                        newact = mark
                    if inactive and mark == repo._activebookmark:
                        bookmarks.deactivate(repo)
                        return
                    tgt = cur
                    if rev:
                        tgt = scmutil.revsingle(repo, rev).node()
                    checkconflict(repo, mark, cur, force, tgt)
                    marks[mark] = tgt
                if not inactive and cur == marks[newact] and not rev:
                    bookmarks.activate(repo, newact)
                elif cur != tgt and newact == repo._activebookmark:
                    bookmarks.deactivate(repo)
            elif inactive:
                if len(marks) == 0:
                    ui.status(_("no bookmarks set\n"))
                elif not repo._activebookmark:
                    ui.status(_("no active bookmark\n"))
                else:
                    bookmarks.deactivate(repo)
            if tr is not None:
                marks.recordchange(tr)
                tr.close()
        finally:
            lockmod.release(tr, lock, wlock)
    else: # show bookmarks
        fm = ui.formatter('bookmarks', opts)
        hexfn = fm.hexfunc
        marks = repo._bookmarks
        if len(marks) == 0 and not fm:
            ui.status(_("no bookmarks set\n"))
        for bmark, n in sorted(marks.iteritems()):
            active = repo._activebookmark
            if bmark == active:
                prefix, label = '*', activebookmarklabel
            else:
                prefix, label = ' ', ''

            fm.startitem()
            if not ui.quiet:
                fm.plain(' %s ' % prefix, label=label)
            fm.write('bookmark', '%s', bmark, label=label)
            pad = " " * (25 - encoding.colwidth(bmark))
            fm.condwrite(not ui.quiet, 'rev node', pad + ' %d:%s',
                         repo.changelog.rev(n), hexfn(n), label=label)
            fm.data(active=(bmark == active))
            fm.plain('\n')
        fm.end()

@command('branch',
    [('f', 'force', None,
     _('set branch name even if it shadows an existing branch')),
    ('C', 'clean', None, _('reset branch name to parent branch name'))],
    _('[-fC] [NAME]'))
def branch(ui, repo, label=None, **opts):
    """set or show the current branch name

    .. note::

       Branch names are permanent and global. Use :hg:`bookmark` to create a
       light-weight bookmark instead. See :hg:`help glossary` for more
       information about named branches and bookmarks.

    With no argument, show the current branch name. With one argument,
    set the working directory branch name (the branch will not exist
    in the repository until the next commit). Standard practice
    recommends that primary development take place on the 'default'
    branch.

    Unless -f/--force is specified, branch will not let you set a
    branch name that already exists.

    Use -C/--clean to reset the working directory branch to that of
    the parent of the working directory, negating a previous branch
    change.

    Use the command :hg:`update` to switch to an existing branch. Use
    :hg:`commit --close-branch` to mark this branch head as closed.
    When all heads of a branch are closed, the branch will be
    considered closed.

    Returns 0 on success.
    """
    if label:
        label = label.strip()

    if not opts.get('clean') and not label:
        ui.write("%s\n" % repo.dirstate.branch())
        return

    with repo.wlock():
        if opts.get('clean'):
            label = repo[None].p1().branch()
            repo.dirstate.setbranch(label)
            ui.status(_('reset working directory to branch %s\n') % label)
        elif label:
            if not opts.get('force') and label in repo.branchmap():
                if label not in [p.branch() for p in repo[None].parents()]:
                    raise error.Abort(_('a branch of the same name already'
                                       ' exists'),
                                     # i18n: "it" refers to an existing branch
                                     hint=_("use 'hg update' to switch to it"))
            scmutil.checknewlabel(repo, label, 'branch')
            repo.dirstate.setbranch(label)
            ui.status(_('marked working directory as branch %s\n') % label)

            # find any open named branches aside from default
            others = [n for n, h, t, c in repo.branchmap().iterbranches()
                      if n != "default" and not c]
            if not others:
                ui.status(_('(branches are permanent and global, '
                            'did you want a bookmark?)\n'))
""" fm = ui.formatter('branches', opts) hexfunc = fm.hexfunc allheads = set(repo.heads()) branches = [] for tag, heads, tip, isclosed in repo.branchmap().iterbranches(): isactive = not isclosed and bool(set(heads) & allheads) branches.append((tag, repo[tip], isactive, not isclosed)) branches.sort(key=lambda i: (i[2], i[1].rev(), i[0], i[3]), reverse=True) for tag, ctx, isactive, isopen in branches: if active and not isactive: continue if isactive: label = 'branches.active' notice = '' elif not isopen: if not closed: continue label = 'branches.closed' notice = _(' (closed)') else: label = 'branches.inactive' notice = _(' (inactive)') current = (tag == repo.dirstate.branch()) if current: label = 'branches.current' fm.startitem() fm.write('branch', '%s', tag, label=label) rev = ctx.rev() padsize = max(31 - len(str(rev)) - encoding.colwidth(tag), 0) fmt = ' ' * padsize + ' %d:%s' fm.condwrite(not ui.quiet, 'rev node', fmt, rev, hexfunc(ctx.node()), label='log.changeset changeset.%s' % ctx.phasestr()) fm.data(active=isactive, closed=not isopen, current=current) if not ui.quiet: fm.plain(notice) fm.plain('\n') fm.end() @command('bundle', [('f', 'force', None, _('run even when the destination is unrelated')), ('r', 'rev', [], _('a changeset intended to be added to the destination'), _('REV')), ('b', 'branch', [], _('a specific branch you would like to bundle'), _('BRANCH')), ('', 'base', [], _('a base changeset assumed to be available at the destination'), _('REV')), ('a', 'all', None, _('bundle all changesets in the repository')), ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE')), ] + remoteopts, _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]')) def bundle(ui, repo, fname, dest=None, **opts): """create a changegroup file Generate a changegroup file collecting changesets to be added to a repository. To create a bundle containing all changesets, use -a/--all (or --base null). 
Otherwise, hg assumes the destination will have all the nodes you specify with --base parameters. Otherwise, hg will assume the repository has all the nodes in destination, or default-push/default if no destination is specified. You can change bundle format with the -t/--type option. You can specify a compression, a bundle version or both using a dash (comp-version). The available compression methods are: none, bzip2, and gzip (by default, bundles are compressed using bzip2). The available formats are: v1, v2 (default to most suitable). The bundle file can then be transferred using conventional means and applied to another repository with the unbundle or pull command. This is useful when direct push and pull are not available or when exporting an entire repository is undesirable. Applying bundles preserves all changeset contents including permissions, copy/rename information, and revision history. Returns 0 on success, 1 if no changes found. """ revs = None if 'rev' in opts: revstrings = opts['rev'] revs = scmutil.revrange(repo, revstrings) if revstrings and not revs: raise error.Abort(_('no commits to bundle')) bundletype = opts.get('type', 'bzip2').lower() try: bcompression, cgversion, params = exchange.parsebundlespec( repo, bundletype, strict=False) except error.UnsupportedBundleSpecification as e: raise error.Abort(str(e), hint=_('see "hg help bundle" for supported ' 'values for --type')) # Packed bundles are a pseudo bundle format for now. if cgversion == 's1': raise error.Abort(_('packed bundles cannot be produced by "hg bundle"'), hint=_('use "hg debugcreatestreamclonebundle"')) if opts.get('all'): if dest: raise error.Abort(_("--all is incompatible with specifying " "a destination")) if opts.get('base'): ui.warn(_("ignoring --base because --all was specified\n")) base = ['null'] else: base = scmutil.revrange(repo, opts.get('base')) # TODO: get desired bundlecaps from command line. 
bundlecaps = None if base: if dest: raise error.Abort(_("--base is incompatible with specifying " "a destination")) common = [repo.lookup(rev) for rev in base] heads = revs and map(repo.lookup, revs) or revs cg = changegroup.getchangegroup(repo, 'bundle', heads=heads, common=common, bundlecaps=bundlecaps, version=cgversion) outgoing = None else: dest = ui.expandpath(dest or 'default-push', dest or 'default') dest, branches = hg.parseurl(dest, opts.get('branch')) other = hg.peer(repo, opts, dest) revs, checkout = hg.addbranchrevs(repo, repo, branches, revs) heads = revs and map(repo.lookup, revs) or revs outgoing = discovery.findcommonoutgoing(repo, other, onlyheads=heads, force=opts.get('force'), portable=True) cg = changegroup.getlocalchangegroup(repo, 'bundle', outgoing, bundlecaps, version=cgversion) if not cg: scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded) return 1 if cgversion == '01': #bundle1 if bcompression is None: bcompression = 'UN' bversion = 'HG10' + bcompression bcompression = None else: assert cgversion == '02' bversion = 'HG20' changegroup.writebundle(ui, cg, fname, bversion, compression=bcompression) @command('cat', [('o', 'output', '', _('print output to file with formatted name'), _('FORMAT')), ('r', 'rev', '', _('print the given revision'), _('REV')), ('', 'decode', None, _('apply any matching decode filter')), ] + walkopts, _('[OPTION]... FILE...'), inferrepo=True) def cat(ui, repo, file1, *pats, **opts): """output the current or given revision of files Print the specified files as they were at the given revision. If no revision is given, the parent of the working directory is used. Output may be to a file, in which case the name of the file is given using a format string. The formatting rules as follows: :``%%``: literal "%" character :``%s``: basename of file being printed :``%d``: dirname of file being printed, or '.' 
if in repository root :``%p``: root-relative path name of file being printed :``%H``: changeset hash (40 hexadecimal digits) :``%R``: changeset revision number :``%h``: short-form changeset hash (12 hexadecimal digits) :``%r``: zero-padded changeset revision number :``%b``: basename of the exporting repository Returns 0 on success. """ ctx = scmutil.revsingle(repo, opts.get('rev')) m = scmutil.match(ctx, (file1,) + pats, opts) return cmdutil.cat(ui, repo, ctx, m, '', **opts) @command('^clone', [('U', 'noupdate', None, _('the clone will include an empty working ' 'directory (only a repository)')), ('u', 'updaterev', '', _('revision, tag, or branch to check out'), _('REV')), ('r', 'rev', [], _('include the specified changeset'), _('REV')), ('b', 'branch', [], _('clone only the specified branch'), _('BRANCH')), ('', 'pull', None, _('use pull protocol to copy metadata')), ('', 'uncompressed', None, _('use uncompressed transfer (fast over LAN)')), ] + remoteopts, _('[OPTION]... SOURCE [DEST]'), norepo=True) def clone(ui, source, dest=None, **opts): """make a copy of an existing repository Create a copy of an existing repository in a new directory. If no destination directory name is specified, it defaults to the basename of the source. The location of the source is added to the new repository's ``.hg/hgrc`` file, as the default to be used for future pulls. Only local paths and ``ssh://`` URLs are supported as destinations. For ``ssh://`` destinations, no working directory or ``.hg/hgrc`` will be created on the remote side. If the source repository has a bookmark called '@' set, that revision will be checked out in the new repository by default. To check out a particular version, use -u/--update, or -U/--noupdate to create a clone with no working directory. To pull only a subset of changesets, specify one or more revisions identifiers with -r/--rev or branches with -b/--branch. The resulting clone will contain only the specified changesets and their ancestors. 
These options (or 'clone src#rev dest') imply --pull, even for local source repositories. .. note:: Specifying a tag will include the tagged changeset but not the changeset containing the tag. .. container:: verbose For efficiency, hardlinks are used for cloning whenever the source and destination are on the same filesystem (note this applies only to the repository data, not to the working directory). Some filesystems, such as AFS, implement hardlinking incorrectly, but do not report errors. In these cases, use the --pull option to avoid hardlinking. In some cases, you can clone repositories and the working directory using full hardlinks with :: $ cp -al REPO REPOCLONE This is the fastest way to clone, but it is not always safe. The operation is not atomic (making sure REPO is not modified during the operation is up to you) and you have to make sure your editor breaks hardlinks (Emacs and most Linux Kernel tools do so). Also, this is not compatible with certain extensions that place their metadata under the .hg directory, such as mq. Mercurial will update the working directory to the first applicable revision from this list: a) null if -U or the source repository has no changesets b) if -u . and the source repository is local, the first parent of the source repository's working directory c) the changeset specified with -u (if a branch name, this means the latest head of that branch) d) the changeset specified with -r e) the tipmost head specified with -b f) the tipmost head specified with the url#branch source syntax g) the revision marked with the '@' bookmark, if present h) the tipmost head of the default branch i) tip When cloning from servers that support it, Mercurial may fetch pre-generated data from a server-advertised URL. When this is done, hooks operating on incoming changesets and changegroups may fire twice, once for the bundle fetched from the URL and another for any additional data not fetched from this URL. 
In addition, if an error occurs, the repository may be rolled back to a partial clone. This behavior may change in future releases. See :hg:`help -e clonebundles` for more. Examples: - clone a remote repository to a new directory named hg/:: hg clone http://selenic.com/hg - create a lightweight local clone:: hg clone project/ project-feature/ - clone from an absolute path on an ssh server (note double-slash):: hg clone ssh://user@server//home/projects/alpha/ - do a high-speed clone over a LAN while checking out a specified version:: hg clone --uncompressed http://server/repo -u 1.5 - create a repository without changesets after a particular revision:: hg clone -r 04e544 experimental/ good/ - clone (and track) a particular named branch:: hg clone http://selenic.com/hg#stable See :hg:`help urls` for details on specifying URLs. Returns 0 on success. """ if opts.get('noupdate') and opts.get('updaterev'): raise error.Abort(_("cannot specify both --noupdate and --updaterev")) r = hg.clone(ui, opts, source, dest, pull=opts.get('pull'), stream=opts.get('uncompressed'), rev=opts.get('rev'), update=opts.get('updaterev') or not opts.get('noupdate'), branch=opts.get('branch'), shareopts=opts.get('shareopts')) return r is None @command('^commit|ci', [('A', 'addremove', None, _('mark new/missing files as added/removed before committing')), ('', 'close-branch', None, _('mark a branch head as closed')), ('', 'amend', None, _('amend the parent of the working directory')), ('s', 'secret', None, _('use the secret phase for committing')), ('e', 'edit', None, _('invoke editor on commit messages')), ('i', 'interactive', None, _('use interactive mode')), ] + walkopts + commitopts + commitopts2 + subrepoopts, _('[OPTION]... [FILE]...'), inferrepo=True) def commit(ui, repo, *pats, **opts): """commit the specified files or all outstanding changes Commit changes to the given files into the repository. Unlike a centralized SCM, this operation is a local operation. 
See :hg:`push` for a way to actively distribute your changes. If a list of files is omitted, all changes reported by :hg:`status` will be committed. If you are committing the result of a merge, do not provide any filenames or -I/-X filters. If no commit message is specified, Mercurial starts your configured editor where you can enter a message. In case your commit fails, you will find a backup of your message in ``.hg/last-message.txt``. The --close-branch flag can be used to mark the current branch head closed. When all heads of a branch are closed, the branch will be considered closed and no longer listed. The --amend flag can be used to amend the parent of the working directory with a new commit that contains the changes in the parent in addition to those currently reported by :hg:`status`, if there are any. The old commit is stored in a backup bundle in ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle` on how to restore it). Message, user and date are taken from the amended commit unless specified. When a message isn't specified on the command line, the editor will open with the message of the amended commit. It is not possible to amend public changesets (see :hg:`help phases`) or changesets that have children. See :hg:`help dates` for a list of formats valid for -d/--date. Returns 0 on success, 1 if nothing changed. .. 
container:: verbose Examples: - commit all files ending in .py:: hg commit --include "set:**.py" - commit all non-binary files:: hg commit --exclude "set:binary()" - amend the current commit and set the date to now:: hg commit --amend --date now """ wlock = lock = None try: wlock = repo.wlock() lock = repo.lock() return _docommit(ui, repo, *pats, **opts) finally: release(lock, wlock) def _docommit(ui, repo, *pats, **opts): if opts.get('interactive'): opts.pop('interactive') cmdutil.dorecord(ui, repo, commit, None, False, cmdutil.recordfilter, *pats, **opts) return if opts.get('subrepos'): if opts.get('amend'): raise error.Abort(_('cannot amend with --subrepos')) # Let --subrepos on the command line override config setting. ui.setconfig('ui', 'commitsubrepos', True, 'commit') cmdutil.checkunfinished(repo, commit=True) branch = repo[None].branch() bheads = repo.branchheads(branch) extra = {} if opts.get('close_branch'): extra['close'] = 1 if not bheads: raise error.Abort(_('can only close branch heads')) elif opts.get('amend'): if repo[None].parents()[0].p1().branch() != branch and \ repo[None].parents()[0].p2().branch() != branch: raise error.Abort(_('can only close branch heads')) if opts.get('amend'): if ui.configbool('ui', 'commitsubrepos'): raise error.Abort(_('cannot amend with ui.commitsubrepos enabled')) old = repo['.'] if not old.mutable(): raise error.Abort(_('cannot amend public changesets')) if len(repo[None].parents()) > 1: raise error.Abort(_('cannot amend while merging')) allowunstable = obsolete.isenabled(repo, obsolete.allowunstableopt) if not allowunstable and old.children(): raise error.Abort(_('cannot amend changeset with children')) # commitfunc is used only for temporary amend commit by cmdutil.amend def commitfunc(ui, repo, message, match, opts): return repo.commit(message, opts.get('user') or old.user(), opts.get('date') or old.date(), match, extra=extra) node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts) if node == old.node(): 
ui.status(_("nothing changed\n")) return 1 else: def commitfunc(ui, repo, message, match, opts): backup = ui.backupconfig('phases', 'new-commit') baseui = repo.baseui basebackup = baseui.backupconfig('phases', 'new-commit') try: if opts.get('secret'): ui.setconfig('phases', 'new-commit', 'secret', 'commit') # Propagate to subrepos baseui.setconfig('phases', 'new-commit', 'secret', 'commit') editform = cmdutil.mergeeditform(repo[None], 'commit.normal') editor = cmdutil.getcommiteditor(editform=editform, **opts) return repo.commit(message, opts.get('user'), opts.get('date'), match, editor=editor, extra=extra) finally: ui.restoreconfig(backup) repo.baseui.restoreconfig(basebackup) node = cmdutil.commit(ui, repo, commitfunc, pats, opts) if not node: stat = cmdutil.postcommitstatus(repo, pats, opts) if stat[3]: ui.status(_("nothing changed (%d missing files, see " "'hg status')\n") % len(stat[3])) else: ui.status(_("nothing changed\n")) return 1 cmdutil.commitstatus(repo, node, branch, bheads, opts) @command('config|showconfig|debugconfig', [('u', 'untrusted', None, _('show untrusted configuration options')), ('e', 'edit', None, _('edit user config')), ('l', 'local', None, _('edit repository config')), ('g', 'global', None, _('edit global config'))], _('[-u] [NAME]...'), optionalrepo=True) def config(ui, repo, *values, **opts): """show combined config settings from all hgrc files With no arguments, print names and values of all config items. With one argument of the form section.name, print just the value of that config item. With multiple arguments, print names and values of all config items with matching section names. With --edit, start an editor on the user-level config file. With --global, edit the system-wide config file. With --local, edit the repository-level config file. With --debug, the source (filename and line number) is printed for each config item. See :hg:`help config` for more information about config files. 
Returns 0 on success, 1 if NAME does not exist. """ if opts.get('edit') or opts.get('local') or opts.get('global'): if opts.get('local') and opts.get('global'): raise error.Abort(_("can't use --local and --global together")) if opts.get('local'): if not repo: raise error.Abort(_("can't use --local outside a repository")) paths = [repo.join('hgrc')] elif opts.get('global'): paths = scmutil.systemrcpath() else: paths = scmutil.userrcpath() for f in paths: if os.path.exists(f): break else: if opts.get('global'): samplehgrc = uimod.samplehgrcs['global'] elif opts.get('local'): samplehgrc = uimod.samplehgrcs['local'] else: samplehgrc = uimod.samplehgrcs['user'] f = paths[0] fp = open(f, "w") fp.write(samplehgrc) fp.close() editor = ui.geteditor() ui.system("%s \"%s\"" % (editor, f), onerr=error.Abort, errprefix=_("edit failed")) return for f in scmutil.rcpath(): ui.debug('read config from: %s\n' % f) untrusted = bool(opts.get('untrusted')) if values: sections = [v for v in values if '.' not in v] items = [v for v in values if '.' in v] if len(items) > 1 or items and sections: raise error.Abort(_('only one config item permitted')) matched = False for section, name, value in ui.walkconfig(untrusted=untrusted): value = str(value).replace('\n', '\\n') sectname = section + '.' + name if values: for v in values: if v == section: ui.debug('%s: ' % ui.configsource(section, name, untrusted)) ui.write('%s=%s\n' % (sectname, value)) matched = True elif v == sectname: ui.debug('%s: ' % ui.configsource(section, name, untrusted)) ui.write(value, '\n') matched = True else: ui.debug('%s: ' % ui.configsource(section, name, untrusted)) ui.write('%s=%s\n' % (sectname, value)) matched = True if matched: return 0 return 1 @command('copy|cp', [('A', 'after', None, _('record a copy that has already occurred')), ('f', 'force', None, _('forcibly copy over an existing managed file')), ] + walkopts + dryrunopts, _('[OPTION]... [SOURCE]... 
DEST')) def copy(ui, repo, *pats, **opts): """mark files as copied for the next commit Mark dest as having copies of source files. If dest is a directory, copies are put in that directory. If dest is a file, the source must be a single file. By default, this command copies the contents of files as they exist in the working directory. If invoked with -A/--after, the operation is recorded, but no copying is performed. This command takes effect with the next commit. To undo a copy before that, see :hg:`revert`. Returns 0 on success, 1 if errors are encountered. """ with repo.wlock(False): return cmdutil.copy(ui, repo, pats, opts) @command('debugancestor', [], _('[INDEX] REV1 REV2'), optionalrepo=True) def debugancestor(ui, repo, *args): """find the ancestor revision of two revisions in a given index""" if len(args) == 3: index, rev1, rev2 = args r = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), index) lookup = r.lookup elif len(args) == 2: if not repo: raise error.Abort(_("there is no Mercurial repository here " "(.hg not found)")) rev1, rev2 = args r = repo.changelog lookup = repo.lookup else: raise error.Abort(_('either two or three arguments required')) a = r.ancestor(lookup(rev1), lookup(rev2)) ui.write("%d:%s\n" % (r.rev(a), hex(a))) @command('debugbuilddag', [('m', 'mergeable-file', None, _('add single file mergeable changes')), ('o', 'overwritten-file', None, _('add single file all revs overwrite')), ('n', 'new-file', None, _('add new file at each rev'))], _('[OPTION]... [TEXT]')) def debugbuilddag(ui, repo, text=None, mergeable_file=False, overwritten_file=False, new_file=False): """builds a repo with a given DAG from scratch in the current empty repo The description of the DAG is read from stdin if not given on the command line. Elements: - "+n" is a linear run of n nodes based on the current default parent - "." 
is a single node based on the current default parent - "$" resets the default parent to null (implied at the start); otherwise the default parent is always the last node created - " 0: raise error.Abort(_('repository is not empty')) # determine number of revs in DAG total = 0 for type, data in dagparser.parsedag(text): if type == 'n': total += 1 if mergeable_file: linesperrev = 2 # make a file with k lines per rev initialmergedlines = [str(i) for i in xrange(0, total * linesperrev)] initialmergedlines.append("") tags = [] lock = tr = None try: lock = repo.lock() tr = repo.transaction("builddag") at = -1 atbranch = 'default' nodeids = [] id = 0 ui.progress(_('building'), id, unit=_('revisions'), total=total) for type, data in dagparser.parsedag(text): if type == 'n': ui.note(('node %s\n' % str(data))) id, ps = data files = [] fctxs = {} p2 = None if mergeable_file: fn = "mf" p1 = repo[ps[0]] if len(ps) > 1: p2 = repo[ps[1]] pa = p1.ancestor(p2) base, local, other = [x[fn].data() for x in (pa, p1, p2)] m3 = simplemerge.Merge3Text(base, local, other) ml = [l.strip() for l in m3.merge_lines()] ml.append("") elif at > 0: ml = p1[fn].data().split("\n") else: ml = initialmergedlines ml[id * linesperrev] += " r%i" % id mergedtext = "\n".join(ml) files.append(fn) fctxs[fn] = context.memfilectx(repo, fn, mergedtext) if overwritten_file: fn = "of" files.append(fn) fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id) if new_file: fn = "nf%i" % id files.append(fn) fctxs[fn] = context.memfilectx(repo, fn, "r%i\n" % id) if len(ps) > 1: if not p2: p2 = repo[ps[1]] for fn in p2: if fn.startswith("nf"): files.append(fn) fctxs[fn] = p2[fn] def fctxfn(repo, cx, path): return fctxs.get(path) if len(ps) == 0 or ps[0] < 0: pars = [None, None] elif len(ps) == 1: pars = [nodeids[ps[0]], None] else: pars = [nodeids[p] for p in ps] cx = context.memctx(repo, pars, "r%i" % id, files, fctxfn, date=(id, 0), user="debugbuilddag", extra={'branch': atbranch}) nodeid = repo.commitctx(cx) 
nodeids.append(nodeid) at = id elif type == 'l': id, name = data ui.note(('tag %s\n' % name)) tags.append("%s %s\n" % (hex(repo.changelog.node(id)), name)) elif type == 'a': ui.note(('branch %s\n' % data)) atbranch = data ui.progress(_('building'), id, unit=_('revisions'), total=total) tr.close() if tags: repo.vfs.write("localtags", "".join(tags)) finally: ui.progress(_('building'), None) release(tr, lock) @command('debugbundle', [('a', 'all', None, _('show all details')), ('', 'spec', None, _('print the bundlespec of the bundle'))], _('FILE'), norepo=True) def debugbundle(ui, bundlepath, all=None, spec=None, **opts): """lists the contents of a bundle""" with hg.openpath(ui, bundlepath) as f: if spec: spec = exchange.getbundlespec(ui, f) ui.write('%s\n' % spec) return gen = exchange.readbundle(ui, f, bundlepath) if isinstance(gen, bundle2.unbundle20): return _debugbundle2(ui, gen, all=all, **opts) if all: ui.write(("format: id, p1, p2, cset, delta base, len(delta)\n")) def showchunks(named): ui.write("\n%s\n" % named) chain = None while True: chunkdata = gen.deltachunk(chain) if not chunkdata: break node = chunkdata['node'] p1 = chunkdata['p1'] p2 = chunkdata['p2'] cs = chunkdata['cs'] deltabase = chunkdata['deltabase'] delta = chunkdata['delta'] ui.write("%s %s %s %s %s %s\n" % (hex(node), hex(p1), hex(p2), hex(cs), hex(deltabase), len(delta))) chain = node chunkdata = gen.changelogheader() showchunks("changelog") chunkdata = gen.manifestheader() showchunks("manifest") while True: chunkdata = gen.filelogheader() if not chunkdata: break fname = chunkdata['filename'] showchunks(fname) else: if isinstance(gen, bundle2.unbundle20): raise error.Abort(_('use debugbundle2 for this file')) chunkdata = gen.changelogheader() chain = None while True: chunkdata = gen.deltachunk(chain) if not chunkdata: break node = chunkdata['node'] ui.write("%s\n" % hex(node)) chain = node def _debugbundle2(ui, gen, **opts): """lists the contents of a bundle2""" if not isinstance(gen, 
bundle2.unbundle20): raise error.Abort(_('not a bundle2 file')) ui.write(('Stream params: %s\n' % repr(gen.params))) for part in gen.iterparts(): ui.write('%s -- %r\n' % (part.type, repr(part.params))) if part.type == 'changegroup': version = part.params.get('version', '01') cg = changegroup.getunbundler(version, part, 'UN') chunkdata = cg.changelogheader() chain = None while True: chunkdata = cg.deltachunk(chain) if not chunkdata: break node = chunkdata['node'] ui.write(" %s\n" % hex(node)) chain = node @command('debugcreatestreamclonebundle', [], 'FILE') def debugcreatestreamclonebundle(ui, repo, fname): """create a stream clone bundle file Stream bundles are special bundles that are essentially archives of revlog files. They are commonly used for cloning very quickly. """ requirements, gen = streamclone.generatebundlev1(repo) changegroup.writechunks(ui, gen, fname) ui.write(_('bundle requirements: %s\n') % ', '.join(sorted(requirements))) @command('debugapplystreamclonebundle', [], 'FILE') def debugapplystreamclonebundle(ui, repo, fname): """apply a stream clone bundle file""" f = hg.openpath(ui, fname) gen = exchange.readbundle(ui, f, fname) gen.apply(repo) @command('debugcheckstate', [], '') def debugcheckstate(ui, repo): """validate the correctness of the current dirstate""" parent1, parent2 = repo.dirstate.parents() m1 = repo[parent1].manifest() m2 = repo[parent2].manifest() errors = 0 for f in repo.dirstate: state = repo.dirstate[f] if state in "nr" and f not in m1: ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state)) errors += 1 if state in "a" and f in m1: ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state)) errors += 1 if state in "m" and f not in m1 and f not in m2: ui.warn(_("%s in state %s, but not in either manifest\n") % (f, state)) errors += 1 for f in m1: state = repo.dirstate[f] if state not in "nrm": ui.warn(_("%s in manifest1, but listed as state %s") % (f, state)) errors += 1 if errors: error = _(".hg/dirstate 
inconsistent with current parent's manifest") raise error.Abort(error) @command('debugcommands', [], _('[COMMAND]'), norepo=True) def debugcommands(ui, cmd='', *args): """list all available commands and options""" for cmd, vals in sorted(table.iteritems()): cmd = cmd.split('|')[0].strip('^') opts = ', '.join([i[1] for i in vals[1]]) ui.write('%s: %s\n' % (cmd, opts)) @command('debugcomplete', [('o', 'options', None, _('show the command options'))], _('[-o] CMD'), norepo=True) def debugcomplete(ui, cmd='', **opts): """returns the completion list associated with the given command""" if opts.get('options'): options = [] otables = [globalopts] if cmd: aliases, entry = cmdutil.findcmd(cmd, table, False) otables.append(entry[1]) for t in otables: for o in t: if "(DEPRECATED)" in o[3]: continue if o[0]: options.append('-%s' % o[0]) options.append('--%s' % o[1]) ui.write("%s\n" % "\n".join(options)) return cmdlist, unused_allcmds = cmdutil.findpossible(cmd, table) if ui.verbose: cmdlist = [' '.join(c[0]) for c in cmdlist.values()] ui.write("%s\n" % "\n".join(sorted(cmdlist))) @command('debugdag', [('t', 'tags', None, _('use tags as labels')), ('b', 'branches', None, _('annotate with branch names')), ('', 'dots', None, _('use dots for runs')), ('s', 'spaces', None, _('separate elements by spaces'))], _('[OPTION]... [FILE [REV]...]'), optionalrepo=True) def debugdag(ui, repo, file_=None, *revs, **opts): """format the changelog or an index DAG as a concise textual description If you pass a revlog index, the revlog's DAG is emitted. If you list revision numbers, they get labeled in the output as rN. Otherwise, the changelog DAG of the current repo is emitted. 
""" spaces = opts.get('spaces') dots = opts.get('dots') if file_: rlog = revlog.revlog(scmutil.opener(os.getcwd(), audit=False), file_) revs = set((int(r) for r in revs)) def events(): for r in rlog: yield 'n', (r, list(p for p in rlog.parentrevs(r) if p != -1)) if r in revs: yield 'l', (r, "r%i" % r) elif repo: cl = repo.changelog tags = opts.get('tags') branches = opts.get('branches') if tags: labels = {} for l, n in repo.tags().items(): labels.setdefault(cl.rev(n), []).append(l) def events(): b = "default" for r in cl: if branches: newb = cl.read(cl.node(r))[5]['branch'] if newb != b: yield 'a', newb b = newb yield 'n', (r, list(p for p in cl.parentrevs(r) if p != -1)) if tags: ls = labels.get(r) if ls: for l in ls: yield 'l', (r, l) else: raise error.Abort(_('need repo for changelog dag')) for line in dagparser.dagtextlines(events(), addspaces=spaces, wraplabels=True, wrapannotations=True, wrapnonlinear=dots, usedots=dots, maxlinewidth=70): ui.write(line) ui.write("\n") @command('debugdata', debugrevlogopts, _('-c|-m|FILE REV')) def debugdata(ui, repo, file_, rev=None, **opts): """dump the contents of a data file revision""" if opts.get('changelog') or opts.get('manifest'): file_, rev = None, file_ elif rev is None: raise error.CommandError('debugdata', _('invalid arguments')) r = cmdutil.openrevlog(repo, 'debugdata', file_, opts) try: ui.write(r.revision(r.lookup(rev))) except KeyError: raise error.Abort(_('invalid revision identifier %s') % rev) @command('debugdate', [('e', 'extended', None, _('try extended date formats'))], _('[-e] DATE [RANGE]'), norepo=True, optionalrepo=True) def debugdate(ui, date, range=None, **opts): """parse and display a date""" if opts["extended"]: d = util.parsedate(date, util.extendeddateformats) else: d = util.parsedate(date) ui.write(("internal: %s %s\n") % d) ui.write(("standard: %s\n") % util.datestr(d)) if range: m = util.matchdate(range) ui.write(("match: %s\n") % m(d[0])) @command('debugdiscovery', [('', 'old', None, _('use 
old-style discovery')), ('', 'nonheads', None, _('use old-style discovery with non-heads included')), ] + remoteopts, _('[-l REV] [-r REV] [-b BRANCH]... [OTHER]')) def debugdiscovery(ui, repo, remoteurl="default", **opts): """runs the changeset discovery protocol in isolation""" remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl), opts.get('branch')) remote = hg.peer(repo, opts, remoteurl) ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl)) # make sure tests are repeatable random.seed(12323) def doit(localheads, remoteheads, remote=remote): if opts.get('old'): if localheads: raise error.Abort('cannot use localheads with old style ' 'discovery') if not util.safehasattr(remote, 'branches'): # enable in-client legacy support remote = localrepo.locallegacypeer(remote.local()) common, _in, hds = treediscovery.findcommonincoming(repo, remote, force=True) common = set(common) if not opts.get('nonheads'): ui.write(("unpruned common: %s\n") % " ".join(sorted(short(n) for n in common))) dag = dagutil.revlogdag(repo.changelog) all = dag.ancestorset(dag.internalizeall(common)) common = dag.externalizeall(dag.headsetofconnecteds(all)) else: common, any, hds = setdiscovery.findcommonheads(ui, repo, remote) common = set(common) rheads = set(hds) lheads = set(repo.heads()) ui.write(("common heads: %s\n") % " ".join(sorted(short(n) for n in common))) if lheads <= common: ui.write(("local is subset\n")) elif rheads <= common: ui.write(("remote is subset\n")) serverlogs = opts.get('serverlog') if serverlogs: for filename in serverlogs: with open(filename, 'r') as logfile: line = logfile.readline() while line: parts = line.strip().split(';') op = parts[1] if op == 'cg': pass elif op == 'cgss': doit(parts[2].split(' '), parts[3].split(' ')) elif op == 'unb': doit(parts[3].split(' '), parts[2].split(' ')) line = logfile.readline() else: remoterevs, _checkout = hg.addbranchrevs(repo, remote, branches, opts.get('remote_head')) localrevs = opts.get('local_head') 
doit(localrevs, remoterevs) @command('debugextensions', formatteropts, [], norepo=True) def debugextensions(ui, **opts): '''show information about active extensions''' exts = extensions.extensions(ui) fm = ui.formatter('debugextensions', opts) for extname, extmod in sorted(exts, key=operator.itemgetter(0)): extsource = extmod.__file__ exttestedwith = getattr(extmod, 'testedwith', None) if exttestedwith is not None: exttestedwith = exttestedwith.split() extbuglink = getattr(extmod, 'buglink', None) fm.startitem() if ui.quiet or ui.verbose: fm.write('name', '%s\n', extname) else: fm.write('name', '%s', extname) if not exttestedwith: fm.plain(_(' (untested!)\n')) else: if exttestedwith == ['internal'] or \ util.version() in exttestedwith: fm.plain('\n') else: lasttestedversion = exttestedwith[-1] fm.plain(' (%s!)\n' % lasttestedversion) fm.condwrite(ui.verbose and extsource, 'source', _(' location: %s\n'), extsource or "") fm.condwrite(ui.verbose and exttestedwith, 'testedwith', _(' tested with: %s\n'), ' '.join(exttestedwith or [])) fm.condwrite(ui.verbose and extbuglink, 'buglink', _(' bug reporting: %s\n'), extbuglink or "") fm.end() @command('debugfileset', [('r', 'rev', '', _('apply the filespec on this revision'), _('REV'))], _('[-r REV] FILESPEC')) def debugfileset(ui, repo, expr, **opts): '''parse and apply a fileset specification''' ctx = scmutil.revsingle(repo, opts.get('rev'), None) if ui.verbose: tree = fileset.parse(expr) ui.note(fileset.prettyformat(tree), "\n") for f in ctx.getfileset(expr): ui.write("%s\n" % f) @command('debugfsinfo', [], _('[PATH]'), norepo=True) def debugfsinfo(ui, path="."): """show information detected about current filesystem""" util.writefile('.debugfsinfo', '') ui.write(('exec: %s\n') % (util.checkexec(path) and 'yes' or 'no')) ui.write(('symlink: %s\n') % (util.checklink(path) and 'yes' or 'no')) ui.write(('hardlink: %s\n') % (util.checknlink(path) and 'yes' or 'no')) ui.write(('case-sensitive: %s\n') % 
(util.checkcase('.debugfsinfo') and 'yes' or 'no')) os.unlink('.debugfsinfo') @command('debuggetbundle', [('H', 'head', [], _('id of head node'), _('ID')), ('C', 'common', [], _('id of common node'), _('ID')), ('t', 'type', 'bzip2', _('bundle compression type to use'), _('TYPE'))], _('REPO FILE [-H|-C ID]...'), norepo=True) def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts): """retrieves a bundle from a repo Every ID must be a full-length hex node id string. Saves the bundle to the given file. """ repo = hg.peer(ui, opts, repopath) if not repo.capable('getbundle'): raise error.Abort("getbundle() not supported by target repository") args = {} if common: args['common'] = [bin(s) for s in common] if head: args['heads'] = [bin(s) for s in head] # TODO: get desired bundlecaps from command line. args['bundlecaps'] = None bundle = repo.getbundle('debug', **args) bundletype = opts.get('type', 'bzip2').lower() btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ', 'bundle2': 'HG20'} bundletype = btypes.get(bundletype) if bundletype not in changegroup.bundletypes: raise error.Abort(_('unknown bundle type specified with --type')) changegroup.writebundle(ui, bundle, bundlepath, bundletype) @command('debugignore', [], '[FILE]') def debugignore(ui, repo, *files, **opts): """display the combined ignore pattern and information about ignored files With no argument display the combined ignore pattern. Given space separated file names, shows if the given file is ignored and if so, show the ignore rule (file and line number) that matched it. 
""" ignore = repo.dirstate._ignore if not files: # Show all the patterns includepat = getattr(ignore, 'includepat', None) if includepat is not None: ui.write("%s\n" % includepat) else: raise error.Abort(_("no ignore patterns found")) else: for f in files: ignored = None ignoredata = None if f != '.': if ignore(f): ignored = f ignoredata = repo.dirstate._ignorefileandline(f) else: for p in util.finddirs(f): if ignore(p): ignored = p ignoredata = repo.dirstate._ignorefileandline(p) break if ignored: if ignored == f: ui.write("%s is ignored\n" % f) else: ui.write("%s is ignored because of containing folder %s\n" % (f, ignored)) ignorefile, lineno, line = ignoredata ui.write("(ignore rule in %s, line %d: '%s')\n" % (ignorefile, lineno, line)) else: ui.write("%s is not ignored\n" % f) @command('debugindex', debugrevlogopts + [('f', 'format', 0, _('revlog format'), _('FORMAT'))], _('[-f FORMAT] -c|-m|FILE'), optionalrepo=True) def debugindex(ui, repo, file_=None, **opts): """dump the contents of an index file""" r = cmdutil.openrevlog(repo, 'debugindex', file_, opts) format = opts.get('format', 0) if format not in (0, 1): raise error.Abort(_("unknown format %d") % format) generaldelta = r.version & revlog.REVLOGGENERALDELTA if generaldelta: basehdr = ' delta' else: basehdr = ' base' if ui.debugflag: shortfn = hex else: shortfn = short # There might not be anything in r, so have a sane default idlen = 12 for i in r: idlen = len(shortfn(r.node(i))) break if format == 0: ui.write(" rev offset length " + basehdr + " linkrev" " %s %s p2\n" % ("nodeid".ljust(idlen), "p1".ljust(idlen))) elif format == 1: ui.write(" rev flag offset length" " size " + basehdr + " link p1 p2" " %s\n" % "nodeid".rjust(idlen)) for i in r: node = r.node(i) if generaldelta: base = r.deltaparent(i) else: base = r.chainbase(i) if format == 0: try: pp = r.parents(node) except Exception: pp = [nullid, nullid] ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % ( i, r.start(i), r.length(i), base, 
r.linkrev(i), shortfn(node), shortfn(pp[0]), shortfn(pp[1]))) elif format == 1: pr = r.parentrevs(i) ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % ( i, r.flags(i), r.start(i), r.length(i), r.rawsize(i), base, r.linkrev(i), pr[0], pr[1], shortfn(node))) @command('debugindexdot', debugrevlogopts, _('-c|-m|FILE'), optionalrepo=True) def debugindexdot(ui, repo, file_=None, **opts): """dump an index DAG as a graphviz dot file""" r = cmdutil.openrevlog(repo, 'debugindexdot', file_, opts) ui.write(("digraph G {\n")) for i in r: node = r.node(i) pp = r.parents(node) ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i)) if pp[1] != nullid: ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i)) ui.write("}\n") @command('debugdeltachain', debugrevlogopts + formatteropts, _('-c|-m|FILE'), optionalrepo=True) def debugdeltachain(ui, repo, file_=None, **opts): """dump information about delta chains in a revlog Output can be templatized. Available template keywords are: rev revision number chainid delta chain identifier (numbered by unique base) chainlen delta chain length to this revision prevrev previous revision in delta chain deltatype role of delta / how it was computed compsize compressed size of revision uncompsize uncompressed size of revision chainsize total size of compressed revisions in chain chainratio total chain size divided by uncompressed revision size (new delta chains typically start at ratio 2.00) lindist linear distance from base revision in delta chain to end of this revision extradist total size of revisions not part of this delta chain from base of delta chain to end of this revision; a measurement of how much extra data we need to read/seek across to read the delta chain for this revision extraratio extradist divided by chainsize; another representation of how much unrelated data is needed to load this delta chain """ r = cmdutil.openrevlog(repo, 'debugdeltachain', file_, opts) index = r.index generaldelta = r.version & revlog.REVLOGGENERALDELTA def 
revinfo(rev): e = index[rev] compsize = e[1] uncompsize = e[2] chainsize = 0 if generaldelta: if e[3] == e[5]: deltatype = 'p1' elif e[3] == e[6]: deltatype = 'p2' elif e[3] == rev - 1: deltatype = 'prev' elif e[3] == rev: deltatype = 'base' else: deltatype = 'other' else: if e[3] == rev: deltatype = 'base' else: deltatype = 'prev' chain = r._deltachain(rev)[0] for iterrev in chain: e = index[iterrev] chainsize += e[1] return compsize, uncompsize, deltatype, chain, chainsize fm = ui.formatter('debugdeltachain', opts) fm.plain(' rev chain# chainlen prev delta ' 'size rawsize chainsize ratio lindist extradist ' 'extraratio\n') chainbases = {} for rev in r: comp, uncomp, deltatype, chain, chainsize = revinfo(rev) chainbase = chain[0] chainid = chainbases.setdefault(chainbase, len(chainbases) + 1) basestart = r.start(chainbase) revstart = r.start(rev) lineardist = revstart + comp - basestart extradist = lineardist - chainsize try: prevrev = chain[-2] except IndexError: prevrev = -1 chainratio = float(chainsize) / float(uncomp) extraratio = float(extradist) / float(chainsize) fm.startitem() fm.write('rev chainid chainlen prevrev deltatype compsize ' 'uncompsize chainsize chainratio lindist extradist ' 'extraratio', '%7d %7d %8d %8d %7s %10d %10d %10d %9.5f %9d %9d %10.5f\n', rev, chainid, len(chain), prevrev, deltatype, comp, uncomp, chainsize, chainratio, lineardist, extradist, extraratio, rev=rev, chainid=chainid, chainlen=len(chain), prevrev=prevrev, deltatype=deltatype, compsize=comp, uncompsize=uncomp, chainsize=chainsize, chainratio=chainratio, lindist=lineardist, extradist=extradist, extraratio=extraratio) fm.end() @command('debuginstall', [], '', norepo=True) def debuginstall(ui): '''test Mercurial installation Returns 0 on success. 
''' def writetemp(contents): (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-") f = os.fdopen(fd, "wb") f.write(contents) f.close() return name problems = 0 # encoding ui.status(_("checking encoding (%s)...\n") % encoding.encoding) try: encoding.fromlocal("test") except error.Abort as inst: ui.write(" %s\n" % inst) ui.write(_(" (check that your locale is properly set)\n")) problems += 1 # Python ui.status(_("checking Python executable (%s)\n") % sys.executable) ui.status(_("checking Python version (%s)\n") % ("%s.%s.%s" % sys.version_info[:3])) ui.status(_("checking Python lib (%s)...\n") % os.path.dirname(os.__file__)) # compiled modules ui.status(_("checking installed modules (%s)...\n") % os.path.dirname(__file__)) try: import bdiff, mpatch, base85, osutil dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes except Exception as inst: ui.write(" %s\n" % inst) ui.write(_(" One or more extensions could not be found")) ui.write(_(" (check that you compiled the extensions)\n")) problems += 1 # templates import templater p = templater.templatepaths() ui.status(_("checking templates (%s)...\n") % ' '.join(p)) if p: m = templater.templatepath("map-cmdline.default") if m: # template found, check if it is working try: templater.templater(m) except Exception as inst: ui.write(" %s\n" % inst) p = None else: ui.write(_(" template 'default' not found\n")) p = None else: ui.write(_(" no template directories found\n")) if not p: ui.write(_(" (templates seem to have been installed incorrectly)\n")) problems += 1 # editor ui.status(_("checking commit editor...\n")) editor = ui.geteditor() editor = util.expandpath(editor) cmdpath = util.findexe(shlex.split(editor)[0]) if not cmdpath: if editor == 'vi': ui.write(_(" No commit editor set and can't find vi in PATH\n")) ui.write(_(" (specify a commit editor in your configuration" " file)\n")) else: ui.write(_(" Can't find editor '%s' in PATH\n") % editor) ui.write(_(" (specify a commit editor in your configuration" 
" file)\n")) problems += 1 # check username ui.status(_("checking username...\n")) try: ui.username() except error.Abort as e: ui.write(" %s\n" % e) ui.write(_(" (specify a username in your configuration file)\n")) problems += 1 if not problems: ui.status(_("no problems detected\n")) else: ui.write(_("%s problems detected," " please check your install!\n") % problems) return problems @command('debugknown', [], _('REPO ID...'), norepo=True) def debugknown(ui, repopath, *ids, **opts): """test whether node ids are known to a repo Every ID must be a full-length hex node id string. Returns a list of 0s and 1s indicating unknown/known. """ repo = hg.peer(ui, opts, repopath) if not repo.capable('known'): raise error.Abort("known() not supported by target repository") flags = repo.known([bin(s) for s in ids]) ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags]))) @command('debuglabelcomplete', [], _('LABEL...')) def debuglabelcomplete(ui, repo, *args): '''backwards compatibility with old bash completion scripts (DEPRECATED)''' debugnamecomplete(ui, repo, *args) @command('debugmergestate', [], '') def debugmergestate(ui, repo, *args): """print merge state Use --verbose to print out information about whether v1 or v2 merge state was chosen.""" def _hashornull(h): if h == nullhex: return 'null' else: return h def printrecords(version): ui.write(('* version %s records\n') % version) if version == 1: records = v1records else: records = v2records for rtype, record in records: # pretty print some record types if rtype == 'L': ui.write(('local: %s\n') % record) elif rtype == 'O': ui.write(('other: %s\n') % record) elif rtype == 'm': driver, mdstate = record.split('\0', 1) ui.write(('merge driver: %s (state "%s")\n') % (driver, mdstate)) elif rtype in 'FDC': r = record.split('\0') f, state, hash, lfile, afile, anode, ofile = r[0:7] if version == 1: onode = 'not stored in v1 format' flags = r[7] else: onode, flags = r[7:9] ui.write(('file: %s (record type "%s", state "%s", 
hash %s)\n') % (f, rtype, state, _hashornull(hash))) ui.write((' local path: %s (flags "%s")\n') % (lfile, flags)) ui.write((' ancestor path: %s (node %s)\n') % (afile, _hashornull(anode))) ui.write((' other path: %s (node %s)\n') % (ofile, _hashornull(onode))) else: ui.write(('unrecognized entry: %s\t%s\n') % (rtype, record.replace('\0', '\t'))) # Avoid mergestate.read() since it may raise an exception for unsupported # merge state records. We shouldn't be doing this, but this is OK since this # command is pretty low-level. ms = mergemod.mergestate(repo) # sort so that reasonable information is on top v1records = ms._readrecordsv1() v2records = ms._readrecordsv2() order = 'LOm' def key(r): idx = order.find(r[0]) if idx == -1: return (1, r[1]) else: return (0, idx) v1records.sort(key=key) v2records.sort(key=key) if not v1records and not v2records: ui.write(('no merge state found\n')) elif not v2records: ui.note(('no version 2 merge state\n')) printrecords(1) elif ms._v1v2match(v1records, v2records): ui.note(('v1 and v2 states match: using v2\n')) printrecords(2) else: ui.note(('v1 and v2 states mismatch: using v1\n')) printrecords(1) if ui.verbose: printrecords(2) @command('debugnamecomplete', [], _('NAME...')) def debugnamecomplete(ui, repo, *args): '''complete "names" - tags, open branch names, bookmark names''' names = set() # since we previously only listed open branches, we will handle that # specially (after this for loop) for name, ns in repo.names.iteritems(): if name != 'branches': names.update(ns.listnames(repo)) names.update(tag for (tag, heads, tip, closed) in repo.branchmap().iterbranches() if not closed) completions = set() if not args: args = [''] for a in args: completions.update(n for n in names if n.startswith(a)) ui.write('\n'.join(sorted(completions))) ui.write('\n') @command('debuglocks', [('L', 'force-lock', None, _('free the store lock (DANGEROUS)')), ('W', 'force-wlock', None, _('free the working state lock (DANGEROUS)'))], _('[OPTION]...')) 
def debuglocks(ui, repo, **opts): """show or modify state of locks By default, this command will show which locks are held. This includes the user and process holding the lock, the amount of time the lock has been held, and the machine name where the process is running if it's not local. Locks protect the integrity of Mercurial's data, so should be treated with care. System crashes or other interruptions may cause locks to not be properly released, though Mercurial will usually detect and remove such stale locks automatically. However, detecting stale locks may not always be possible (for instance, on a shared filesystem). Removing locks may also be blocked by filesystem permissions. Returns 0 if no locks are held. """ if opts.get('force_lock'): repo.svfs.unlink('lock') if opts.get('force_wlock'): repo.vfs.unlink('wlock') if opts.get('force_lock') or opts.get('force_lock'): return 0 now = time.time() held = 0 def report(vfs, name, method): # this causes stale locks to get reaped for more accurate reporting try: l = method(False) except error.LockHeld: l = None if l: l.release() else: try: stat = vfs.lstat(name) age = now - stat.st_mtime user = util.username(stat.st_uid) locker = vfs.readlock(name) if ":" in locker: host, pid = locker.split(':') if host == socket.gethostname(): locker = 'user %s, process %s' % (user, pid) else: locker = 'user %s, process %s, host %s' \ % (user, pid, host) ui.write("%-6s %s (%ds)\n" % (name + ":", locker, age)) return 1 except OSError as e: if e.errno != errno.ENOENT: raise ui.write("%-6s free\n" % (name + ":")) return 0 held += report(repo.svfs, "lock", repo.lock) held += report(repo.vfs, "wlock", repo.wlock) return held @command('debugobsolete', [('', 'flags', 0, _('markers flag')), ('', 'record-parents', False, _('record parent information for the precursor')), ('r', 'rev', [], _('display markers relevant to REV')), ] + commitopts2, _('[OBSOLETED [REPLACEMENT ...]]')) def debugobsolete(ui, repo, precursor=None, *successors, 
**opts): """create arbitrary obsolete marker With no arguments, displays the list of obsolescence markers.""" def parsenodeid(s): try: # We do not use revsingle/revrange functions here to accept # arbitrary node identifiers, possibly not present in the # local repository. n = bin(s) if len(n) != len(nullid): raise TypeError() return n except TypeError: raise error.Abort('changeset references must be full hexadecimal ' 'node identifiers') if precursor is not None: if opts['rev']: raise error.Abort('cannot select revision when creating marker') metadata = {} metadata['user'] = opts['user'] or ui.username() succs = tuple(parsenodeid(succ) for succ in successors) l = repo.lock() try: tr = repo.transaction('debugobsolete') try: date = opts.get('date') if date: date = util.parsedate(date) else: date = None prec = parsenodeid(precursor) parents = None if opts['record_parents']: if prec not in repo.unfiltered(): raise error.Abort('cannot used --record-parents on ' 'unknown changesets') parents = repo.unfiltered()[prec].parents() parents = tuple(p.node() for p in parents) repo.obsstore.create(tr, prec, succs, opts['flags'], parents=parents, date=date, metadata=metadata) tr.close() except ValueError as exc: raise error.Abort(_('bad obsmarker input: %s') % exc) finally: tr.release() finally: l.release() else: if opts['rev']: revs = scmutil.revrange(repo, opts['rev']) nodes = [repo[r].node() for r in revs] markers = list(obsolete.getmarkers(repo, nodes=nodes)) markers.sort(key=lambda x: x._data) else: markers = obsolete.getmarkers(repo) for m in markers: cmdutil.showmarker(ui, m) @command('debugpathcomplete', [('f', 'full', None, _('complete an entire path')), ('n', 'normal', None, _('show only normal files')), ('a', 'added', None, _('show only added files')), ('r', 'removed', None, _('show only removed files'))], _('FILESPEC...')) def debugpathcomplete(ui, repo, *specs, **opts): '''complete part or all of a tracked path This command supports shells that offer path name 
completion. It currently completes only files already known to the
    dirstate.

    Completion extends only to the next path segment unless
    --full is specified, in which case entire paths are used.'''

    def complete(path, acceptable):
        # Return (files, dirs) from the dirstate that extend 'path',
        # restricted to entries whose state char is in 'acceptable'.
        dirstate = repo.dirstate
        spec = os.path.normpath(os.path.join(os.getcwd(), path))
        rootdir = repo.root + os.sep
        if spec != repo.root and not spec.startswith(rootdir):
            # path lies outside the repository
            return [], []
        if os.path.isdir(spec):
            spec += '/'
        spec = spec[len(rootdir):]
        # dirstate paths always use '/'; translate on Windows-style OSes
        fixpaths = os.sep != '/'
        if fixpaths:
            spec = spec.replace(os.sep, '/')
        speclen = len(spec)
        fullpaths = opts['full']
        files, dirs = set(), set()
        adddir, addfile = dirs.add, files.add
        for f, st in dirstate.iteritems():
            if f.startswith(spec) and st[0] in acceptable:
                if fixpaths:
                    f = f.replace('/', os.sep)
                if fullpaths:
                    addfile(f)
                    continue
                # otherwise stop at the next path separator after the spec
                s = f.find(os.sep, speclen)
                if s >= 0:
                    adddir(f[:s])
                else:
                    addfile(f)
        return files, dirs

    # build the set of acceptable dirstate states from the flags;
    # default to all of them when no filter flag was given
    acceptable = ''
    if opts['normal']:
        acceptable += 'nm'
    if opts['added']:
        acceptable += 'a'
    if opts['removed']:
        acceptable += 'r'
    cwd = repo.getcwd()
    if not specs:
        specs = ['.']

    files, dirs = set(), set()
    for spec in specs:
        f, d = complete(spec, acceptable or 'nmar')
        files.update(f)
        dirs.update(d)
    files.update(dirs)
    ui.write('\n'.join(repo.pathto(p, cwd) for p in sorted(files)))
    ui.write('\n')

@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'), norepo=True)
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
    '''access the pushkey key/value protocol

    With two args, list the keys in the given namespace.

    With five args, set a key to new if it currently is set to old.
    Reports success or failure.
''' target = hg.peer(ui, {}, repopath) if keyinfo: key, old, new = keyinfo r = target.pushkey(namespace, key, old, new) ui.status(str(r) + '\n') return not r else: for k, v in sorted(target.listkeys(namespace).iteritems()): ui.write("%s\t%s\n" % (k.encode('string-escape'), v.encode('string-escape'))) @command('debugpvec', [], _('A B')) def debugpvec(ui, repo, a, b=None): ca = scmutil.revsingle(repo, a) cb = scmutil.revsingle(repo, b) pa = pvec.ctxpvec(ca) pb = pvec.ctxpvec(cb) if pa == pb: rel = "=" elif pa > pb: rel = ">" elif pa < pb: rel = "<" elif pa | pb: rel = "|" ui.write(_("a: %s\n") % pa) ui.write(_("b: %s\n") % pb) ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth)) ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") % (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec), pa.distance(pb), rel)) @command('debugrebuilddirstate|debugrebuildstate', [('r', 'rev', '', _('revision to rebuild to'), _('REV')), ('', 'minimal', None, _('only rebuild files that are inconsistent with ' 'the working copy parent')), ], _('[-r REV]')) def debugrebuilddirstate(ui, repo, rev, **opts): """rebuild the dirstate as it would look like for the given revision If no revision is specified the first current parent will be used. The dirstate will be set to the files of the given revision. The actual working directory content or existing dirstate information such as adds or removes is not considered. ``minimal`` will only rebuild the dirstate status for files that claim to be tracked but are not in the parent manifest, or that exist in the parent manifest but are not in the dirstate. It will not change adds, removes, or modified files that are in the working copy parent. One use of this command is to make the next :hg:`status` invocation check the actual file content. """ ctx = scmutil.revsingle(repo, rev) with repo.wlock(): dirstate = repo.dirstate changedfiles = None # See command doc for what minimal does. 
if opts.get('minimal'): manifestfiles = set(ctx.manifest().keys()) dirstatefiles = set(dirstate) manifestonly = manifestfiles - dirstatefiles dsonly = dirstatefiles - manifestfiles dsnotadded = set(f for f in dsonly if dirstate[f] != 'a') changedfiles = manifestonly | dsnotadded dirstate.rebuild(ctx.node(), ctx.manifest(), changedfiles) @command('debugrebuildfncache', [], '') def debugrebuildfncache(ui, repo): """rebuild the fncache file""" repair.rebuildfncache(ui, repo) @command('debugrename', [('r', 'rev', '', _('revision to debug'), _('REV'))], _('[-r REV] FILE')) def debugrename(ui, repo, file1, *pats, **opts): """dump rename information""" ctx = scmutil.revsingle(repo, opts.get('rev')) m = scmutil.match(ctx, (file1,) + pats, opts) for abs in ctx.walk(m): fctx = ctx[abs] o = fctx.filelog().renamed(fctx.filenode()) rel = m.rel(abs) if o: ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1]))) else: ui.write(_("%s not renamed\n") % rel) @command('debugrevlog', debugrevlogopts + [('d', 'dump', False, _('dump index data'))], _('-c|-m|FILE'), optionalrepo=True) def debugrevlog(ui, repo, file_=None, **opts): """show data and statistics about a revlog""" r = cmdutil.openrevlog(repo, 'debugrevlog', file_, opts) if opts.get("dump"): numrevs = len(r) ui.write("# rev p1rev p2rev start end deltastart base p1 p2" " rawsize totalsize compression heads chainlen\n") ts = 0 heads = set() for rev in xrange(numrevs): dbase = r.deltaparent(rev) if dbase == -1: dbase = rev cbase = r.chainbase(rev) clen = r.chainlen(rev) p1, p2 = r.parentrevs(rev) rs = r.rawsize(rev) ts = ts + rs heads -= set(r.parentrevs(rev)) heads.add(rev) ui.write("%5d %5d %5d %5d %5d %10d %4d %4d %4d %7d %9d " "%11d %5d %8d\n" % (rev, p1, p2, r.start(rev), r.end(rev), r.start(dbase), r.start(cbase), r.start(p1), r.start(p2), rs, ts, ts / r.end(rev), len(heads), clen)) return 0 v = r.version format = v & 0xFFFF flags = [] gdelta = False if v & revlog.REVLOGNGINLINEDATA: flags.append('inline') if v & 
revlog.REVLOGGENERALDELTA: gdelta = True flags.append('generaldelta') if not flags: flags = ['(none)'] nummerges = 0 numfull = 0 numprev = 0 nump1 = 0 nump2 = 0 numother = 0 nump1prev = 0 nump2prev = 0 chainlengths = [] datasize = [None, 0, 0L] fullsize = [None, 0, 0L] deltasize = [None, 0, 0L] def addsize(size, l): if l[0] is None or size < l[0]: l[0] = size if size > l[1]: l[1] = size l[2] += size numrevs = len(r) for rev in xrange(numrevs): p1, p2 = r.parentrevs(rev) delta = r.deltaparent(rev) if format > 0: addsize(r.rawsize(rev), datasize) if p2 != nullrev: nummerges += 1 size = r.length(rev) if delta == nullrev: chainlengths.append(0) numfull += 1 addsize(size, fullsize) else: chainlengths.append(chainlengths[delta] + 1) addsize(size, deltasize) if delta == rev - 1: numprev += 1 if delta == p1: nump1prev += 1 elif delta == p2: nump2prev += 1 elif delta == p1: nump1 += 1 elif delta == p2: nump2 += 1 elif delta != nullrev: numother += 1 # Adjust size min value for empty cases for size in (datasize, fullsize, deltasize): if size[0] is None: size[0] = 0 numdeltas = numrevs - numfull numoprev = numprev - nump1prev - nump2prev totalrawsize = datasize[2] datasize[2] /= numrevs fulltotal = fullsize[2] fullsize[2] /= numfull deltatotal = deltasize[2] if numrevs - numfull > 0: deltasize[2] /= numrevs - numfull totalsize = fulltotal + deltatotal avgchainlen = sum(chainlengths) / numrevs maxchainlen = max(chainlengths) compratio = 1 if totalsize: compratio = totalrawsize / totalsize basedfmtstr = '%%%dd\n' basepcfmtstr = '%%%dd %s(%%5.2f%%%%)\n' def dfmtstr(max): return basedfmtstr % len(str(max)) def pcfmtstr(max, padding=0): return basepcfmtstr % (len(str(max)), ' ' * padding) def pcfmt(value, total): if total: return (value, 100 * float(value) / total) else: return value, 100.0 ui.write(('format : %d\n') % format) ui.write(('flags : %s\n') % ', '.join(flags)) ui.write('\n') fmt = pcfmtstr(totalsize) fmt2 = dfmtstr(totalsize) ui.write(('revisions : ') + fmt2 % numrevs) 
ui.write((' merges : ') + fmt % pcfmt(nummerges, numrevs)) ui.write((' normal : ') + fmt % pcfmt(numrevs - nummerges, numrevs)) ui.write(('revisions : ') + fmt2 % numrevs) ui.write((' full : ') + fmt % pcfmt(numfull, numrevs)) ui.write((' deltas : ') + fmt % pcfmt(numdeltas, numrevs)) ui.write(('revision size : ') + fmt2 % totalsize) ui.write((' full : ') + fmt % pcfmt(fulltotal, totalsize)) ui.write((' deltas : ') + fmt % pcfmt(deltatotal, totalsize)) ui.write('\n') fmt = dfmtstr(max(avgchainlen, compratio)) ui.write(('avg chain length : ') + fmt % avgchainlen) ui.write(('max chain length : ') + fmt % maxchainlen) ui.write(('compression ratio : ') + fmt % compratio) if format > 0: ui.write('\n') ui.write(('uncompressed data size (min/max/avg) : %d / %d / %d\n') % tuple(datasize)) ui.write(('full revision size (min/max/avg) : %d / %d / %d\n') % tuple(fullsize)) ui.write(('delta size (min/max/avg) : %d / %d / %d\n') % tuple(deltasize)) if numdeltas > 0: ui.write('\n') fmt = pcfmtstr(numdeltas) fmt2 = pcfmtstr(numdeltas, 4) ui.write(('deltas against prev : ') + fmt % pcfmt(numprev, numdeltas)) if numprev > 0: ui.write((' where prev = p1 : ') + fmt2 % pcfmt(nump1prev, numprev)) ui.write((' where prev = p2 : ') + fmt2 % pcfmt(nump2prev, numprev)) ui.write((' other : ') + fmt2 % pcfmt(numoprev, numprev)) if gdelta: ui.write(('deltas against p1 : ') + fmt % pcfmt(nump1, numdeltas)) ui.write(('deltas against p2 : ') + fmt % pcfmt(nump2, numdeltas)) ui.write(('deltas against other : ') + fmt % pcfmt(numother, numdeltas)) @command('debugrevspec', [('', 'optimize', None, _('print parsed tree after optimizing'))], ('REVSPEC')) def debugrevspec(ui, repo, expr, **opts): """parse and apply a revision specification Use --verbose to print the parsed tree before and after aliases expansion. 
""" if ui.verbose: tree = revset.parse(expr, lookup=repo.__contains__) ui.note(revset.prettyformat(tree), "\n") newtree = revset.findaliases(ui, tree) if newtree != tree: ui.note(revset.prettyformat(newtree), "\n") tree = newtree newtree = revset.foldconcat(tree) if newtree != tree: ui.note(revset.prettyformat(newtree), "\n") if opts["optimize"]: weight, optimizedtree = revset.optimize(newtree, True) ui.note("* optimized:\n", revset.prettyformat(optimizedtree), "\n") func = revset.match(ui, expr, repo) revs = func(repo) if ui.verbose: ui.note("* set:\n", revset.prettyformatset(revs), "\n") for c in revs: ui.write("%s\n" % c) @command('debugsetparents', [], _('REV1 [REV2]')) def debugsetparents(ui, repo, rev1, rev2=None): """manually set the parents of the current working directory This is useful for writing repository conversion tools, but should be used with care. For example, neither the working directory nor the dirstate is updated, so file status may be incorrect after running this command. Returns 0 on success. 
""" r1 = scmutil.revsingle(repo, rev1).node() r2 = scmutil.revsingle(repo, rev2, 'null').node() with repo.wlock(): repo.dirstate.beginparentchange() repo.setparents(r1, r2) repo.dirstate.endparentchange() @command('debugdirstate|debugstate', [('', 'nodates', None, _('do not display the saved mtime')), ('', 'datesort', None, _('sort by saved mtime'))], _('[OPTION]...')) def debugstate(ui, repo, **opts): """show the contents of the current dirstate""" nodates = opts.get('nodates') datesort = opts.get('datesort') timestr = "" if datesort: keyfunc = lambda x: (x[1][3], x[0]) # sort by mtime, then by filename else: keyfunc = None # sort by filename for file_, ent in sorted(repo.dirstate._map.iteritems(), key=keyfunc): if ent[3] == -1: timestr = 'unset ' elif nodates: timestr = 'set ' else: timestr = time.strftime("%Y-%m-%d %H:%M:%S ", time.localtime(ent[3])) if ent[1] & 0o20000: mode = 'lnk' else: mode = '%3o' % (ent[1] & 0o777 & ~util.umask) ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_)) for f in repo.dirstate.copies(): ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f)) @command('debugsub', [('r', 'rev', '', _('revision to check'), _('REV'))], _('[-r REV] [REV]')) def debugsub(ui, repo, rev=None): ctx = scmutil.revsingle(repo, rev, None) for k, v in sorted(ctx.substate.items()): ui.write(('path %s\n') % k) ui.write((' source %s\n') % v[0]) ui.write((' revision %s\n') % v[1]) @command('debugsuccessorssets', [], _('[REV]')) def debugsuccessorssets(ui, repo, *revs): """show set of successors for revision A successors set of changeset A is a consistent group of revisions that succeed A. It contains non-obsolete changesets only. In most cases a changeset A has a single successors set containing a single successor (changeset A replaced by A'). A changeset that is made obsolete with no successors are called "pruned". Such changesets have no successors sets at all. 
A changeset that has been "split" will have a successors set containing more than one successor. A changeset that has been rewritten in multiple different ways is called "divergent". Such changesets have multiple successor sets (each of which may also be split, i.e. have multiple successors). Results are displayed as follows:: Here rev2 has two possible (i.e. divergent) successors sets. The first holds one element, whereas the second holds three (i.e. the changeset has been split). """ # passed to successorssets caching computation from one call to another cache = {} ctx2str = str node2str = short if ui.debug(): def ctx2str(ctx): return ctx.hex() node2str = hex for rev in scmutil.revrange(repo, revs): ctx = repo[rev] ui.write('%s\n'% ctx2str(ctx)) for succsset in obsolete.successorssets(repo, ctx.node(), cache): if succsset: ui.write(' ') ui.write(node2str(succsset[0])) for node in succsset[1:]: ui.write(' ') ui.write(node2str(node)) ui.write('\n') @command('debugwalk', walkopts, _('[OPTION]... [FILE]...'), inferrepo=True) def debugwalk(ui, repo, *pats, **opts): """show how files match on given patterns""" m = scmutil.match(repo[None], pats, opts) items = list(repo.walk(m)) if not items: return f = lambda fn: fn if ui.configbool('ui', 'slash') and os.sep != '/': f = lambda fn: util.normpath(fn) fmt = 'f %%-%ds %%-%ds %%s' % ( max([len(abs) for abs in items]), max([len(m.rel(abs)) for abs in items])) for abs in items: line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '') ui.write("%s\n" % line.rstrip()) @command('debugwireargs', [('', 'three', '', 'three'), ('', 'four', '', 'four'), ('', 'five', '', 'five'), ] + remoteopts, _('REPO [OPTIONS]... 
[ONE [TWO]]'),
    norepo=True)
def debugwireargs(ui, repopath, *vals, **opts):
    # Round-trip arbitrary arguments through the wire protocol (debug aid).
    repo = hg.peer(ui, opts, repopath)
    # strip the generic remote options that hg.peer consumed; only the
    # remaining set flags are forwarded as wire arguments
    for opt in remoteopts:
        del opts[opt[1]]
    args = {}
    for k, v in opts.iteritems():
        if v:
            args[k] = v
    # run twice to check that we don't mess up the stream for the next command
    res1 = repo.debugwireargs(*vals, **args)
    res2 = repo.debugwireargs(*vals, **args)
    ui.write("%s\n" % res1)
    if res1 != res2:
        ui.warn("%s\n" % res2)

@command('^diff',
    [('r', 'rev', [], _('revision'), _('REV')),
    ('c', 'change', '', _('change made by revision'), _('REV'))
    ] + diffopts + diffopts2 + walkopts + subrepoopts,
    _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...'),
    inferrepo=True)
def diff(ui, repo, *pats, **opts):
    """diff repository (or selected files)

    Show differences between revisions for the specified files.

    Differences between files are shown using the unified diff format.

    .. note::

       :hg:`diff` may generate unexpected results for merges, as it will
       default to comparing against the working directory's first
       parent changeset if no revisions are specified.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its first parent.

    Alternatively you can specify -c/--change with a revision to see
    the changes in that changeset relative to its first parent.

    Without the -a/--text option, diff will avoid generating diffs of
    files it detects as binary. With -a, diff will generate a diff
    anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. For more information, read :hg:`help diffs`.

    .. container:: verbose

      Examples:

      - compare a file in the current working directory to its parent::

          hg diff foo.c

      - compare two historical versions of a directory, with rename info::

          hg diff --git -r 1.0:1.2 lib/

      - get change stats relative to the last change on some date::

          hg diff --stat -r "date('may 2')"

      - diff all newly-added files that contain a keyword::

          hg diff "set:added() and grep(GNU)"

      - compare a revision and its parents::

          hg diff -c 9353         # compare against first parent
          hg diff -r 9353^:9353   # same using revset syntax
          hg diff -r 9353^2:9353  # compare against the second parent

    Returns 0 on success.
    """

    revs = opts.get('rev')
    change = opts.get('change')
    stat = opts.get('stat')
    reverse = opts.get('reverse')

    # --rev and --change are mutually exclusive ways of picking endpoints
    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise error.Abort(msg)
    elif change:
        # -c REV: compare REV against its first parent
        node2 = scmutil.revsingle(repo, change, None).node()
        node1 = repo[node2].p1().node()
    else:
        node1, node2 = scmutil.revpair(repo, revs)

    if reverse:
        # --reverse swaps the two comparison endpoints
        node1, node2 = node2, node1

    diffopts = patch.diffallopts(ui, opts)
    m = scmutil.match(repo[node2], pats, opts)
    cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat,
                           listsubrepos=opts.get('subrepos'),
                           root=opts.get('root'))

@command('^export',
    [('o', 'output', '',
     _('print output to file with formatted name'), _('FORMAT')),
    ('', 'switch-parent', None, _('diff against the second parent')),
    ('r', 'rev', [], _('revisions to export'), _('REV')),
    ] + diffopts,
    _('[OPTION]... [-o OUTFILESPEC] [-r] [REV]...'))
def export(ui, repo, *changesets, **opts):
    """dump the header and diffs for one or more changesets

    Print the changeset header and diffs for one or more revisions.
    If no revision is given, the parent of the working directory is used.

    The information shown in the changeset header is: author, date,
    branch name (if non-default), changeset hash, parent(s) and commit
    comment.

    ..
note::

       :hg:`export` may generate unexpected diff output for merge
       changesets, as it will compare the merge changeset against its
       first parent only.

    Output may be to a file, in which case the name of the file is
    given using a format string. The formatting rules are as follows:

    :``%%``: literal "%" character
    :``%H``: changeset hash (40 hexadecimal digits)
    :``%N``: number of patches being generated
    :``%R``: changeset revision number
    :``%b``: basename of the exporting repository
    :``%h``: short-form changeset hash (12 hexadecimal digits)
    :``%m``: first line of the commit message (only alphanumeric characters)
    :``%n``: zero-padded sequence number, starting at 1
    :``%r``: zero-padded changeset revision number

    Without the -a/--text option, export will avoid generating diffs
    of files it detects as binary. With -a, export will generate a
    diff anyway, probably with undesirable results.

    Use the -g/--git option to generate diffs in the git extended diff
    format. See :hg:`help diffs` for more information.

    With the --switch-parent option, the diff will be against the
    second parent. It can be useful to review a merge.

    .. container:: verbose

      Examples:

      - use export and import to transplant a bugfix to the current
        branch::

          hg export -r 9353 | hg import -

      - export all the changesets between two revisions to a file with
        rename information::

          hg export --git -r 123:150 > changes.txt

      - split outgoing changes into a series of patches with
        descriptive names::

          hg export -r "outgoing()" -o "%n-%m.patch"

    Returns 0 on success.
    """
    # positional REVs and -r/--rev REVs are merged into one list
    changesets += tuple(opts.get('rev', []))
    if not changesets:
        # default to the working directory's parent
        changesets = ['.']
    revs = scmutil.revrange(repo, changesets)
    if not revs:
        raise error.Abort(_("export requires at least one changeset"))
    if len(revs) > 1:
        ui.note(_('exporting patches:\n'))
    else:
        ui.note(_('exporting patch:\n'))
    cmdutil.export(repo, revs, template=opts.get('output'),
                 switch_parent=opts.get('switch_parent'),
                 opts=patch.diffallopts(ui, opts))

@command('files',
    [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')),
    ('0', 'print0', None, _('end filenames with NUL, for use with xargs')),
    ] + walkopts + formatteropts + subrepoopts,
    _('[OPTION]... [PATTERN]...'))
def files(ui, repo, *pats, **opts):
    """list tracked files

    Print files under Mercurial control in the working directory or
    specified revision whose names match the given patterns (excluding
    removed files).

    If no patterns are given to match, this command prints the names
    of all files under Mercurial control in the working directory.

    .. container:: verbose

      Examples:

      - list all files under the current directory::

          hg files .

      - shows sizes and flags for current revision::

          hg files -vr .

      - list all files named README::

          hg files -I "**/README"

      - list all binary files::

          hg files "set:binary()"

      - find files containing a regular expression::

          hg files "set:grep('bob')"

      - search tracked file contents with xargs and grep::

          hg files -0 | xargs -0 grep foo

    See :hg:`help patterns` and :hg:`help filesets` for more information
    on specifying file patterns.

    Returns 0 if a match is found, 1 otherwise.

    """
    ctx = scmutil.revsingle(repo, opts.get('rev'), None)

    # -0/--print0 terminates each name with NUL instead of newline
    end = '\n'
    if opts.get('print0'):
        end = '\0'
    fm = ui.formatter('files', opts)
    fmt = '%s' + end

    m = scmutil.match(ctx, pats, opts)
    ret = cmdutil.files(ui, ctx, m, fm, fmt, opts.get('subrepos'))

    fm.end()

    return ret

@command('^forget', walkopts, _('[OPTION]...
FILE...'), inferrepo=True) def forget(ui, repo, *pats, **opts): """forget the specified files on the next commit Mark the specified files so they will no longer be tracked after the next commit. This only removes files from the current branch, not from the entire project history, and it does not delete them from the working directory. To delete the file from the working directory, see :hg:`remove`. To undo a forget before the next commit, see :hg:`add`. .. container:: verbose Examples: - forget newly-added binary files:: hg forget "set:added() and binary()" - forget files that would be excluded by .hgignore:: hg forget "set:hgignore()" Returns 0 on success. """ if not pats: raise error.Abort(_('no files specified')) m = scmutil.match(repo[None], pats, opts) rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0] return rejected and 1 or 0 @command( 'graft', [('r', 'rev', [], _('revisions to graft'), _('REV')), ('c', 'continue', False, _('resume interrupted graft')), ('e', 'edit', False, _('invoke editor on commit messages')), ('', 'log', None, _('append graft info to log message')), ('f', 'force', False, _('force graft')), ('D', 'currentdate', False, _('record the current date as commit date')), ('U', 'currentuser', False, _('record the current user as committer'), _('DATE'))] + commitopts2 + mergetoolopts + dryrunopts, _('[OPTION]... [-r REV]... REV...')) def graft(ui, repo, *revs, **opts): '''copy changes from other branches onto the current branch This command uses Mercurial's merge logic to copy individual changes from other branches without merging branches in the history graph. This is sometimes known as 'backporting' or 'cherry-picking'. By default, graft will copy user, date, and description from the source changesets. Changesets that are ancestors of the current revision, that have already been grafted, or that are merges will be skipped. 
If --log is specified, log messages will have a comment appended of the form:: (grafted from CHANGESETHASH) If --force is specified, revisions will be grafted even if they are already ancestors of or have been grafted to the destination. This is useful when the revisions have since been backed out. If a graft merge results in conflicts, the graft process is interrupted so that the current merge can be manually resolved. Once all conflicts are addressed, the graft process can be continued with the -c/--continue option. .. note:: The -c/--continue option does not reapply earlier options, except for --force. .. container:: verbose Examples: - copy a single change to the stable branch and edit its description:: hg update stable hg graft --edit 9393 - graft a range of changesets with one exception, updating dates:: hg graft -D "2085::2093 and not 2091" - continue a graft after resolving conflicts:: hg graft -c - show the source of a grafted changeset:: hg log --debug -r . - show revisions sorted by date:: hg log -r 'sort(all(), date)' See :hg:`help revisions` and :hg:`help revsets` for more about specifying revisions. Returns 0 on successful completion. 
''' with repo.wlock(): return _dograft(ui, repo, *revs, **opts) def _dograft(ui, repo, *revs, **opts): if revs and opts['rev']: ui.warn(_('warning: inconsistent use of --rev might give unexpected ' 'revision ordering!\n')) revs = list(revs) revs.extend(opts['rev']) if not opts.get('user') and opts.get('currentuser'): opts['user'] = ui.username() if not opts.get('date') and opts.get('currentdate'): opts['date'] = "%d %d" % util.makedate() editor = cmdutil.getcommiteditor(editform='graft', **opts) cont = False if opts['continue']: cont = True if revs: raise error.Abort(_("can't specify --continue and revisions")) # read in unfinished revisions try: nodes = repo.vfs.read('graftstate').splitlines() revs = [repo[node].rev() for node in nodes] except IOError as inst: if inst.errno != errno.ENOENT: raise raise error.Abort(_("no graft state found, can't continue")) else: cmdutil.checkunfinished(repo) cmdutil.bailifchanged(repo) if not revs: raise error.Abort(_('no revisions specified')) revs = scmutil.revrange(repo, revs) skipped = set() # check for merges for rev in repo.revs('%ld and merge()', revs): ui.warn(_('skipping ungraftable merge revision %s\n') % rev) skipped.add(rev) revs = [r for r in revs if r not in skipped] if not revs: return -1 # Don't check in the --continue case, in effect retaining --force across # --continues. That's because without --force, any revisions we decided to # skip would have been filtered out here, so they wouldn't have made their # way to the graftstate. With --force, any revisions we would have otherwise # skipped would not have been filtered out, and if they hadn't been applied # already, they'd have been in the graftstate. if not (cont or opts.get('force')): # check for ancestors of dest branch crev = repo['.'].rev() ancestors = repo.changelog.ancestors([crev], inclusive=True) # Cannot use x.remove(y) on smart set, this has to be a list. 
# XXX make this lazy in the future revs = list(revs) # don't mutate while iterating, create a copy for rev in list(revs): if rev in ancestors: ui.warn(_('skipping ancestor revision %d:%s\n') % (rev, repo[rev])) # XXX remove on list is slow revs.remove(rev) if not revs: return -1 # analyze revs for earlier grafts ids = {} for ctx in repo.set("%ld", revs): ids[ctx.hex()] = ctx.rev() n = ctx.extra().get('source') if n: ids[n] = ctx.rev() # check ancestors for earlier grafts ui.debug('scanning for duplicate grafts\n') for rev in repo.changelog.findmissingrevs(revs, [crev]): ctx = repo[rev] n = ctx.extra().get('source') if n in ids: try: r = repo[n].rev() except error.RepoLookupError: r = None if r in revs: ui.warn(_('skipping revision %d:%s ' '(already grafted to %d:%s)\n') % (r, repo[r], rev, ctx)) revs.remove(r) elif ids[n] in revs: if r is None: ui.warn(_('skipping already grafted revision %d:%s ' '(%d:%s also has unknown origin %s)\n') % (ids[n], repo[ids[n]], rev, ctx, n[:12])) else: ui.warn(_('skipping already grafted revision %d:%s ' '(%d:%s also has origin %d:%s)\n') % (ids[n], repo[ids[n]], rev, ctx, r, n[:12])) revs.remove(ids[n]) elif ctx.hex() in ids: r = ids[ctx.hex()] ui.warn(_('skipping already grafted revision %d:%s ' '(was grafted from %d:%s)\n') % (r, repo[r], rev, ctx)) revs.remove(r) if not revs: return -1 for pos, ctx in enumerate(repo.set("%ld", revs)): desc = '%d:%s "%s"' % (ctx.rev(), ctx, ctx.description().split('\n', 1)[0]) names = repo.nodetags(ctx.node()) + repo.nodebookmarks(ctx.node()) if names: desc += ' (%s)' % ' '.join(names) ui.status(_('grafting %s\n') % desc) if opts.get('dry_run'): continue source = ctx.extra().get('source') extra = {} if source: extra['source'] = source extra['intermediate-source'] = ctx.hex() else: extra['source'] = ctx.hex() user = ctx.user() if opts.get('user'): user = opts['user'] date = ctx.date() if opts.get('date'): date = opts['date'] message = ctx.description() if opts.get('log'): message += '\n(grafted 
from %s)' % ctx.hex() # we don't merge the first commit when continuing if not cont: # perform the graft merge with p1(rev) as 'ancestor' try: # ui.forcemerge is an internal variable, do not document repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'graft') stats = mergemod.graft(repo, ctx, ctx.p1(), ['local', 'graft']) finally: repo.ui.setconfig('ui', 'forcemerge', '', 'graft') # report any conflicts if stats and stats[3] > 0: # write out state for --continue nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]] repo.vfs.write('graftstate', ''.join(nodelines)) extra = '' if opts.get('user'): extra += ' --user %s' % opts['user'] if opts.get('date'): extra += ' --date %s' % opts['date'] if opts.get('log'): extra += ' --log' hint=_('use hg resolve and hg graft --continue%s') % extra raise error.Abort( _("unresolved conflicts, can't continue"), hint=hint) else: cont = False # commit node = repo.commit(text=message, user=user, date=date, extra=extra, editor=editor) if node is None: ui.warn( _('note: graft of %d:%s created no changes to commit\n') % (ctx.rev(), ctx)) # remove state when we complete successfully if not opts.get('dry_run'): util.unlinkpath(repo.join('graftstate'), ignoremissing=True) return 0 @command('grep', [('0', 'print0', None, _('end fields with NUL')), ('', 'all', None, _('print all revisions that match')), ('a', 'text', None, _('treat all files as text')), ('f', 'follow', None, _('follow changeset history,' ' or file history across copies and renames')), ('i', 'ignore-case', None, _('ignore case when matching')), ('l', 'files-with-matches', None, _('print only filenames and revisions that match')), ('n', 'line-number', None, _('print matching line numbers')), ('r', 'rev', [], _('only search files changed within revision range'), _('REV')), ('u', 'user', None, _('list the author (long with -v)')), ('d', 'date', None, _('list the date (short with -q)')), ] + walkopts, _('[OPTION]... 
PATTERN [FILE]...'), inferrepo=True) def grep(ui, repo, pattern, *pats, **opts): """search for a pattern in specified files and revisions Search revisions of files for a regular expression. This command behaves differently than Unix grep. It only accepts Python/Perl regexps. It searches repository history, not the working directory. It always prints the revision number in which a match appears. By default, grep only prints output for the first revision of a file in which it finds a match. To get it to print every revision that contains a change in match status ("-" for a match that becomes a non-match, or "+" for a non-match that becomes a match), use the --all flag. Returns 0 if a match is found, 1 otherwise. """ reflags = re.M if opts.get('ignore_case'): reflags |= re.I try: regexp = util.re.compile(pattern, reflags) except re.error as inst: ui.warn(_("grep: invalid match pattern: %s\n") % inst) return 1 sep, eol = ':', '\n' if opts.get('print0'): sep = eol = '\0' getfile = util.lrucachefunc(repo.file) def matchlines(body): begin = 0 linenum = 0 while begin < len(body): match = regexp.search(body, begin) if not match: break mstart, mend = match.span() linenum += body.count('\n', begin, mstart) + 1 lstart = body.rfind('\n', begin, mstart) + 1 or begin begin = body.find('\n', mend) + 1 or len(body) + 1 lend = begin - 1 yield linenum, mstart - lstart, mend - lstart, body[lstart:lend] class linestate(object): def __init__(self, line, linenum, colstart, colend): self.line = line self.linenum = linenum self.colstart = colstart self.colend = colend def __hash__(self): return hash((self.linenum, self.line)) def __eq__(self, other): return self.line == other.line def __iter__(self): yield (self.line[:self.colstart], '') yield (self.line[self.colstart:self.colend], 'grep.match') rest = self.line[self.colend:] while rest != '': match = regexp.search(rest) if not match: yield (rest, '') break mstart, mend = match.span() yield (rest[:mstart], '') yield (rest[mstart:mend], 
'grep.match') rest = rest[mend:] matches = {} copies = {} def grepbody(fn, rev, body): matches[rev].setdefault(fn, []) m = matches[rev][fn] for lnum, cstart, cend, line in matchlines(body): s = linestate(line, lnum, cstart, cend) m.append(s) def difflinestates(a, b): sm = difflib.SequenceMatcher(None, a, b) for tag, alo, ahi, blo, bhi in sm.get_opcodes(): if tag == 'insert': for i in xrange(blo, bhi): yield ('+', b[i]) elif tag == 'delete': for i in xrange(alo, ahi): yield ('-', a[i]) elif tag == 'replace': for i in xrange(alo, ahi): yield ('-', a[i]) for i in xrange(blo, bhi): yield ('+', b[i]) def display(fn, ctx, pstates, states): rev = ctx.rev() if ui.quiet: datefunc = util.shortdate else: datefunc = util.datestr found = False @util.cachefunc def binary(): flog = getfile(fn) return util.binary(flog.read(ctx.filenode(fn))) if opts.get('all'): iter = difflinestates(pstates, states) else: iter = [('', l) for l in states] for change, l in iter: cols = [(fn, 'grep.filename'), (str(rev), 'grep.rev')] if opts.get('line_number'): cols.append((str(l.linenum), 'grep.linenumber')) if opts.get('all'): cols.append((change, 'grep.change')) if opts.get('user'): cols.append((ui.shortuser(ctx.user()), 'grep.user')) if opts.get('date'): cols.append((datefunc(ctx.date()), 'grep.date')) for col, label in cols[:-1]: ui.write(col, label=label) ui.write(sep, label='grep.sep') ui.write(cols[-1][0], label=cols[-1][1]) if not opts.get('files_with_matches'): ui.write(sep, label='grep.sep') if not opts.get('text') and binary(): ui.write(" Binary file matches") else: for s, label in l: ui.write(s, label=label) ui.write(eol) found = True if opts.get('files_with_matches'): break return found skip = {} revfiles = {} matchfn = scmutil.match(repo[None], pats, opts) found = False follow = opts.get('follow') def prep(ctx, fns): rev = ctx.rev() pctx = ctx.p1() parent = pctx.rev() matches.setdefault(rev, {}) matches.setdefault(parent, {}) files = revfiles.setdefault(rev, []) for fn in fns: flog = 
getfile(fn) try: fnode = ctx.filenode(fn) except error.LookupError: continue copied = flog.renamed(fnode) copy = follow and copied and copied[0] if copy: copies.setdefault(rev, {})[fn] = copy if fn in skip: if copy: skip[copy] = True continue files.append(fn) if fn not in matches[rev]: grepbody(fn, rev, flog.read(fnode)) pfn = copy or fn if pfn not in matches[parent]: try: fnode = pctx.filenode(pfn) grepbody(pfn, parent, flog.read(fnode)) except error.LookupError: pass for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep): rev = ctx.rev() parent = ctx.p1().rev() for fn in sorted(revfiles.get(rev, [])): states = matches[rev][fn] copy = copies.get(rev, {}).get(fn) if fn in skip: if copy: skip[copy] = True continue pstates = matches.get(parent, {}).get(copy or fn, []) if pstates or states: r = display(fn, ctx, pstates, states) found = found or r if r and not opts.get('all'): skip[fn] = True if copy: skip[copy] = True del matches[rev] del revfiles[rev] return not found @command('heads', [('r', 'rev', '', _('show only heads which are descendants of STARTREV'), _('STARTREV')), ('t', 'topo', False, _('show topological heads only')), ('a', 'active', False, _('show active branchheads only (DEPRECATED)')), ('c', 'closed', False, _('show normal and closed branch heads')), ] + templateopts, _('[-ct] [-r STARTREV] [REV]...')) def heads(ui, repo, *branchrevs, **opts): """show branch heads With no arguments, show all open branch heads in the repository. Branch heads are changesets that have no descendants on the same branch. They are where development generally takes place and are the usual targets for update and merge operations. If one or more REVs are given, only open branch heads on the branches associated with the specified changesets are shown. This means that you can use :hg:`heads .` to see the heads on the currently checked-out branch. If -c/--closed is specified, also show branch heads marked closed (see :hg:`commit --close-branch`). 
If STARTREV is specified, only those heads that are descendants of STARTREV will be displayed. If -t/--topo is specified, named branch mechanics will be ignored and only topological heads (changesets with no children) will be shown. Returns 0 if matching heads are found, 1 if not. """ start = None if 'rev' in opts: start = scmutil.revsingle(repo, opts['rev'], None).node() if opts.get('topo'): heads = [repo[h] for h in repo.heads(start)] else: heads = [] for branch in repo.branchmap(): heads += repo.branchheads(branch, start, opts.get('closed')) heads = [repo[h] for h in heads] if branchrevs: branches = set(repo[br].branch() for br in branchrevs) heads = [h for h in heads if h.branch() in branches] if opts.get('active') and branchrevs: dagheads = repo.heads(start) heads = [h for h in heads if h.node() in dagheads] if branchrevs: haveheads = set(h.branch() for h in heads) if branches - haveheads: headless = ', '.join(b for b in branches - haveheads) msg = _('no open branch heads found on branches %s') if opts.get('rev'): msg += _(' (started at %s)') % opts['rev'] ui.warn((msg + '\n') % headless) if not heads: return 1 heads = sorted(heads, key=lambda x: -x.rev()) displayer = cmdutil.show_changeset(ui, repo, opts) for ctx in heads: displayer.show(ctx) displayer.close() @command('help', [('e', 'extension', None, _('show only help for extensions')), ('c', 'command', None, _('show only help for commands')), ('k', 'keyword', None, _('show topics matching keyword')), ('s', 'system', [], _('show help for specific platform(s)')), ], _('[-ecks] [TOPIC]'), norepo=True) def help_(ui, name=None, **opts): """show help for a given topic or a help overview With no arguments, print a list of commands with short help messages. Given a topic, extension, or command name, print help for that topic. Returns 0 if successful. 
""" textwidth = min(ui.termwidth(), 80) - 2 keep = opts.get('system') or [] if len(keep) == 0: if sys.platform.startswith('win'): keep.append('windows') elif sys.platform == 'OpenVMS': keep.append('vms') elif sys.platform == 'plan9': keep.append('plan9') else: keep.append('unix') keep.append(sys.platform.lower()) if ui.verbose: keep.append('verbose') section = None subtopic = None if name and '.' in name: name, section = name.split('.', 1) section = section.lower() if '.' in section: subtopic, section = section.split('.', 1) else: subtopic = section text = help.help_(ui, name, subtopic=subtopic, **opts) formatted, pruned = minirst.format(text, textwidth, keep=keep, section=section) # We could have been given a weird ".foo" section without a name # to look for, or we could have simply failed to found "foo.bar" # because bar isn't a section of foo if section and not (formatted and name): raise error.Abort(_("help section not found")) if 'verbose' in pruned: keep.append('omitted') else: keep.append('notomitted') formatted, pruned = minirst.format(text, textwidth, keep=keep, section=section) ui.write(formatted) @command('identify|id', [('r', 'rev', '', _('identify the specified revision'), _('REV')), ('n', 'num', None, _('show local revision number')), ('i', 'id', None, _('show global revision id')), ('b', 'branch', None, _('show branch')), ('t', 'tags', None, _('show tags')), ('B', 'bookmarks', None, _('show bookmarks')), ] + remoteopts, _('[-nibtB] [-r REV] [SOURCE]'), optionalrepo=True) def identify(ui, repo, source=None, rev=None, num=None, id=None, branch=None, tags=None, bookmarks=None, **opts): """identify the working directory or specified revision Print a summary identifying the repository state at REV using one or two parent hash identifiers, followed by a "+" if the working directory has uncommitted changes, the branch name (if not default), a list of tags, and a list of bookmarks. 
When REV is not given, print a summary of the current state of the repository. Specifying a path to a repository root or Mercurial bundle will cause lookup to operate on that repository/bundle. .. container:: verbose Examples: - generate a build identifier for the working directory:: hg id --id > build-id.dat - find the revision corresponding to a tag:: hg id -n -r 1.3 - check the most recent revision of a remote repository:: hg id -r tip http://selenic.com/hg/ See :hg:`log` for generating more information about specific revisions, including full hash identifiers. Returns 0 if successful. """ if not repo and not source: raise error.Abort(_("there is no Mercurial repository here " "(.hg not found)")) if ui.debugflag: hexfunc = hex else: hexfunc = short default = not (num or id or branch or tags or bookmarks) output = [] revs = [] if source: source, branches = hg.parseurl(ui.expandpath(source)) peer = hg.peer(repo or ui, opts, source) # only pass ui when no repo repo = peer.local() revs, checkout = hg.addbranchrevs(repo, peer, branches, None) if not repo: if num or branch or tags: raise error.Abort( _("can't query remote revision number, branch, or tags")) if not rev and revs: rev = revs[0] if not rev: rev = "tip" remoterev = peer.lookup(rev) if default or id: output = [hexfunc(remoterev)] def getbms(): bms = [] if 'bookmarks' in peer.listkeys('namespaces'): hexremoterev = hex(remoterev) bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems() if bmr == hexremoterev] return sorted(bms) if bookmarks: output.extend(getbms()) elif default and not ui.quiet: # multiple bookmarks for a single parent separated by '/' bm = '/'.join(getbms()) if bm: output.append(bm) else: ctx = scmutil.revsingle(repo, rev, None) if ctx.rev() is None: ctx = repo[None] parents = ctx.parents() taglist = [] for p in parents: taglist.extend(p.tags()) changed = "" if default or id or num: if (any(repo.status()) or any(ctx.sub(s).dirty() for s in ctx.substate)): changed = '+' if default or 
id: output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]), changed)] if num: output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]), changed)) else: if default or id: output = [hexfunc(ctx.node())] if num: output.append(str(ctx.rev())) taglist = ctx.tags() if default and not ui.quiet: b = ctx.branch() if b != 'default': output.append("(%s)" % b) # multiple tags for a single parent separated by '/' t = '/'.join(taglist) if t: output.append(t) # multiple bookmarks for a single parent separated by '/' bm = '/'.join(ctx.bookmarks()) if bm: output.append(bm) else: if branch: output.append(ctx.branch()) if tags: output.extend(taglist) if bookmarks: output.extend(ctx.bookmarks()) ui.write("%s\n" % ' '.join(output)) @command('import|patch', [('p', 'strip', 1, _('directory strip option for patch. This has the same ' 'meaning as the corresponding patch option'), _('NUM')), ('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')), ('e', 'edit', False, _('invoke editor on commit messages')), ('f', 'force', None, _('skip check for outstanding uncommitted changes (DEPRECATED)')), ('', 'no-commit', None, _("don't commit, just update the working directory")), ('', 'bypass', None, _("apply patch without touching the working directory")), ('', 'partial', None, _('commit even if some hunks fail')), ('', 'exact', None, _('apply patch to the nodes from which it was generated')), ('', 'prefix', '', _('apply patch to subdirectory'), _('DIR')), ('', 'import-branch', None, _('use any branch information in patch (implied by --exact)'))] + commitopts + commitopts2 + similarityopts, _('[OPTION]... PATCH...')) def import_(ui, repo, patch1=None, *patches, **opts): """import an ordered set of patches Import a list of patches and commit them individually (unless --no-commit is specified). To read a patch from standard input, use "-" as the patch name. If a URL is specified, the patch will be downloaded from there. 
Import first applies changes to the working directory (unless --bypass is specified), import will abort if there are outstanding changes. Use --bypass to apply and commit patches directly to the repository, without affecting the working directory. Without --exact, patches will be applied on top of the working directory parent revision. You can import a patch straight from a mail message. Even patches as attachments work (to use the body part, it must have type text/plain or text/x-patch). From and Subject headers of email message are used as default committer and commit message. All text/plain body parts before first diff are added to the commit message. If the imported patch was generated by :hg:`export`, user and description from patch override values from message headers and body. Values given on command line with -m/--message and -u/--user override these. If --exact is specified, import will set the working directory to the parent of each patch before applying it, and will abort if the resulting changeset has a different ID than the one recorded in the patch. This may happen due to character set problems or other deficiencies in the text patch format. Use --partial to ensure a changeset will be created from the patch even if some hunks fail to apply. Hunks that fail to apply will be written to a .rej file. Conflicts can then be resolved by hand before :hg:`commit --amend` is run to update the created changeset. This flag exists to let people import patches that partially apply without losing the associated metadata (author, date, description, ...). .. note:: When no hunks apply cleanly, :hg:`import --partial` will create an empty changeset, importing only the patch metadata. With -s/--similarity, hg will attempt to discover renames and copies in the patch in the same way as :hg:`addremove`. It is possible to use external patch programs to perform the patch by setting the ``ui.patch`` configuration option. 
For the default internal tool, the fuzz can also be configured via ``patch.fuzz``. See :hg:`help config` for more information about configuration files and how to use these options. See :hg:`help dates` for a list of formats valid for -d/--date. .. container:: verbose Examples: - import a traditional patch from a website and detect renames:: hg import -s 80 http://example.com/bugfix.patch - import a changeset from an hgweb server:: hg import http://www.selenic.com/hg/rev/5ca8c111e9aa - import all the patches in an Unix-style mbox:: hg import incoming-patches.mbox - attempt to exactly restore an exported changeset (not always possible):: hg import --exact proposed-fix.patch - use an external tool to apply a patch which is too fuzzy for the default internal tool. hg import --config ui.patch="patch --merge" fuzzy.patch - change the default fuzzing from 2 to a less strict 7 hg import --config ui.fuzz=7 fuzz.patch Returns 0 on success, 1 on partial success (see --partial). """ if not patch1: raise error.Abort(_('need at least one patch to import')) patches = (patch1,) + patches date = opts.get('date') if date: opts['date'] = util.parsedate(date) exact = opts.get('exact') update = not opts.get('bypass') if not update and opts.get('no_commit'): raise error.Abort(_('cannot use --no-commit with --bypass')) try: sim = float(opts.get('similarity') or 0) except ValueError: raise error.Abort(_('similarity must be a number')) if sim < 0 or sim > 100: raise error.Abort(_('similarity must be between 0 and 100')) if sim and not update: raise error.Abort(_('cannot use --similarity with --bypass')) if exact: if opts.get('edit'): raise error.Abort(_('cannot use --exact with --edit')) if opts.get('prefix'): raise error.Abort(_('cannot use --exact with --prefix')) base = opts["base"] wlock = dsguard = lock = tr = None msgs = [] ret = 0 try: wlock = repo.wlock() if update: cmdutil.checkunfinished(repo) if (exact or not opts.get('force')): cmdutil.bailifchanged(repo) if not 
opts.get('no_commit'): lock = repo.lock() tr = repo.transaction('import') else: dsguard = cmdutil.dirstateguard(repo, 'import') parents = repo[None].parents() for patchurl in patches: if patchurl == '-': ui.status(_('applying patch from stdin\n')) patchfile = ui.fin patchurl = 'stdin' # for error message else: patchurl = os.path.join(base, patchurl) ui.status(_('applying %s\n') % patchurl) patchfile = hg.openpath(ui, patchurl) haspatch = False for hunk in patch.split(patchfile): (msg, node, rej) = cmdutil.tryimportone(ui, repo, hunk, parents, opts, msgs, hg.clean) if msg: haspatch = True ui.note(msg + '\n') if update or exact: parents = repo[None].parents() else: parents = [repo[node]] if rej: ui.write_err(_("patch applied partially\n")) ui.write_err(_("(fix the .rej files and run " "`hg commit --amend`)\n")) ret = 1 break if not haspatch: raise error.Abort(_('%s: no diffs found') % patchurl) if tr: tr.close() if msgs: repo.savecommitmessage('\n* * *\n'.join(msgs)) if dsguard: dsguard.close() return ret finally: if tr: tr.release() release(lock, dsguard, wlock) @command('incoming|in', [('f', 'force', None, _('run even if remote repository is unrelated')), ('n', 'newest-first', None, _('show newest record first')), ('', 'bundle', '', _('file to store the bundles into'), _('FILE')), ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')), ('B', 'bookmarks', False, _("compare bookmarks")), ('b', 'branch', [], _('a specific branch you would like to pull'), _('BRANCH')), ] + logopts + remoteopts + subrepoopts, _('[-p] [-n] [-M] [-f] [-r REV]... [--bundle FILENAME] [SOURCE]')) def incoming(ui, repo, source="default", **opts): """show new changesets found in source Show new changesets found in the specified path/URL or the default pull location. These are the changesets that would have been pulled if a pull at the time you issued this command. See pull for valid source format details. .. 
container:: verbose With -B/--bookmarks, the result of bookmark comparison between local and remote repositories is displayed. With -v/--verbose, status is also displayed for each bookmark like below:: BM1 01234567890a added BM2 1234567890ab advanced BM3 234567890abc diverged BM4 34567890abcd changed The action taken locally when pulling depends on the status of each bookmark: :``added``: pull will create it :``advanced``: pull will update it :``diverged``: pull will create a divergent bookmark :``changed``: result depends on remote changesets From the point of view of pulling behavior, bookmarks existing only in the remote repository are treated as ``added``, even if they are in fact locally deleted. .. container:: verbose For a remote repository, using --bundle avoids downloading the changesets twice if the incoming is followed by a pull. Examples: - show incoming changes with patches and full description:: hg incoming -vp - show incoming changes excluding merges, store a bundle:: hg in -vpM --bundle incoming.hg hg pull incoming.hg - briefly list changes inside a bundle:: hg in changes.hg -T "{desc|firstline}\\n" Returns 0 if there are incoming changes, 1 otherwise. 
""" if opts.get('graph'): cmdutil.checkunsupportedgraphflags([], opts) def display(other, chlist, displayer): revdag = cmdutil.graphrevs(other, chlist, opts) cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges) hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True) return 0 if opts.get('bundle') and opts.get('subrepos'): raise error.Abort(_('cannot combine --bundle and --subrepos')) if opts.get('bookmarks'): source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch')) other = hg.peer(repo, opts, source) if 'bookmarks' not in other.listkeys('namespaces'): ui.warn(_("remote doesn't support bookmarks\n")) return 0 ui.status(_('comparing with %s\n') % util.hidepassword(source)) return bookmarks.incoming(ui, repo, other) repo._subtoppath = ui.expandpath(source) try: return hg.incoming(ui, repo, source, opts) finally: del repo._subtoppath @command('^init', remoteopts, _('[-e CMD] [--remotecmd CMD] [DEST]'), norepo=True) def init(ui, dest=".", **opts): """create a new repository in the given directory Initialize a new repository in the given directory. If the given directory does not exist, it will be created. If no directory is given, the current directory is used. It is possible to specify an ``ssh://`` URL as the destination. See :hg:`help urls` for more information. Returns 0 on success. """ hg.peer(ui, opts, ui.expandpath(dest), create=True) @command('locate', [('r', 'rev', '', _('search the repository as it is in REV'), _('REV')), ('0', 'print0', None, _('end filenames with NUL, for use with xargs')), ('f', 'fullpath', None, _('print complete paths from the filesystem root')), ] + walkopts, _('[OPTION]... [PATTERN]...')) def locate(ui, repo, *pats, **opts): """locate files matching specific patterns (DEPRECATED) Print files under Mercurial control in the working directory whose names match the given patterns. By default, this command searches all directories in the working directory. 
To search just the current directory and its subdirectories, use "--include .". If no patterns are given to match, this command prints the names of all files under Mercurial control in the working directory. If you want to feed the output of this command into the "xargs" command, use the -0 option to both this command and "xargs". This will avoid the problem of "xargs" treating single filenames that contain whitespace as multiple filenames. See :hg:`help files` for a more versatile command. Returns 0 if a match is found, 1 otherwise. """ if opts.get('print0'): end = '\0' else: end = '\n' rev = scmutil.revsingle(repo, opts.get('rev'), None).node() ret = 1 ctx = repo[rev] m = scmutil.match(ctx, pats, opts, default='relglob', badfn=lambda x, y: False) for abs in ctx.matches(m): if opts.get('fullpath'): ui.write(repo.wjoin(abs), end) else: ui.write(((pats and m.rel(abs)) or abs), end) ret = 0 return ret @command('^log|history', [('f', 'follow', None, _('follow changeset history, or file history across copies and renames')), ('', 'follow-first', None, _('only follow the first parent of merge changesets (DEPRECATED)')), ('d', 'date', '', _('show revisions matching date spec'), _('DATE')), ('C', 'copies', None, _('show copied files')), ('k', 'keyword', [], _('do case-insensitive search for a given text'), _('TEXT')), ('r', 'rev', [], _('show the specified revision or revset'), _('REV')), ('', 'removed', None, _('include revisions where files were removed')), ('m', 'only-merges', None, _('show only merges (DEPRECATED)')), ('u', 'user', [], _('revisions committed by user'), _('USER')), ('', 'only-branch', [], _('show only changesets within the given named branch (DEPRECATED)'), _('BRANCH')), ('b', 'branch', [], _('show changesets within the given named branch'), _('BRANCH')), ('P', 'prune', [], _('do not display revision or any of its ancestors'), _('REV')), ] + logopts + walkopts, _('[OPTION]... 
[FILE]'), inferrepo=True) def log(ui, repo, *pats, **opts): """show revision history of entire repository or files Print the revision history of the specified files or the entire project. If no revision range is specified, the default is ``tip:0`` unless --follow is set, in which case the working directory parent is used as the starting revision. File history is shown without following rename or copy history of files. Use -f/--follow with a filename to follow history across renames and copies. --follow without a filename will only show ancestors or descendants of the starting revision. By default this command prints revision number and changeset id, tags, non-trivial parents, user, date and time, and a summary for each commit. When the -v/--verbose switch is used, the list of changed files and full commit message are shown. With --graph the revisions are shown as an ASCII art DAG with the most recent changeset at the top. 'o' is a changeset, '@' is a working directory parent, 'x' is obsolete, and '+' represents a fork where the changeset from the lines below is a parent of the 'o' merge on the same line. .. note:: :hg:`log --patch` may generate unexpected diff output for merge changesets, as it will only compare the merge changeset against its first parent. Also, only files different from BOTH parents will appear in files:. .. note:: For performance reasons, :hg:`log FILE` may omit duplicate changes made on branches and will not show removals or mode changes. To see all such changes, use the --removed switch. .. container:: verbose Some examples: - changesets with full descriptions and file lists:: hg log -v - changesets ancestral to the working directory:: hg log -f - last 10 commits on the current branch:: hg log -l 10 -b . 
- changesets showing all modifications of a file, including removals:: hg log --removed file.c - all changesets that touch a directory, with diffs, excluding merges:: hg log -Mp lib/ - all revision numbers that match a keyword:: hg log -k bug --template "{rev}\\n" - the full hash identifier of the working directory parent:: hg log -r . --template "{node}\\n" - list available log templates:: hg log -T list - check if a given changeset is included in a tagged release:: hg log -r "a21ccf and ancestor(1.9)" - find all changesets by some user in a date range:: hg log -k alice -d "may 2008 to jul 2008" - summary of all changesets after the last tag:: hg log -r "last(tagged())::" --template "{desc|firstline}\\n" See :hg:`help dates` for a list of formats valid for -d/--date. See :hg:`help revisions` and :hg:`help revsets` for more about specifying and ordering revisions. See :hg:`help templates` for more about pre-packaged styles and specifying custom templates. Returns 0 on success. """ if opts.get('follow') and opts.get('rev'): opts['rev'] = [revset.formatspec('reverse(::%lr)', opts.get('rev'))] del opts['follow'] if opts.get('graph'): return cmdutil.graphlog(ui, repo, *pats, **opts) revs, expr, filematcher = cmdutil.getlogrevs(repo, pats, opts) limit = cmdutil.loglimit(opts) count = 0 getrenamed = None if opts.get('copies'): endrev = None if opts.get('rev'): endrev = scmutil.revrange(repo, opts.get('rev')).max() + 1 getrenamed = templatekw.getrenamedfn(repo, endrev=endrev) displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True) for rev in revs: if count == limit: break ctx = repo[rev] copies = None if getrenamed is not None and rev: copies = [] for fn in ctx.files(): rename = getrenamed(fn, rev) if rename: copies.append((fn, rename[0])) if filematcher: revmatchfn = filematcher(ctx.rev()) else: revmatchfn = None displayer.show(ctx, copies=copies, matchfn=revmatchfn) if displayer.flush(ctx): count += 1 displayer.close() @command('manifest', [('r', 'rev', '', 
_('revision to display'), _('REV')), ('', 'all', False, _("list files from all revisions"))] + formatteropts, _('[-r REV]')) def manifest(ui, repo, node=None, rev=None, **opts): """output the current or given revision of the project manifest Print a list of version controlled files for the given revision. If no revision is given, the first parent of the working directory is used, or the null revision if no revision is checked out. With -v, print file permissions, symlink and executable bits. With --debug, print file revision hashes. If option --all is specified, the list of all files from all revisions is printed. This includes deleted and renamed files. Returns 0 on success. """ fm = ui.formatter('manifest', opts) if opts.get('all'): if rev or node: raise error.Abort(_("can't specify a revision with --all")) res = [] prefix = "data/" suffix = ".i" plen = len(prefix) slen = len(suffix) with repo.lock(): for fn, b, size in repo.store.datafiles(): if size != 0 and fn[-slen:] == suffix and fn[:plen] == prefix: res.append(fn[plen:-slen]) for f in res: fm.startitem() fm.write("path", '%s\n', f) fm.end() return if rev and node: raise error.Abort(_("please specify just one revision")) if not node: node = rev char = {'l': '@', 'x': '*', '': ''} mode = {'l': '644', 'x': '755', '': '644'} ctx = scmutil.revsingle(repo, node) mf = ctx.manifest() for f in ctx: fm.startitem() fl = ctx[f].flags() fm.condwrite(ui.debugflag, 'hash', '%s ', hex(mf[f])) fm.condwrite(ui.verbose, 'mode type', '%s %1s ', mode[fl], char[fl]) fm.write('path', '%s\n', f) fm.end() @command('^merge', [('f', 'force', None, _('force a merge including outstanding changes (DEPRECATED)')), ('r', 'rev', '', _('revision to merge'), _('REV')), ('P', 'preview', None, _('review revisions to merge (no merge is performed)')) ] + mergetoolopts, _('[-P] [[-r] REV]')) def merge(ui, repo, node=None, **opts): """merge another revision into working directory The current working directory is updated with all changes made in 
the requested revision since the last common predecessor revision. Files that changed between either parent are marked as changed for the next commit and a commit must be performed before any further updates to the repository are allowed. The next commit will have two parents. ``--tool`` can be used to specify the merge tool used for file merges. It overrides the HGMERGE environment variable and your configuration files. See :hg:`help merge-tools` for options. If no revision is specified, the working directory's parent is a head revision, and the current branch contains exactly one other head, the other head is merged with by default. Otherwise, an explicit revision with which to merge with must be provided. See :hg:`help resolve` for information on handling file conflicts. To undo an uncommitted merge, use :hg:`update --clean .` which will check out a clean copy of the original merge parent, losing all changes. Returns 0 on success, 1 if there are unresolved files. """ if opts.get('rev') and node: raise error.Abort(_("please specify just one revision")) if not node: node = opts.get('rev') if node: node = scmutil.revsingle(repo, node).node() if not node: node = repo[destutil.destmerge(repo)].node() if opts.get('preview'): # find nodes that are ancestors of p2 but not of p1 p1 = repo.lookup('.') p2 = repo.lookup(node) nodes = repo.changelog.findmissing(common=[p1], heads=[p2]) displayer = cmdutil.show_changeset(ui, repo, opts) for node in nodes: displayer.show(repo[node]) displayer.close() return 0 try: # ui.forcemerge is an internal variable, do not document repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'merge') return hg.merge(repo, node, force=opts.get('force')) finally: ui.setconfig('ui', 'forcemerge', '', 'merge') @command('outgoing|out', [('f', 'force', None, _('run even when the destination is unrelated')), ('r', 'rev', [], _('a changeset intended to be included in the destination'), _('REV')), ('n', 'newest-first', None, _('show newest record 
first')), ('B', 'bookmarks', False, _('compare bookmarks')), ('b', 'branch', [], _('a specific branch you would like to push'), _('BRANCH')), ] + logopts + remoteopts + subrepoopts, _('[-M] [-p] [-n] [-f] [-r REV]... [DEST]')) def outgoing(ui, repo, dest=None, **opts): """show changesets not found in the destination Show changesets not found in the specified destination repository or the default push location. These are the changesets that would be pushed if a push was requested. See pull for details of valid destination formats. .. container:: verbose With -B/--bookmarks, the result of bookmark comparison between local and remote repositories is displayed. With -v/--verbose, status is also displayed for each bookmark like below:: BM1 01234567890a added BM2 deleted BM3 234567890abc advanced BM4 34567890abcd diverged BM5 4567890abcde changed The action taken when pushing depends on the status of each bookmark: :``added``: push with ``-B`` will create it :``deleted``: push with ``-B`` will delete it :``advanced``: push will update it :``diverged``: push with ``-B`` will update it :``changed``: push with ``-B`` will update it From the point of view of pushing behavior, bookmarks existing only in the remote repository are treated as ``deleted``, even if they are in fact added remotely. Returns 0 if there are outgoing changes, 1 otherwise. 
""" if opts.get('graph'): cmdutil.checkunsupportedgraphflags([], opts) o, other = hg._outgoing(ui, repo, dest, opts) if not o: cmdutil.outgoinghooks(ui, repo, other, opts, o) return revdag = cmdutil.graphrevs(repo, o, opts) displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True) cmdutil.displaygraph(ui, repo, revdag, displayer, graphmod.asciiedges) cmdutil.outgoinghooks(ui, repo, other, opts, o) return 0 if opts.get('bookmarks'): dest = ui.expandpath(dest or 'default-push', dest or 'default') dest, branches = hg.parseurl(dest, opts.get('branch')) other = hg.peer(repo, opts, dest) if 'bookmarks' not in other.listkeys('namespaces'): ui.warn(_("remote doesn't support bookmarks\n")) return 0 ui.status(_('comparing with %s\n') % util.hidepassword(dest)) return bookmarks.outgoing(ui, repo, other) repo._subtoppath = ui.expandpath(dest or 'default-push', dest or 'default') try: return hg.outgoing(ui, repo, dest, opts) finally: del repo._subtoppath @command('parents', [('r', 'rev', '', _('show parents of the specified revision'), _('REV')), ] + templateopts, _('[-r REV] [FILE]'), inferrepo=True) def parents(ui, repo, file_=None, **opts): """show the parents of the working directory or revision (DEPRECATED) Print the working directory's parent revisions. If a revision is given via -r/--rev, the parent of that revision will be printed. If a file argument is given, the revision in which the file was last changed (before the working directory revision or the argument to --rev if given) is printed. This command is equivalent to:: hg log -r "p1()+p2()" or hg log -r "p1(REV)+p2(REV)" or hg log -r "max(::p1() and file(FILE))+max(::p2() and file(FILE))" or hg log -r "max(::p1(REV) and file(FILE))+max(::p2(REV) and file(FILE))" See :hg:`summary` and :hg:`help revsets` for related information. Returns 0 on success. 
""" ctx = scmutil.revsingle(repo, opts.get('rev'), None) if file_: m = scmutil.match(ctx, (file_,), opts) if m.anypats() or len(m.files()) != 1: raise error.Abort(_('can only specify an explicit filename')) file_ = m.files()[0] filenodes = [] for cp in ctx.parents(): if not cp: continue try: filenodes.append(cp.filenode(file_)) except error.LookupError: pass if not filenodes: raise error.Abort(_("'%s' not found in manifest!") % file_) p = [] for fn in filenodes: fctx = repo.filectx(file_, fileid=fn) p.append(fctx.node()) else: p = [cp.node() for cp in ctx.parents()] displayer = cmdutil.show_changeset(ui, repo, opts) for n in p: if n != nullid: displayer.show(repo[n]) displayer.close() @command('paths', formatteropts, _('[NAME]'), optionalrepo=True) def paths(ui, repo, search=None, **opts): """show aliases for remote repositories Show definition of symbolic path name NAME. If no name is given, show definition of all available names. Option -q/--quiet suppresses all output when searching for NAME and shows only the path names when listing all definitions. Path names are defined in the [paths] section of your configuration file and in ``/etc/mercurial/hgrc``. If run inside a repository, ``.hg/hgrc`` is used, too. The path names ``default`` and ``default-push`` have a special meaning. When performing a push or pull operation, they are used as fallbacks if no location is specified on the command-line. When ``default-push`` is set, it will be used for push and ``default`` will be used for pull; otherwise ``default`` is used as the fallback for both. When cloning a repository, the clone source is written as ``default`` in ``.hg/hgrc``. .. note:: ``default`` and ``default-push`` apply to all inbound (e.g. :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and :hg:`bundle`) operations. See :hg:`help urls` for more information. Returns 0 on success. 
""" if search: pathitems = [(name, path) for name, path in ui.paths.iteritems() if name == search] else: pathitems = sorted(ui.paths.iteritems()) fm = ui.formatter('paths', opts) if fm: hidepassword = str else: hidepassword = util.hidepassword if ui.quiet: namefmt = '%s\n' else: namefmt = '%s = ' showsubopts = not search and not ui.quiet for name, path in pathitems: fm.startitem() fm.condwrite(not search, 'name', namefmt, name) fm.condwrite(not ui.quiet, 'url', '%s\n', hidepassword(path.rawloc)) for subopt, value in sorted(path.suboptions.items()): assert subopt not in ('name', 'url') if showsubopts: fm.plain('%s:%s = ' % (name, subopt)) fm.condwrite(showsubopts, subopt, '%s\n', value) fm.end() if search and not pathitems: if not ui.quiet: ui.warn(_("not found!\n")) return 1 else: return 0 @command('phase', [('p', 'public', False, _('set changeset phase to public')), ('d', 'draft', False, _('set changeset phase to draft')), ('s', 'secret', False, _('set changeset phase to secret')), ('f', 'force', False, _('allow to move boundary backward')), ('r', 'rev', [], _('target revision'), _('REV')), ], _('[-p|-d|-s] [-f] [-r] [REV...]')) def phase(ui, repo, *revs, **opts): """set or show the current phase name With no argument, show the phase name of the current revision(s). With one of -p/--public, -d/--draft or -s/--secret, change the phase value of the specified revisions. Unless -f/--force is specified, :hg:`phase` won't move changeset from a lower phase to an higher phase. Phases are ordered as follows:: public < draft < secret Returns 0 on success, 1 if some phases could not be changed. (For more information about the phases concept, see :hg:`help phases`.) 
""" # search for a unique phase argument targetphase = None for idx, name in enumerate(phases.phasenames): if opts[name]: if targetphase is not None: raise error.Abort(_('only one phase can be specified')) targetphase = idx # look for specified revision revs = list(revs) revs.extend(opts['rev']) if not revs: # display both parents as the second parent phase can influence # the phase of a merge commit revs = [c.rev() for c in repo[None].parents()] revs = scmutil.revrange(repo, revs) lock = None ret = 0 if targetphase is None: # display for r in revs: ctx = repo[r] ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr())) else: tr = None lock = repo.lock() try: tr = repo.transaction("phase") # set phase if not revs: raise error.Abort(_('empty revision set')) nodes = [repo[r].node() for r in revs] # moving revision from public to draft may hide them # We have to check result on an unfiltered repository unfi = repo.unfiltered() getphase = unfi._phasecache.phase olddata = [getphase(unfi, r) for r in unfi] phases.advanceboundary(repo, tr, targetphase, nodes) if opts['force']: phases.retractboundary(repo, tr, targetphase, nodes) tr.close() finally: if tr is not None: tr.release() lock.release() getphase = unfi._phasecache.phase newdata = [getphase(unfi, r) for r in unfi] changes = sum(newdata[r] != olddata[r] for r in unfi) cl = unfi.changelog rejected = [n for n in nodes if newdata[cl.rev(n)] < targetphase] if rejected: ui.warn(_('cannot move %i changesets to a higher ' 'phase, use --force\n') % len(rejected)) ret = 1 if changes: msg = _('phase changed for %i changesets\n') % changes if ret: ui.status(msg) else: ui.note(msg) else: ui.warn(_('no phases changed\n')) return ret def postincoming(ui, repo, modheads, optupdate, checkout): if modheads == 0: return if optupdate: try: brev = checkout movemarkfrom = None if not checkout: updata = destutil.destupdate(repo) checkout, movemarkfrom, brev = updata ret = hg.update(repo, checkout) except error.UpdateAbort as inst: msg = _("not 
updating: %s") % str(inst) hint = inst.hint raise error.UpdateAbort(msg, hint=hint) if not ret and movemarkfrom: if movemarkfrom == repo['.'].node(): pass # no-op update elif bookmarks.update(repo, [movemarkfrom], repo['.'].node()): ui.status(_("updating bookmark %s\n") % repo._activebookmark) return ret if modheads > 1: currentbranchheads = len(repo.branchheads()) if currentbranchheads == modheads: ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n")) elif currentbranchheads > 1: ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to " "merge)\n")) else: ui.status(_("(run 'hg heads' to see heads)\n")) else: ui.status(_("(run 'hg update' to get a working copy)\n")) @command('^pull', [('u', 'update', None, _('update to new branch head if changesets were pulled')), ('f', 'force', None, _('run even when remote repository is unrelated')), ('r', 'rev', [], _('a remote changeset intended to be added'), _('REV')), ('B', 'bookmark', [], _("bookmark to pull"), _('BOOKMARK')), ('b', 'branch', [], _('a specific branch you would like to pull'), _('BRANCH')), ] + remoteopts, _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')) def pull(ui, repo, source="default", **opts): """pull changes from the specified source Pull changes from a remote repository to a local one. This finds all changes from the repository at the specified path or URL and adds them to a local repository (the current one unless -R is specified). By default, this does not update the copy of the project in the working directory. Use :hg:`incoming` if you want to see what would have been added by a pull at the time you issued this command. If you then decide to add those changes to the repository, you should use :hg:`pull -r X` where ``X`` is the last changeset listed by :hg:`incoming`. If SOURCE is omitted, the 'default' path will be used. See :hg:`help urls` for more information. Returns 0 on success, 1 if an update had unresolved files. 
""" source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch')) ui.status(_('pulling from %s\n') % util.hidepassword(source)) other = hg.peer(repo, opts, source) try: revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev')) pullopargs = {} if opts.get('bookmark'): if not revs: revs = [] # The list of bookmark used here is not the one used to actually # update the bookmark name. This can result in the revision pulled # not ending up with the name of the bookmark because of a race # condition on the server. (See issue 4689 for details) remotebookmarks = other.listkeys('bookmarks') pullopargs['remotebookmarks'] = remotebookmarks for b in opts['bookmark']: if b not in remotebookmarks: raise error.Abort(_('remote bookmark %s not found!') % b) revs.append(remotebookmarks[b]) if revs: try: # When 'rev' is a bookmark name, we cannot guarantee that it # will be updated with that name because of a race condition # server side. (See issue 4689 for details) oldrevs = revs revs = [] # actually, nodes for r in oldrevs: node = other.lookup(r) revs.append(node) if r == checkout: checkout = node except error.CapabilityError: err = _("other repository doesn't support revision lookup, " "so a rev cannot be specified.") raise error.Abort(err) pullopargs.update(opts.get('opargs', {})) modheads = exchange.pull(repo, other, heads=revs, force=opts.get('force'), bookmarks=opts.get('bookmark', ()), opargs=pullopargs).cgresult if checkout: checkout = str(repo.changelog.rev(checkout)) repo._subtoppath = source try: ret = postincoming(ui, repo, modheads, opts.get('update'), checkout) finally: del repo._subtoppath finally: other.close() return ret @command('^push', [('f', 'force', None, _('force push')), ('r', 'rev', [], _('a changeset intended to be included in the destination'), _('REV')), ('B', 'bookmark', [], _("bookmark to push"), _('BOOKMARK')), ('b', 'branch', [], _('a specific branch you would like to push'), _('BRANCH')), ('', 'new-branch', False, 
_('allow pushing a new branch')), ] + remoteopts, _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')) def push(ui, repo, dest=None, **opts): """push changes to the specified destination Push changesets from the local repository to the specified destination. This operation is symmetrical to pull: it is identical to a pull in the destination repository from the current one. By default, push will not allow creation of new heads at the destination, since multiple heads would make it unclear which head to use. In this situation, it is recommended to pull and merge before pushing. Use --new-branch if you want to allow push to create a new named branch that is not present at the destination. This allows you to only create a new branch without forcing other changes. .. note:: Extra care should be taken with the -f/--force option, which will push all new heads on all branches, an action which will almost always cause confusion for collaborators. If -r/--rev is used, the specified revision and all its ancestors will be pushed to the remote repository. If -B/--bookmark is used, the specified bookmarked revision, its ancestors, and the bookmark will be pushed to the remote repository. Please see :hg:`help urls` for important details about ``ssh://`` URLs. If DESTINATION is omitted, a default path will be used. Returns 0 if push was successful, 1 if nothing to push. 
""" if opts.get('bookmark'): ui.setconfig('bookmarks', 'pushing', opts['bookmark'], 'push') for b in opts['bookmark']: # translate -B options to -r so changesets get pushed if b in repo._bookmarks: opts.setdefault('rev', []).append(b) else: # if we try to push a deleted bookmark, translate it to null # this lets simultaneous -r, -b options continue working opts.setdefault('rev', []).append("null") path = ui.paths.getpath(dest, default=('default-push', 'default')) if not path: raise error.Abort(_('default repository not configured!'), hint=_('see the "path" section in "hg help config"')) dest = path.pushloc or path.loc branches = (path.branch, opts.get('branch') or []) ui.status(_('pushing to %s\n') % util.hidepassword(dest)) revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev')) other = hg.peer(repo, opts, dest) if revs: revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)] if not revs: raise error.Abort(_("specified revisions evaluate to an empty set"), hint=_("use different revision arguments")) repo._subtoppath = dest try: # push subrepos depth-first for coherent ordering c = repo[''] subs = c.substate # only repos that are committed for s in sorted(subs): result = c.sub(s).push(opts) if result == 0: return not result finally: del repo._subtoppath pushop = exchange.push(repo, other, opts.get('force'), revs=revs, newbranch=opts.get('new_branch'), bookmarks=opts.get('bookmark', ()), opargs=opts.get('opargs')) result = not pushop.cgresult if pushop.bkresult is not None: if pushop.bkresult == 2: result = 2 elif not result and pushop.bkresult: result = 2 return result @command('recover', []) def recover(ui, repo): """roll back an interrupted transaction Recover from an interrupted commit or pull. This command tries to fix the repository status after an interrupted operation. It should only be necessary when Mercurial suggests it. Returns 0 if successful, 1 if nothing to recover or verify fails. 
""" if repo.recover(): return hg.verify(repo) return 1 @command('^remove|rm', [('A', 'after', None, _('record delete for missing files')), ('f', 'force', None, _('remove (and delete) file even if added or modified')), ] + subrepoopts + walkopts, _('[OPTION]... FILE...'), inferrepo=True) def remove(ui, repo, *pats, **opts): """remove the specified files on the next commit Schedule the indicated files for removal from the current branch. This command schedules the files to be removed at the next commit. To undo a remove before that, see :hg:`revert`. To undo added files, see :hg:`forget`. .. container:: verbose -A/--after can be used to remove only files that have already been deleted, -f/--force can be used to force deletion, and -Af can be used to remove files from the next revision without deleting them from the working directory. The following table details the behavior of remove for different file states (columns) and option combinations (rows). The file states are Added [A], Clean [C], Modified [M] and Missing [!] (as reported by :hg:`status`). The actions are Warn, Remove (from branch) and Delete (from disk): ========= == == == == opt/state A C M ! ========= == == == == none W RD W R -f R RD RD R -A W W W R -Af R R R R ========= == == == == .. note:: :hg:`remove` never deletes files in Added [A] state from the working directory, not even if ``--force`` is specified. Returns 0 on success, 1 if any warnings encountered. """ after, force = opts.get('after'), opts.get('force') if not pats and not after: raise error.Abort(_('no files specified')) m = scmutil.match(repo[None], pats, opts) subrepos = opts.get('subrepos') return cmdutil.remove(ui, repo, m, "", after, force, subrepos) @command('rename|move|mv', [('A', 'after', None, _('record a rename that has already occurred')), ('f', 'force', None, _('forcibly copy over an existing managed file')), ] + walkopts + dryrunopts, _('[OPTION]... SOURCE... 
DEST')) def rename(ui, repo, *pats, **opts): """rename files; equivalent of copy + remove Mark dest as copies of sources; mark sources for deletion. If dest is a directory, copies are put in that directory. If dest is a file, there can only be one source. By default, this command copies the contents of files as they exist in the working directory. If invoked with -A/--after, the operation is recorded, but no copying is performed. This command takes effect at the next commit. To undo a rename before that, see :hg:`revert`. Returns 0 on success, 1 if errors are encountered. """ with repo.wlock(False): return cmdutil.copy(ui, repo, pats, opts, rename=True) @command('resolve', [('a', 'all', None, _('select all unresolved files')), ('l', 'list', None, _('list state of files needing merge')), ('m', 'mark', None, _('mark files as resolved')), ('u', 'unmark', None, _('mark files as unresolved')), ('n', 'no-status', None, _('hide status prefix'))] + mergetoolopts + walkopts + formatteropts, _('[OPTION]... [FILE]...'), inferrepo=True) def resolve(ui, repo, *pats, **opts): """redo merges or set/view the merge status of files Merges with unresolved conflicts are often the result of non-interactive merging using the ``internal:merge`` configuration setting, or a command-line merge tool like ``diff3``. The resolve command is used to manage the files involved in a merge, after :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the working directory must have two parents). See :hg:`help merge-tools` for information on configuring merge tools. The resolve command can be used in the following ways: - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified files, discarding any previous merge attempts. Re-merging is not performed for files already marked as resolved. Use ``--all/-a`` to select all unresolved files. ``--tool`` can be used to specify the merge tool used for the given files. 
It overrides the HGMERGE environment variable and your configuration files. Previous file contents are saved with a ``.orig`` suffix. - :hg:`resolve -m [FILE]`: mark a file as having been resolved (e.g. after having manually fixed-up the files). The default is to mark all unresolved files. - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The default is to mark all resolved files. - :hg:`resolve -l`: list files which had or still have conflicts. In the printed list, ``U`` = unresolved and ``R`` = resolved. .. note:: Mercurial will not let you commit files with unresolved merge conflicts. You must use :hg:`resolve -m ...` before you can commit after a conflicting merge. Returns 0 on success, 1 if any files fail a resolve attempt. """ all, mark, unmark, show, nostatus = \ [opts.get(o) for o in 'all mark unmark list no_status'.split()] if (show and (mark or unmark)) or (mark and unmark): raise error.Abort(_("too many options specified")) if pats and all: raise error.Abort(_("can't specify --all and patterns")) if not (all or pats or show or mark or unmark): raise error.Abort(_('no files or directories specified'), hint=('use --all to re-merge all unresolved files')) if show: fm = ui.formatter('resolve', opts) ms = mergemod.mergestate.read(repo) m = scmutil.match(repo[None], pats, opts) for f in ms: if not m(f): continue l = 'resolve.' 
+ {'u': 'unresolved', 'r': 'resolved', 'd': 'driverresolved'}[ms[f]] fm.startitem() fm.condwrite(not nostatus, 'status', '%s ', ms[f].upper(), label=l) fm.write('path', '%s\n', f, label=l) fm.end() return 0 with repo.wlock(): ms = mergemod.mergestate.read(repo) if not (ms.active() or repo.dirstate.p2() != nullid): raise error.Abort( _('resolve command not applicable when not merging')) wctx = repo[None] if ms.mergedriver and ms.mdstate() == 'u': proceed = mergemod.driverpreprocess(repo, ms, wctx) ms.commit() # allow mark and unmark to go through if not mark and not unmark and not proceed: return 1 m = scmutil.match(wctx, pats, opts) ret = 0 didwork = False runconclude = False tocomplete = [] for f in ms: if not m(f): continue didwork = True # don't let driver-resolved files be marked, and run the conclude # step if asked to resolve if ms[f] == "d": exact = m.exact(f) if mark: if exact: ui.warn(_('not marking %s as it is driver-resolved\n') % f) elif unmark: if exact: ui.warn(_('not unmarking %s as it is driver-resolved\n') % f) else: runconclude = True continue if mark: ms.mark(f, "r") elif unmark: ms.mark(f, "u") else: # backup pre-resolve (merge uses .orig for its own purposes) a = repo.wjoin(f) try: util.copyfile(a, a + ".resolve") except (IOError, OSError) as inst: if inst.errno != errno.ENOENT: raise try: # preresolve file ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'resolve') complete, r = ms.preresolve(f, wctx) if not complete: tocomplete.append(f) elif r: ret = 1 finally: ui.setconfig('ui', 'forcemerge', '', 'resolve') ms.commit() # replace filemerge's .orig file with our resolve file, but only # for merges that are complete if complete: try: util.rename(a + ".resolve", scmutil.origpath(ui, repo, a)) except OSError as inst: if inst.errno != errno.ENOENT: raise for f in tocomplete: try: # resolve file ui.setconfig('ui', 'forcemerge', opts.get('tool', ''), 'resolve') r = ms.resolve(f, wctx) if r: ret = 1 finally: ui.setconfig('ui', 'forcemerge', 
'', 'resolve') ms.commit() # replace filemerge's .orig file with our resolve file a = repo.wjoin(f) try: util.rename(a + ".resolve", scmutil.origpath(ui, repo, a)) except OSError as inst: if inst.errno != errno.ENOENT: raise ms.commit() ms.recordactions() if not didwork and pats: ui.warn(_("arguments do not match paths that need resolving\n")) elif ms.mergedriver and ms.mdstate() != 's': # run conclude step when either a driver-resolved file is requested # or there are no driver-resolved files # we can't use 'ret' to determine whether any files are unresolved # because we might not have tried to resolve some if ((runconclude or not list(ms.driverresolved())) and not list(ms.unresolved())): proceed = mergemod.driverconclude(repo, ms, wctx) ms.commit() if not proceed: return 1 # Nudge users into finishing an unfinished operation unresolvedf = list(ms.unresolved()) driverresolvedf = list(ms.driverresolved()) if not unresolvedf and not driverresolvedf: ui.status(_('(no more unresolved files)\n')) cmdutil.checkafterresolved(repo) elif not unresolvedf: ui.status(_('(no more unresolved files -- ' 'run "hg resolve --all" to conclude)\n')) return ret @command('revert', [('a', 'all', None, _('revert all changes when no arguments given')), ('d', 'date', '', _('tipmost revision matching date'), _('DATE')), ('r', 'rev', '', _('revert to the specified revision'), _('REV')), ('C', 'no-backup', None, _('do not save backup copies of files')), ('i', 'interactive', None, _('interactively select the changes (EXPERIMENTAL)')), ] + walkopts + dryrunopts, _('[OPTION]... [-r REV] [NAME]...')) def revert(ui, repo, *pats, **opts): """restore files to their checkout state .. note:: To check out earlier revisions, you should use :hg:`update REV`. To cancel an uncommitted merge (and lose your changes), use :hg:`update --clean .`. With no revision specified, revert the specified files or directories to the contents they had in the parent of the working directory. 
This restores the contents of files to an unmodified state and unschedules adds, removes, copies, and renames. If the working directory has two parents, you must explicitly specify a revision. Using the -r/--rev or -d/--date options, revert the given files or directories to their states as of a specific revision. Because revert does not change the working directory parents, this will cause these files to appear modified. This can be helpful to "back out" some or all of an earlier change. See :hg:`backout` for a related method. Modified files are saved with a .orig suffix before reverting. To disable these backups, use --no-backup. See :hg:`help dates` for a list of formats valid for -d/--date. See :hg:`help backout` for a way to reverse the effect of an earlier changeset. Returns 0 on success. """ if opts.get("date"): if opts.get("rev"): raise error.Abort(_("you can't specify a revision and a date")) opts["rev"] = cmdutil.finddate(ui, repo, opts["date"]) parent, p2 = repo.dirstate.parents() if not opts.get('rev') and p2 != nullid: # revert after merge is a trap for new users (issue2915) raise error.Abort(_('uncommitted merge with no revision specified'), hint=_('use "hg update" or see "hg help revert"')) ctx = scmutil.revsingle(repo, opts.get('rev')) if (not (pats or opts.get('include') or opts.get('exclude') or opts.get('all') or opts.get('interactive'))): msg = _("no files or directories specified") if p2 != nullid: hint = _("uncommitted merge, use --all to discard all changes," " or 'hg update -C .' 
to abort the merge") raise error.Abort(msg, hint=hint) dirty = any(repo.status()) node = ctx.node() if node != parent: if dirty: hint = _("uncommitted changes, use --all to discard all" " changes, or 'hg update %s' to update") % ctx.rev() else: hint = _("use --all to revert all files," " or 'hg update %s' to update") % ctx.rev() elif dirty: hint = _("uncommitted changes, use --all to discard all changes") else: hint = _("use --all to revert all files") raise error.Abort(msg, hint=hint) return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts) @command('rollback', dryrunopts + [('f', 'force', False, _('ignore safety measures'))]) def rollback(ui, repo, **opts): """roll back the last transaction (DANGEROUS) (DEPRECATED) Please use :hg:`commit --amend` instead of rollback to correct mistakes in the last commit. This command should be used with care. There is only one level of rollback, and there is no way to undo a rollback. It will also restore the dirstate at the time of the last transaction, losing any dirstate changes since that time. This command does not alter the working directory. Transactions are used to encapsulate the effects of all commands that create new changesets or propagate existing changesets into a repository. .. container:: verbose For example, the following commands are transactional, and their effects can be rolled back: - commit - import - pull - push (with this repository as the destination) - unbundle To avoid permanent data loss, rollback will refuse to rollback a commit transaction if it isn't checked out. Use --force to override this protection. This command is not intended for use on public repositories. Once changes are visible for pull by other users, rolling a transaction back locally is ineffective (someone else may already have pulled the changes). Furthermore, a race is possible with readers of the repository; for example an in-progress pull from the repository may fail if a rollback is performed. 
Returns 0 on success, 1 if no rollback data is available. """ return repo.rollback(dryrun=opts.get('dry_run'), force=opts.get('force')) @command('root', []) def root(ui, repo): """print the root (top) of the current working directory Print the root directory of the current repository. Returns 0 on success. """ ui.write(repo.root + "\n") @command('^serve', [('A', 'accesslog', '', _('name of access log file to write to'), _('FILE')), ('d', 'daemon', None, _('run server in background')), ('', 'daemon-pipefds', '', _('used internally by daemon mode'), _('FILE')), ('E', 'errorlog', '', _('name of error log file to write to'), _('FILE')), # use string type, then we can check if something was passed ('p', 'port', '', _('port to listen on (default: 8000)'), _('PORT')), ('a', 'address', '', _('address to listen on (default: all interfaces)'), _('ADDR')), ('', 'prefix', '', _('prefix path to serve from (default: server root)'), _('PREFIX')), ('n', 'name', '', _('name to show in web pages (default: working directory)'), _('NAME')), ('', 'web-conf', '', _('name of the hgweb config file (see "hg help hgweb")'), _('FILE')), ('', 'webdir-conf', '', _('name of the hgweb config file (DEPRECATED)'), _('FILE')), ('', 'pid-file', '', _('name of file to write process ID to'), _('FILE')), ('', 'stdio', None, _('for remote clients')), ('', 'cmdserver', '', _('for remote clients'), _('MODE')), ('t', 'templates', '', _('web templates to use'), _('TEMPLATE')), ('', 'style', '', _('template style to use'), _('STYLE')), ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')), ('', 'certificate', '', _('SSL certificate file'), _('FILE'))], _('[OPTION]...'), optionalrepo=True) def serve(ui, repo, **opts): """start stand-alone webserver Start a local HTTP repository browser and pull server. You can use this for ad-hoc sharing and browsing of repositories. It is recommended to use a real web server to serve a repository for longer periods of time. 
Please note that the server does not implement access control. This means that, by default, anybody can read from the server and nobody can write to it by default. Set the ``web.allow_push`` option to ``*`` to allow everybody to push to the server. You should use a real web server if you need to authenticate users. By default, the server logs accesses to stdout and errors to stderr. Use the -A/--accesslog and -E/--errorlog options to log to files. To have the server choose a free port number to listen on, specify a port number of 0; in this case, the server will print the port number it uses. Returns 0 on success. """ if opts["stdio"] and opts["cmdserver"]: raise error.Abort(_("cannot use --stdio with --cmdserver")) if opts["stdio"]: if repo is None: raise error.RepoError(_("there is no Mercurial repository here" " (.hg not found)")) s = sshserver.sshserver(ui, repo) s.serve_forever() if opts["cmdserver"]: service = commandserver.createservice(ui, repo, opts) else: service = hgweb.createservice(ui, repo, opts) return cmdutil.service(opts, initfn=service.init, runfn=service.run) @command('^status|st', [('A', 'all', None, _('show status of all files')), ('m', 'modified', None, _('show only modified files')), ('a', 'added', None, _('show only added files')), ('r', 'removed', None, _('show only removed files')), ('d', 'deleted', None, _('show only deleted (but tracked) files')), ('c', 'clean', None, _('show only files without changes')), ('u', 'unknown', None, _('show only unknown (not tracked) files')), ('i', 'ignored', None, _('show only ignored files')), ('n', 'no-status', None, _('hide status prefix')), ('C', 'copies', None, _('show source of copied files')), ('0', 'print0', None, _('end filenames with NUL, for use with xargs')), ('', 'rev', [], _('show difference from revision'), _('REV')), ('', 'change', '', _('list the changed files of a revision'), _('REV')), ] + walkopts + subrepoopts + formatteropts, _('[OPTION]... 
[FILE]...'), inferrepo=True) def status(ui, repo, *pats, **opts): """show changed files in the working directory Show status of files in the repository. If names are given, only files that match are shown. Files that are clean or ignored or the source of a copy/move operation, are not listed unless -c/--clean, -i/--ignored, -C/--copies or -A/--all are given. Unless options described with "show only ..." are given, the options -mardu are used. Option -q/--quiet hides untracked (unknown and ignored) files unless explicitly requested with -u/--unknown or -i/--ignored. .. note:: :hg:`status` may appear to disagree with diff if permissions have changed or a merge has occurred. The standard diff format does not report permission changes and diff only reports changes relative to one merge parent. If one revision is given, it is used as the base revision. If two revisions are given, the differences between them are shown. The --change option can also be used as a shortcut to list the changed files of a revision from its first parent. The codes used to show the status of files are:: M = modified A = added R = removed C = clean ! = missing (deleted by non-hg command, but still tracked) ? = not tracked I = ignored = origin of the previous file (with --copies) .. container:: verbose Examples: - show changes in the working directory relative to a changeset:: hg status --rev 9353 - show changes in the working directory relative to the current directory (see :hg:`help patterns` for more information):: hg status re: - show all changes including copies in an existing changeset:: hg status --copies --change 9353 - get a NUL separated list of added files, suitable for xargs:: hg status -an0 Returns 0 on success. 
""" revs = opts.get('rev') change = opts.get('change') if revs and change: msg = _('cannot specify --rev and --change at the same time') raise error.Abort(msg) elif change: node2 = scmutil.revsingle(repo, change, None).node() node1 = repo[node2].p1().node() else: node1, node2 = scmutil.revpair(repo, revs) if pats: cwd = repo.getcwd() else: cwd = '' if opts.get('print0'): end = '\0' else: end = '\n' copy = {} states = 'modified added removed deleted unknown ignored clean'.split() show = [k for k in states if opts.get(k)] if opts.get('all'): show += ui.quiet and (states[:4] + ['clean']) or states if not show: if ui.quiet: show = states[:4] else: show = states[:5] m = scmutil.match(repo[node2], pats, opts) stat = repo.status(node1, node2, m, 'ignored' in show, 'clean' in show, 'unknown' in show, opts.get('subrepos')) changestates = zip(states, 'MAR!?IC', stat) if (opts.get('all') or opts.get('copies') or ui.configbool('ui', 'statuscopies')) and not opts.get('no_status'): copy = copies.pathcopies(repo[node1], repo[node2], m) fm = ui.formatter('status', opts) fmt = '%s' + end showchar = not opts.get('no_status') for state, char, files in changestates: if state in show: label = 'status.' + state for f in files: fm.startitem() fm.condwrite(showchar, 'status', '%s ', char, label=label) fm.write('path', fmt, repo.pathto(f, cwd), label=label) if f in copy: fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd), label='status.copied') fm.end() @command('^summary|sum', [('', 'remote', None, _('check for push and pull'))], '[--remote]') def summary(ui, repo, **opts): """summarize working directory state This generates a brief summary of the working directory state, including parents, branch, commit status, phase and available updates. With the --remote option, this will check the default paths for incoming and outgoing changes. This can be time-consuming. Returns 0 on success. 
""" ctx = repo[None] parents = ctx.parents() pnode = parents[0].node() marks = [] for p in parents: # label with log.changeset (instead of log.parent) since this # shows a working directory parent *changeset*: # i18n: column positioning for "hg summary" ui.write(_('parent: %d:%s ') % (p.rev(), str(p)), label='log.changeset changeset.%s' % p.phasestr()) ui.write(' '.join(p.tags()), label='log.tag') if p.bookmarks(): marks.extend(p.bookmarks()) if p.rev() == -1: if not len(repo): ui.write(_(' (empty repository)')) else: ui.write(_(' (no revision checked out)')) ui.write('\n') if p.description(): ui.status(' ' + p.description().splitlines()[0].strip() + '\n', label='log.summary') branch = ctx.branch() bheads = repo.branchheads(branch) # i18n: column positioning for "hg summary" m = _('branch: %s\n') % branch if branch != 'default': ui.write(m, label='log.branch') else: ui.status(m, label='log.branch') if marks: active = repo._activebookmark # i18n: column positioning for "hg summary" ui.write(_('bookmarks:'), label='log.bookmark') if active is not None: if active in marks: ui.write(' *' + active, label=activebookmarklabel) marks.remove(active) else: ui.write(' [%s]' % active, label=activebookmarklabel) for m in marks: ui.write(' ' + m, label='log.bookmark') ui.write('\n', label='log.bookmark') status = repo.status(unknown=True) c = repo.dirstate.copies() copied, renamed = [], [] for d, s in c.iteritems(): if s in status.removed: status.removed.remove(s) renamed.append(d) else: copied.append(d) if d in status.added: status.added.remove(d) try: ms = mergemod.mergestate.read(repo) except error.UnsupportedMergeRecords as e: s = ' '.join(e.recordtypes) ui.warn( _('warning: merge state has unsupported record types: %s\n') % s) unresolved = 0 else: unresolved = [f for f in ms if ms[f] == 'u'] subs = [s for s in ctx.substate if ctx.sub(s).dirty()] labels = [(ui.label(_('%d modified'), 'status.modified'), status.modified), (ui.label(_('%d added'), 'status.added'), 
status.added), (ui.label(_('%d removed'), 'status.removed'), status.removed), (ui.label(_('%d renamed'), 'status.copied'), renamed), (ui.label(_('%d copied'), 'status.copied'), copied), (ui.label(_('%d deleted'), 'status.deleted'), status.deleted), (ui.label(_('%d unknown'), 'status.unknown'), status.unknown), (ui.label(_('%d unresolved'), 'resolve.unresolved'), unresolved), (ui.label(_('%d subrepos'), 'status.modified'), subs)] t = [] for l, s in labels: if s: t.append(l % len(s)) t = ', '.join(t) cleanworkdir = False if repo.vfs.exists('graftstate'): t += _(' (graft in progress)') if repo.vfs.exists('updatestate'): t += _(' (interrupted update)') elif len(parents) > 1: t += _(' (merge)') elif branch != parents[0].branch(): t += _(' (new branch)') elif (parents[0].closesbranch() and pnode in repo.branchheads(branch, closed=True)): t += _(' (head closed)') elif not (status.modified or status.added or status.removed or renamed or copied or subs): t += _(' (clean)') cleanworkdir = True elif pnode not in bheads: t += _(' (new branch head)') if parents: pendingphase = max(p.phase() for p in parents) else: pendingphase = phases.public if pendingphase > phases.newcommitphase(ui): t += ' (%s)' % phases.phasenames[pendingphase] if cleanworkdir: # i18n: column positioning for "hg summary" ui.status(_('commit: %s\n') % t.strip()) else: # i18n: column positioning for "hg summary" ui.write(_('commit: %s\n') % t.strip()) # all ancestors of branch heads - all ancestors of parent = new csets new = len(repo.changelog.findmissing([pctx.node() for pctx in parents], bheads)) if new == 0: # i18n: column positioning for "hg summary" ui.status(_('update: (current)\n')) elif pnode not in bheads: # i18n: column positioning for "hg summary" ui.write(_('update: %d new changesets (update)\n') % new) else: # i18n: column positioning for "hg summary" ui.write(_('update: %d new changesets, %d branch heads (merge)\n') % (new, len(bheads))) t = [] draft = len(repo.revs('draft()')) if draft: 
t.append(_('%d draft') % draft) secret = len(repo.revs('secret()')) if secret: t.append(_('%d secret') % secret) if draft or secret: ui.status(_('phases: %s\n') % ', '.join(t)) if obsolete.isenabled(repo, obsolete.createmarkersopt): for trouble in ("unstable", "divergent", "bumped"): numtrouble = len(repo.revs(trouble + "()")) # We write all the possibilities to ease translation troublemsg = { "unstable": _("unstable: %d changesets"), "divergent": _("divergent: %d changesets"), "bumped": _("bumped: %d changesets"), } if numtrouble > 0: ui.status(troublemsg[trouble] % numtrouble + "\n") cmdutil.summaryhooks(ui, repo) if opts.get('remote'): needsincoming, needsoutgoing = True, True else: needsincoming, needsoutgoing = False, False for i, o in cmdutil.summaryremotehooks(ui, repo, opts, None): if i: needsincoming = True if o: needsoutgoing = True if not needsincoming and not needsoutgoing: return def getincoming(): source, branches = hg.parseurl(ui.expandpath('default')) sbranch = branches[0] try: other = hg.peer(repo, {}, source) except error.RepoError: if opts.get('remote'): raise return source, sbranch, None, None, None revs, checkout = hg.addbranchrevs(repo, other, branches, None) if revs: revs = [other.lookup(rev) for rev in revs] ui.debug('comparing with %s\n' % util.hidepassword(source)) repo.ui.pushbuffer() commoninc = discovery.findcommonincoming(repo, other, heads=revs) repo.ui.popbuffer() return source, sbranch, other, commoninc, commoninc[1] if needsincoming: source, sbranch, sother, commoninc, incoming = getincoming() else: source = sbranch = sother = commoninc = incoming = None def getoutgoing(): dest, branches = hg.parseurl(ui.expandpath('default-push', 'default')) dbranch = branches[0] revs, checkout = hg.addbranchrevs(repo, repo, branches, None) if source != dest: try: dother = hg.peer(repo, {}, dest) except error.RepoError: if opts.get('remote'): raise return dest, dbranch, None, None ui.debug('comparing with %s\n' % util.hidepassword(dest)) elif 
sother is None: # there is no explicit destination peer, but source one is invalid return dest, dbranch, None, None else: dother = sother if (source != dest or (sbranch is not None and sbranch != dbranch)): common = None else: common = commoninc if revs: revs = [repo.lookup(rev) for rev in revs] repo.ui.pushbuffer() outgoing = discovery.findcommonoutgoing(repo, dother, onlyheads=revs, commoninc=common) repo.ui.popbuffer() return dest, dbranch, dother, outgoing if needsoutgoing: dest, dbranch, dother, outgoing = getoutgoing() else: dest = dbranch = dother = outgoing = None if opts.get('remote'): t = [] if incoming: t.append(_('1 or more incoming')) o = outgoing.missing if o: t.append(_('%d outgoing') % len(o)) other = dother or sother if 'bookmarks' in other.listkeys('namespaces'): counts = bookmarks.summary(repo, other) if counts[0] > 0: t.append(_('%d incoming bookmarks') % counts[0]) if counts[1] > 0: t.append(_('%d outgoing bookmarks') % counts[1]) if t: # i18n: column positioning for "hg summary" ui.write(_('remote: %s\n') % (', '.join(t))) else: # i18n: column positioning for "hg summary" ui.status(_('remote: (synced)\n')) cmdutil.summaryremotehooks(ui, repo, opts, ((source, sbranch, sother, commoninc), (dest, dbranch, dother, outgoing))) @command('tag', [('f', 'force', None, _('force tag')), ('l', 'local', None, _('make the tag local')), ('r', 'rev', '', _('revision to tag'), _('REV')), ('', 'remove', None, _('remove a tag')), # -l/--local is already there, commitopts cannot be used ('e', 'edit', None, _('invoke editor on commit messages')), ('m', 'message', '', _('use text as commit message'), _('TEXT')), ] + commitopts2, _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')) def tag(ui, repo, name1, *names, **opts): """add one or more tags for the current or given revision Name a particular revision using . 
Tags are used to name particular revisions of the repository and are very useful to compare different revisions, to go back to significant earlier versions or to mark branch points as releases, etc. Changing an existing tag is normally disallowed; use -f/--force to override. If no revision is given, the parent of the working directory is used. To facilitate version control, distribution, and merging of tags, they are stored as a file named ".hgtags" which is managed similarly to other project files and can be hand-edited if necessary. This also means that tagging creates a new commit. The file ".hg/localtags" is used for local tags (not shared among repositories). Tag commits are usually made at the head of a branch. If the parent of the working directory is not a branch head, :hg:`tag` aborts; use -f/--force to force the tag commit to be based on a non-head changeset. See :hg:`help dates` for a list of formats valid for -d/--date. Since tag names have priority over branch names during revision lookup, using an existing branch name as a tag name is discouraged. Returns 0 on success. """ wlock = lock = None try: wlock = repo.wlock() lock = repo.lock() rev_ = "." 
names = [t.strip() for t in (name1,) + names] if len(names) != len(set(names)): raise error.Abort(_('tag names must be unique')) for n in names: scmutil.checknewlabel(repo, n, 'tag') if not n: raise error.Abort(_('tag names cannot consist entirely of ' 'whitespace')) if opts.get('rev') and opts.get('remove'): raise error.Abort(_("--rev and --remove are incompatible")) if opts.get('rev'): rev_ = opts['rev'] message = opts.get('message') if opts.get('remove'): if opts.get('local'): expectedtype = 'local' else: expectedtype = 'global' for n in names: if not repo.tagtype(n): raise error.Abort(_("tag '%s' does not exist") % n) if repo.tagtype(n) != expectedtype: if expectedtype == 'global': raise error.Abort(_("tag '%s' is not a global tag") % n) else: raise error.Abort(_("tag '%s' is not a local tag") % n) rev_ = 'null' if not message: # we don't translate commit messages message = 'Removed tag %s' % ', '.join(names) elif not opts.get('force'): for n in names: if n in repo.tags(): raise error.Abort(_("tag '%s' already exists " "(use -f to force)") % n) if not opts.get('local'): p1, p2 = repo.dirstate.parents() if p2 != nullid: raise error.Abort(_('uncommitted merge')) bheads = repo.branchheads() if not opts.get('force') and bheads and p1 not in bheads: raise error.Abort(_('not at a branch head (use -f to force)')) r = scmutil.revsingle(repo, rev_).node() if not message: # we don't translate commit messages message = ('Added tag %s for changeset %s' % (', '.join(names), short(r))) date = opts.get('date') if date: date = util.parsedate(date) if opts.get('remove'): editform = 'tag.remove' else: editform = 'tag.add' editor = cmdutil.getcommiteditor(editform=editform, **opts) # don't allow tagging the null rev if (not opts.get('remove') and scmutil.revsingle(repo, rev_).rev() == nullrev): raise error.Abort(_("cannot tag null revision")) repo.tag(names, r, message, opts.get('local'), opts.get('user'), date, editor=editor) finally: release(lock, wlock) @command('tags', 
formatteropts, '') def tags(ui, repo, **opts): """list repository tags This lists both regular and local tags. When the -v/--verbose switch is used, a third column "local" is printed for local tags. When the -q/--quiet switch is used, only the tag name is printed. Returns 0 on success. """ fm = ui.formatter('tags', opts) hexfunc = fm.hexfunc tagtype = "" for t, n in reversed(repo.tagslist()): hn = hexfunc(n) label = 'tags.normal' tagtype = '' if repo.tagtype(t) == 'local': label = 'tags.local' tagtype = 'local' fm.startitem() fm.write('tag', '%s', t, label=label) fmt = " " * (30 - encoding.colwidth(t)) + ' %5d:%s' fm.condwrite(not ui.quiet, 'rev node', fmt, repo.changelog.rev(n), hn, label=label) fm.condwrite(ui.verbose and tagtype, 'type', ' %s', tagtype, label=label) fm.plain('\n') fm.end() @command('tip', [('p', 'patch', None, _('show patch')), ('g', 'git', None, _('use git extended diff format')), ] + templateopts, _('[-p] [-g]')) def tip(ui, repo, **opts): """show the tip revision (DEPRECATED) The tip revision (usually just called the tip) is the changeset most recently added to the repository (and therefore the most recently changed head). If you have just made a commit, that commit will be the tip. If you have just pulled changes from another repository, the tip of that repository becomes the current tip. The "tip" tag is special and cannot be renamed or assigned to a different changeset. This command is deprecated, please use :hg:`heads` instead. Returns 0 on success. """ displayer = cmdutil.show_changeset(ui, repo, opts) displayer.show(repo['tip']) displayer.close() @command('unbundle', [('u', 'update', None, _('update to new branch head if changesets were unbundled'))], _('[-u] FILE...')) def unbundle(ui, repo, fname1, *fnames, **opts): """apply one or more changegroup files Apply one or more compressed changegroup files generated by the bundle command. Returns 0 on success, 1 if an update has unresolved files. 
""" fnames = (fname1,) + fnames with repo.lock(): for fname in fnames: f = hg.openpath(ui, fname) gen = exchange.readbundle(ui, f, fname) if isinstance(gen, bundle2.unbundle20): tr = repo.transaction('unbundle') try: op = bundle2.applybundle(repo, gen, tr, source='unbundle', url='bundle:' + fname) tr.close() except error.BundleUnknownFeatureError as exc: raise error.Abort(_('%s: unknown bundle feature, %s') % (fname, exc), hint=_("see https://mercurial-scm.org/" "wiki/BundleFeature for more " "information")) finally: if tr: tr.release() changes = [r.get('return', 0) for r in op.records['changegroup']] modheads = changegroup.combineresults(changes) elif isinstance(gen, streamclone.streamcloneapplier): raise error.Abort( _('packed bundles cannot be applied with ' '"hg unbundle"'), hint=_('use "hg debugapplystreamclonebundle"')) else: modheads = gen.apply(repo, 'unbundle', 'bundle:' + fname) return postincoming(ui, repo, modheads, opts.get('update'), None) @command('^update|up|checkout|co', [('C', 'clean', None, _('discard uncommitted changes (no backup)')), ('c', 'check', None, _('update across branches if no uncommitted changes')), ('d', 'date', '', _('tipmost revision matching date'), _('DATE')), ('r', 'rev', '', _('revision'), _('REV')) ] + mergetoolopts, _('[-c] [-C] [-d DATE] [[-r] REV]')) def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False, tool=None): """update working directory (or switch revisions) Update the repository's working directory to the specified changeset. If no changeset is specified, update to the tip of the current named branch and move the active bookmark (see :hg:`help bookmarks`). Update sets the working directory's parent revision to the specified changeset (see :hg:`help parents`). If the changeset is not a descendant or ancestor of the working directory's parent, the update is aborted. 
With the -c/--check option, the working directory is checked for uncommitted changes; if none are found, the working directory is updated to the specified changeset. .. container:: verbose The following rules apply when the working directory contains uncommitted changes: 1. If neither -c/--check nor -C/--clean is specified, and if the requested changeset is an ancestor or descendant of the working directory's parent, the uncommitted changes are merged into the requested changeset and the merged result is left uncommitted. If the requested changeset is not an ancestor or descendant (that is, it is on another branch), the update is aborted and the uncommitted changes are preserved. 2. With the -c/--check option, the update is aborted and the uncommitted changes are preserved. 3. With the -C/--clean option, uncommitted changes are discarded and the working directory is updated to the requested changeset. To cancel an uncommitted merge (and lose your changes), use :hg:`update --clean .`. Use null as the changeset to remove the working directory (like :hg:`clone -U`). If you want to revert just one file to an older revision, use :hg:`revert [-r REV] NAME`. See :hg:`help dates` for a list of formats valid for -d/--date. Returns 0 on success, 1 if there are unresolved files. 
""" movemarkfrom = None if rev and node: raise error.Abort(_("please specify just one revision")) if rev is None or rev == '': rev = node with repo.wlock(): cmdutil.clearunfinished(repo) if date: if rev is not None: raise error.Abort(_("you can't specify a revision and a date")) rev = cmdutil.finddate(ui, repo, date) # if we defined a bookmark, we have to remember the original name brev = rev rev = scmutil.revsingle(repo, rev, rev).rev() if check and clean: raise error.Abort(_("cannot specify both -c/--check and -C/--clean") ) if check: cmdutil.bailifchanged(repo, merge=False) if rev is None: updata = destutil.destupdate(repo, clean=clean, check=check) rev, movemarkfrom, brev = updata repo.ui.setconfig('ui', 'forcemerge', tool, 'update') if clean: ret = hg.clean(repo, rev) else: ret = hg.update(repo, rev) if not ret and movemarkfrom: if movemarkfrom == repo['.'].node(): pass # no-op update elif bookmarks.update(repo, [movemarkfrom], repo['.'].node()): ui.status(_("updating bookmark %s\n") % repo._activebookmark) else: # this can happen with a non-linear update ui.status(_("(leaving bookmark %s)\n") % repo._activebookmark) bookmarks.deactivate(repo) elif brev in repo._bookmarks: bookmarks.activate(repo, brev) ui.status(_("(activating bookmark %s)\n") % brev) elif brev: if repo._activebookmark: ui.status(_("(leaving bookmark %s)\n") % repo._activebookmark) bookmarks.deactivate(repo) return ret @command('verify', []) def verify(ui, repo): """verify the integrity of the repository Verify the integrity of the current repository. This will perform an extensive check of the repository's integrity, validating the hashes and checksums of each entry in the changelog, manifest, and tracked files, as well as the integrity of their crosslinks and indices. Please see https://mercurial-scm.org/wiki/RepositoryCorruption for more information about recovery from corruption of the repository. Returns 0 on success, 1 if errors are encountered. 
""" return hg.verify(repo) @command('version', [], norepo=True) def version_(ui): """output version and copyright information""" ui.write(_("Mercurial Distributed SCM (version %s)\n") % util.version()) ui.status(_( "(see https://mercurial-scm.org for more information)\n" "\nCopyright (C) 2005-2016 Matt Mackall and others\n" "This is free software; see the source for copying conditions. " "There is NO\nwarranty; " "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" )) ui.note(_("\nEnabled extensions:\n\n")) if ui.verbose: # format names and versions into columns names = [] vers = [] for name, module in extensions.extensions(): names.append(name) vers.append(extensions.moduleversion(module)) if names: maxnamelen = max(len(n) for n in names) for i, name in enumerate(names): ui.write(" %-*s %s\n" % (maxnamelen, name, vers[i])) mercurial-3.7.3/mercurial/mail.py0000644000175000017500000002774512676531525016425 0ustar mpmmpm00000000000000# mail.py - mail sending bits for mercurial # # Copyright 2006 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import, print_function import email import os import quopri import smtplib import socket import sys import time from .i18n import _ from . import ( encoding, error, sslutil, util, ) _oldheaderinit = email.Header.Header.__init__ def _unifiedheaderinit(self, *args, **kw): """ Python 2.7 introduces a backwards incompatible change (Python issue1974, r70772) in email.Generator.Generator code: pre-2.7 code passed "continuation_ws='\t'" to the Header constructor, and 2.7 removed this parameter. 
class STARTTLS(smtplib.SMTP):
    '''Derived class to verify the peer certificate for STARTTLS.

    This class allows to pass any keyword arguments to SSL socket creation.
    '''
    def __init__(self, sslkwargs, **kwargs):
        smtplib.SMTP.__init__(self, **kwargs)
        # extra keyword arguments forwarded to sslutil.wrapsocket()
        self._sslkwargs = sslkwargs

    def starttls(self, keyfile=None, certfile=None):
        # mirrors smtplib.SMTP.starttls, but wraps the socket through
        # sslutil so the peer certificate can be verified
        if not self.has_extn("starttls"):
            msg = "STARTTLS extension not supported by server"
            raise smtplib.SMTPException(msg)
        (resp, reply) = self.docmd("STARTTLS")
        if resp == 220:
            self.sock = sslutil.wrapsocket(self.sock, keyfile, certfile,
                                           **self._sslkwargs)
            self.file = smtplib.SSLFakeFile(self.sock)
            # RFC 3207 requires forgetting all pre-TLS EHLO/HELO state
            self.helo_resp = None
            self.ehlo_resp = None
            self.esmtp_features = {}
            self.does_esmtp = 0
        return (resp, reply)
def _smtp(ui):
    '''build an smtp connection and return a function to send mail'''
    local_hostname = ui.config('smtp', 'local_hostname')
    tls = ui.config('smtp', 'tls', 'none')
    # backward compatible: when tls = true, we use starttls.
    starttls = tls == 'starttls' or util.parsebool(tls)
    smtps = tls == 'smtps'
    if (starttls or smtps) and not util.safehasattr(socket, 'ssl'):
        raise error.Abort(_("can't use TLS: Python SSL support not installed"))
    mailhost = ui.config('smtp', 'host')
    if not mailhost:
        raise error.Abort(_('smtp.host not configured - cannot send mail'))
    # smtp.verifycert: 'strict', 'loose', or any false boolean to disable
    verifycert = ui.config('smtp', 'verifycert', 'strict')
    if verifycert not in ['strict', 'loose']:
        if util.parsebool(verifycert) is not False:
            raise error.Abort(_('invalid smtp.verifycert configuration: %s')
                             % (verifycert))
        verifycert = False
    if (starttls or smtps) and verifycert:
        sslkwargs = sslutil.sslkwargs(ui, mailhost)
    else:
        # 'ui' is required by sslutil.wrapsocket() and set by sslkwargs()
        sslkwargs = {'ui': ui}
    if smtps:
        ui.note(_('(using smtps)\n'))
        s = SMTPS(sslkwargs, local_hostname=local_hostname)
    elif starttls:
        s = STARTTLS(sslkwargs, local_hostname=local_hostname)
    else:
        s = smtplib.SMTP(local_hostname=local_hostname)
    # default port depends on transport: 465 for SMTPS, 25 otherwise
    if smtps:
        defaultport = 465
    else:
        defaultport = 25
    mailport = util.getport(ui.config('smtp', 'port', defaultport))
    ui.note(_('sending mail: smtp host %s, port %d\n') %
            (mailhost, mailport))
    s.connect(host=mailhost, port=mailport)
    if starttls:
        ui.note(_('(using starttls)\n'))
        # EHLO both before (to discover STARTTLS) and after (RFC 3207)
        s.ehlo()
        s.starttls()
        s.ehlo()
    if (starttls or smtps) and verifycert:
        ui.note(_('(verifying remote certificate)\n'))
        sslutil.validator(ui, mailhost)(s.sock, verifycert == 'strict')
    username = ui.config('smtp', 'username')
    password = ui.config('smtp', 'password')
    if username and not password:
        # prompt interactively when only a username is configured
        password = ui.getpass()
    if username and password:
        ui.note(_('(authenticating to mail server as %s)\n') %
                (username))
        try:
            s.login(username, password)
        except smtplib.SMTPException as inst:
            raise error.Abort(inst)

    def send(sender, recipients, msg):
        # closure over the live connection; translates smtplib errors
        # into user-facing Abort messages
        try:
            return s.sendmail(sender, recipients, msg)
        except smtplib.SMTPRecipientsRefused as inst:
            recipients = [r[1] for r in inst.recipients.values()]
            raise error.Abort('\n' + '\n'.join(recipients))
        except smtplib.SMTPException as inst:
            raise error.Abort(inst)

    return send
def sendmail(ui, sender, recipients, msg, mbox=None):
    # convenience wrapper: build a connection and send one message
    send = connect(ui, mbox=mbox)
    return send(sender, recipients, msg)

def validateconfig(ui):
    '''determine if we have enough config data to try sending email.'''
    method = ui.config('email', 'method', 'smtp')
    if method == 'smtp':
        if not ui.config('smtp', 'host'):
            raise error.Abort(_('smtp specified as email transport, '
                               'but no smtp host configured'))
    else:
        # any other method is treated as an external sendmail-like program
        # that must be reachable via PATH
        if not util.findexe(method):
            raise error.Abort(_('%r specified as email transport, '
                               'but not in PATH') % method)

def mimetextpatch(s, subtype='plain', display=False):
    '''Return MIME message suitable for a patch.
    Charset will be detected as utf-8 or (possibly fake) us-ascii.
    Transfer encodings will be used if necessary.'''
    cs = 'us-ascii'
    if not display:
        # probe the narrowest charset first; fall back to utf-8
        try:
            s.decode('us-ascii')
        except UnicodeDecodeError:
            try:
                s.decode('utf-8')
                cs = 'utf-8'
            except UnicodeDecodeError:
                # We'll go with us-ascii as a fallback.
                pass
    return mimetextqp(s, subtype, cs)
def _charsets(ui):
    '''Obtains charsets to send mail parts not containing patches.'''
    charsets = [cs.lower() for cs in ui.configlist('email', 'charsets')]
    fallbacks = [encoding.fallbackencoding.lower(),
                 encoding.encoding.lower(), 'utf-8']
    for cs in fallbacks: # find unique charsets while keeping order
        if cs not in charsets:
            charsets.append(cs)
    # ascii variants are dropped: plain ascii needs no conversion
    return [cs for cs in charsets if not cs.endswith('ascii')]

def _encode(ui, s, charsets):
    '''Returns (converted) string, charset tuple.
    Finds out best charset by cycling through sendcharsets in descending
    order. Tries both encoding and fallbackencoding for input. Only as
    last resort send as is in fake ascii.
    Caveat: Do not use for mail parts containing patches!'''
    try:
        # pure ascii needs no conversion at all
        s.decode('ascii')
    except UnicodeDecodeError:
        sendcharsets = charsets or _charsets(ui)
        # decode with the local encodings, then re-encode with the first
        # output charset that can represent the text
        for ics in (encoding.encoding, encoding.fallbackencoding):
            try:
                u = s.decode(ics)
            except UnicodeDecodeError:
                continue
            for ocs in sendcharsets:
                try:
                    return u.encode(ocs), ocs
                except UnicodeEncodeError:
                    pass
                except LookupError:
                    # user configured a charset Python doesn't know
                    ui.warn(_('ignoring invalid sendcharset: %s\n') % ocs)
    # if ascii, or all conversion attempts fail, send (broken) ascii
    return s, 'us-ascii'
def _addressencode(ui, name, addr, charsets=None):
    # display name may need RFC 2047 encoding; the address itself must
    # stay ascii (IDNA-encoding the domain if necessary)
    name = headencode(ui, name, charsets)
    try:
        acc, dom = addr.split('@')
        acc = acc.encode('ascii')
        # non-ascii domains are converted via IDNA
        dom = dom.decode(encoding.encoding).encode('idna')
        addr = '%s@%s' % (acc, dom)
    except UnicodeDecodeError:
        raise error.Abort(_('invalid email address: %s') % addr)
    except ValueError:
        # no '@' present: accept as-is if it is plain ascii
        try:
            # too strict?
            addr = addr.encode('ascii')
        except UnicodeDecodeError:
            raise error.Abort(_('invalid local address: %s') % addr)
    return email.Utils.formataddr((name, addr))

def addressencode(ui, address, charsets=None, display=False):
    '''Turns address into RFC-2047 compliant header.'''
    if display or not address:
        return address or ''
    name, addr = email.Utils.parseaddr(address)
    return _addressencode(ui, name, addr, charsets)

def addrlistencode(ui, addrs, charsets=None, display=False):
    '''Turns a list of addresses into a list of RFC-2047 compliant
    headers. A single element of input list may contain multiple
    addresses, but output always has one address per item'''
    if display:
        return [a.strip() for a in addrs if a.strip()]

    result = []
    for name, addr in email.Utils.getaddresses(addrs):
        if name or addr:
            result.append(_addressencode(ui, name, addr, charsets))
    return result

def mimeencode(ui, s, charsets=None, display=False):
    '''creates mime text object, encodes it if needed, and sets
    charset and transfer-encoding accordingly.'''
    cs = 'us-ascii'
    if not display:
        s, cs = _encode(ui, s, charsets)
    return mimetextqp(s, 'plain', cs)
def systemrcpath():
    '''return default os-specific hgrc search path'''
    rcpath = []
    filename = util.executablepath()
    # Use mercurial.ini found in directory with hg.exe
    progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
    rcpath.append(progrc)
    # Use hgrc.d found in directory with hg.exe
    progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d')
    if os.path.isdir(progrcd):
        for f, kind in osutil.listdir(progrcd):
            if f.endswith('.rc'):
                rcpath.append(os.path.join(progrcd, f))
    # else look for a system rcpath in the registry
    value = util.lookupreg('SOFTWARE\\Mercurial', None,
                           _winreg.HKEY_LOCAL_MACHINE)
    if not isinstance(value, str) or not value:
        return rcpath
    # the registry value is a path-separator-delimited list of ini files
    # and directories to scan for *.rc files
    value = util.localpath(value)
    for p in value.split(os.pathsep):
        if p.lower().endswith('mercurial.ini'):
            rcpath.append(p)
        elif os.path.isdir(p):
            for f, kind in osutil.listdir(p):
                if f.endswith('.rc'):
                    rcpath.append(os.path.join(p, f))
    return rcpath

def userrcpath():
    '''return os-specific hgrc search path to the user dir'''
    home = os.path.expanduser('~')
    path = [os.path.join(home, 'mercurial.ini'),
            os.path.join(home, '.hgrc')]
    # also honor per-user config under %USERPROFILE% when it differs
    # from the expanded home directory
    userprofile = os.environ.get('USERPROFILE')
    if userprofile and userprofile != home:
        path.append(os.path.join(userprofile, 'mercurial.ini'))
        path.append(os.path.join(userprofile, '.hgrc'))
    return path
mercurial-3.7.3/mercurial/dummycert.pem0000644000175000017500000000430612676531525017631 0ustar mpmmpm00000000000000A dummy certificate that will make OS X 10.6+ Python use the system CA certificate store: -----BEGIN CERTIFICATE----- MIIBIzCBzgIJANjmj39sb3FmMA0GCSqGSIb3DQEBBQUAMBkxFzAVBgNVBAMTDmhn LmV4YW1wbGUuY29tMB4XDTE0MDgzMDA4NDU1OVoXDTE0MDgyOTA4NDU1OVowGTEX MBUGA1UEAxMOaGcuZXhhbXBsZS5jb20wXDANBgkqhkiG9w0BAQEFAANLADBIAkEA mh/ZySGlcq0ALNLmA1gZqt61HruywPrRk6WyrLJRgt+X7OP9FFlEfl2tzHfzqvmK CtSQoPINWOdAJMekBYFgKQIDAQABMA0GCSqGSIb3DQEBBQUAA0EAF9h49LkSqJ6a IlpogZuUHtihXeKZBsiktVIDlDccYsNy0RSh9XxUfhk+XMLw8jBlYvcltSXdJ7We aKdQRekuMQ== -----END CERTIFICATE----- This certificate was generated to be syntactically valid but never be usable; it expired before it became valid. Created as: $ cat > cn.conf << EOT > [req] > distinguished_name = req_distinguished_name > [req_distinguished_name] > commonName = Common Name > commonName_default = no.example.com > EOT $ openssl req -nodes -new -x509 -keyout /dev/null \ > -out dummycert.pem -days -1 -config cn.conf -subj '/CN=hg.example.com' To verify the content of this certificate: $ openssl x509 -in dummycert.pem -noout -text Certificate: Data: Version: 1 (0x0) Serial Number: 15629337334278746470 (0xd8e68f7f6c6f7166) Signature Algorithm: sha1WithRSAEncryption Issuer: CN=hg.example.com Validity Not Before: Aug 30 08:45:59 2014 GMT Not After : Aug 29 08:45:59 2014 GMT Subject: CN=hg.example.com Subject Public Key Info: Public Key Algorithm: rsaEncryption Public-Key: (512 bit) Modulus: 00:9a:1f:d9:c9:21:a5:72:ad:00:2c:d2:e6:03:58: 19:aa:de:b5:1e:bb:b2:c0:fa:d1:93:a5:b2:ac:b2: 51:82:df:97:ec:e3:fd:14:59:44:7e:5d:ad:cc:77: f3:aa:f9:8a:0a:d4:90:a0:f2:0d:58:e7:40:24:c7: a4:05:81:60:29 Exponent: 65537 (0x10001) Signature Algorithm: sha1WithRSAEncryption 17:d8:78:f4:b9:12:a8:9e:9a:22:5a:68:81:9b:94:1e:d8:a1: 5d:e2:99:06:c8:a4:b5:52:03:94:37:1c:62:c3:72:d1:14:a1: f5:7c:54:7e:19:3e:5c:c2:f0:f2:30:65:62:f7:25:b5:25:dd: 
27:b5:9e:68:a7:50:45:e9:2e:31 mercurial-3.7.3/mercurial/tags.py0000644000175000017500000004743312676531525016435 0ustar mpmmpm00000000000000# tags.py - read tag info from local repository # # Copyright 2009 Matt Mackall # Copyright 2009 Greg Ward # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. # Currently this module only deals with reading and caching tags. # Eventually, it could take care of updating (adding/removing/moving) # tags too. from __future__ import absolute_import import array import errno import time from .i18n import _ from .node import ( bin, hex, nullid, short, ) from . import ( encoding, error, util, ) array = array.array # Tags computation can be expensive and caches exist to make it fast in # the common case. # # The "hgtagsfnodes1" cache file caches the .hgtags filenode values for # each revision in the repository. The file is effectively an array of # fixed length records. Read the docs for "hgtagsfnodescache" for technical # details. # # The .hgtags filenode cache grows in proportion to the length of the # changelog. The file is truncated when the # changelog is stripped. # # The purpose of the filenode cache is to avoid the most expensive part # of finding global tags, which is looking up the .hgtags filenode in the # manifest for each head. This can take dozens or over 100ms for # repositories with very large manifests. Multiplied by dozens or even # hundreds of heads and there is a significant performance concern. # # There also exist a separate cache file for each repository filter. # These "tags-*" files store information about the history of tags. # # The tags cache files consists of a cache validation line followed by # a history of tags. # # The cache validation line has the format: # # [] # # is an integer revision and is a 40 character hex # node for that changeset. 
def findglobaltags(ui, repo, alltags, tagtypes):
    '''Find global tags in a repo.

    "alltags" maps tag name to (node, hist) 2-tuples.

    "tagtypes" maps tag name to tag type. Global tags always have the
    "global" tag type.

    The "alltags" and "tagtypes" dicts are updated in place. Empty dicts
    should be passed in.

    The tags cache is read and updated as a side-effect of calling.
    '''
    # This is so we can be lazy and assume alltags contains only global
    # tags when we pass it to _writetagcache().
    assert len(alltags) == len(tagtypes) == 0, \
        "findglobaltags() should be called first"

    (heads, tagfnode, valid, cachetags, shouldwrite) = _readtagcache(ui, repo)
    if cachetags is not None:
        # fully up-to-date cache: use it directly, nothing to write back
        assert not shouldwrite
        # XXX is this really 100% correct?  are there oddball special
        # cases where a global tag should outrank a local tag but won't,
        # because cachetags does not contain rank info?
        _updatetags(cachetags, 'global', alltags, tagtypes)
        return

    seen = set() # set of fnode
    fctx = None
    for head in reversed(heads): # oldest to newest
        assert head in repo.changelog.nodemap, \
            "tag cache returned bogus head %s" % short(head)

        fnode = tagfnode.get(head)
        if fnode and fnode not in seen:
            seen.add(fnode)
            # reuse the previous filectx when possible to avoid
            # re-resolving '.hgtags' from scratch for every head
            if not fctx:
                fctx = repo.filectx('.hgtags', fileid=fnode)
            else:
                fctx = fctx.filectx(fnode)

            filetags = _readtags(ui, repo, fctx.data().splitlines(), fctx)
            _updatetags(filetags, 'global', alltags, tagtypes)

    # and update the cache (if necessary)
    if shouldwrite:
        _writetagcache(ui, repo, valid, alltags)

def readlocaltags(ui, repo, alltags, tagtypes):
    '''Read local tags in repo. Update alltags and tagtypes.'''
    try:
        data = repo.vfs.read("localtags")
    except IOError as inst:
        # a missing localtags file simply means no local tags
        if inst.errno != errno.ENOENT:
            raise
        return

    # localtags is in the local encoding; re-encode to UTF-8 on
    # input for consistency with the rest of this module.
    filetags = _readtags(
        ui, repo, data.splitlines(), "localtags",
        recode=encoding.fromlocal)

    # remove tags pointing to invalid nodes
    cl = repo.changelog
    for t in filetags.keys():
        try:
            cl.rev(filetags[t][0])
        except (LookupError, ValueError):
            del filetags[t]

    _updatetags(filetags, "local", alltags, tagtypes)
When calcnodelines is False the hextaglines dict is not calculated (an empty dict is returned). This is done to improve this function's performance in cases where the line numbers are not needed. ''' bintaghist = util.sortdict() hextaglines = util.sortdict() count = 0 def warn(msg): ui.warn(_("%s, line %s: %s\n") % (fn, count, msg)) for nline, line in enumerate(lines): count += 1 if not line: continue try: (nodehex, name) = line.split(" ", 1) except ValueError: warn(_("cannot parse entry")) continue name = name.strip() if recode: name = recode(name) try: nodebin = bin(nodehex) except TypeError: warn(_("node '%s' is not well formed") % nodehex) continue # update filetags if calcnodelines: # map tag name to a list of line numbers if name not in hextaglines: hextaglines[name] = [] hextaglines[name].append([nodehex, nline]) continue # map tag name to (node, hist) if name not in bintaghist: bintaghist[name] = [] bintaghist[name].append(nodebin) return bintaghist, hextaglines def _readtags(ui, repo, lines, fn, recode=None, calcnodelines=False): '''Read tag definitions from a file (or any source of lines). Returns a mapping from tag name to (node, hist). "node" is the node id from the last line read for that name. "hist" is the list of node ids previously associated with it (in file order). All node ids are binary, not hex. ''' filetags, nodelines = _readtaghist(ui, repo, lines, fn, recode=recode, calcnodelines=calcnodelines) # util.sortdict().__setitem__ is much slower at replacing then inserting # new entries. The difference can matter if there are thousands of tags. # Create a new sortdict to avoid the performance penalty. 
newtags = util.sortdict() for tag, taghist in filetags.items(): newtags[tag] = (taghist[-1], taghist[:-1]) return newtags def _updatetags(filetags, tagtype, alltags, tagtypes): '''Incorporate the tag info read from one file into the two dictionaries, alltags and tagtypes, that contain all tag info (global across all heads plus local).''' for name, nodehist in filetags.iteritems(): if name not in alltags: alltags[name] = nodehist tagtypes[name] = tagtype continue # we prefer alltags[name] if: # it supersedes us OR # mutual supersedes and it has a higher rank # otherwise we win because we're tip-most anode, ahist = nodehist bnode, bhist = alltags[name] if (bnode != anode and anode in bhist and (bnode not in ahist or len(bhist) > len(ahist))): anode = bnode else: tagtypes[name] = tagtype ahist.extend([n for n in bhist if n not in ahist]) alltags[name] = anode, ahist def _filename(repo): """name of a tagcache file for a given repo or repoview""" filename = 'cache/tags2' if repo.filtername: filename = '%s-%s' % (filename, repo.filtername) return filename def _readtagcache(ui, repo): '''Read the tag cache. Returns a tuple (heads, fnodes, validinfo, cachetags, shouldwrite). If the cache is completely up-to-date, "cachetags" is a dict of the form returned by _readtags() and "heads", "fnodes", and "validinfo" are None and "shouldwrite" is False. If the cache is not up to date, "cachetags" is None. "heads" is a list of all heads currently in the repository, ordered from tip to oldest. "validinfo" is a tuple describing cache validation info. This is used when writing the tags cache. "fnodes" is a mapping from head to .hgtags filenode. "shouldwrite" is True. If the cache is not up to date, the caller is responsible for reading tag info from each returned head. (See findglobaltags().) ''' from . 
import scmutil # avoid cycle try: cachefile = repo.vfs(_filename(repo), 'r') # force reading the file for static-http cachelines = iter(cachefile) except IOError: cachefile = None cacherev = None cachenode = None cachehash = None if cachefile: try: validline = cachelines.next() validline = validline.split() cacherev = int(validline[0]) cachenode = bin(validline[1]) if len(validline) > 2: cachehash = bin(validline[2]) except Exception: # corruption of the cache, just recompute it. pass tipnode = repo.changelog.tip() tiprev = len(repo.changelog) - 1 # Case 1 (common): tip is the same, so nothing has changed. # (Unchanged tip trivially means no changesets have been added. # But, thanks to localrepository.destroyed(), it also means none # have been destroyed by strip or rollback.) if (cacherev == tiprev and cachenode == tipnode and cachehash == scmutil.filteredhash(repo, tiprev)): tags = _readtags(ui, repo, cachelines, cachefile.name) cachefile.close() return (None, None, None, tags, False) if cachefile: cachefile.close() # ignore rest of file valid = (tiprev, tipnode, scmutil.filteredhash(repo, tiprev)) repoheads = repo.heads() # Case 2 (uncommon): empty repo; get out quickly and don't bother # writing an empty cache. if repoheads == [nullid]: return ([], {}, valid, {}, False) # Case 3 (uncommon): cache file missing or empty. # Case 4 (uncommon): tip rev decreased. This should only happen # when we're called from localrepository.destroyed(). Refresh the # cache so future invocations will not see disappeared heads in the # cache. # Case 5 (common): tip has changed, so we've added/replaced heads. # As it happens, the code to handle cases 3, 4, 5 is the same. # N.B. in case 4 (nodes destroyed), "new head" really means "newly # exposed". if not len(repo.file('.hgtags')): # No tags have ever been committed, so we can avoid a # potentially expensive search. 
def _writetagcache(ui, repo, valid, cachetags):
    """Write the tags cache file for "repo".

    "valid" is the (tiprev, tipnode, filteredhash) validation tuple that
    identifies the repository state the cache was computed against.
    "cachetags" maps tag name -> (node, hist) as returned by _readtags().

    Failures to open or close the cache file are silently ignored: the
    cache is an optimization, never a source of truth.
    """
    filename = _filename(repo)
    try:
        cachefile = repo.vfs(filename, 'w', atomictemp=True)
    except (OSError, IOError):
        # best-effort: give up quietly if the cache cannot be written
        return

    ui.log('tagscache', 'writing .hg/%s with %d tags\n',
           filename, len(cachetags))

    # First line records the validation info; the filtered hash is only
    # present when there is one (see _readtagcache's parsing of it).
    if valid[2]:
        cachefile.write('%d %s %s\n' %
                        (valid[0], hex(valid[1]), hex(valid[2])))
    else:
        cachefile.write('%d %s\n' % (valid[0], hex(valid[1])))

    # Tag names in the cache are in UTF-8 -- which is the whole reason
    # we keep them in UTF-8 throughout this module. If we converted
    # them local encoding on input, we would lose info writing them to
    # the cache.
    # History entries are written before the current node so that
    # _readtags(), which keeps the last line per name as "node", rebuilds
    # the same (node, hist) pair on read.
    for (name, (node, hist)) in sorted(cachetags.iteritems()):
        for n in hist:
            cachefile.write("%s %s\n" % (hex(n), name))
        cachefile.write("%s %s\n" % (hex(node), name))
    try:
        cachefile.close()
    except (OSError, IOError):
        # again best-effort; a half-written cache is recomputed next time
        pass
    def __init__(self, repo):
        """Load (or initialize) the on-disk fnodes cache for "repo".

        "repo" must be an unfiltered repository: record offsets are
        computed directly from revision numbers, which only line up on
        the unfiltered view.
        """
        assert repo.filtername is None

        self._repo = repo

        # Only for reporting purposes.
        self.lookupcount = 0
        self.hitcount = 0

        # Raw cache bytes; tryread() returns '' when the file is missing.
        self._raw = array('c')
        data = repo.vfs.tryread(_fnodescachefile)
        self._raw.fromstring(data)

        # The end state of self._raw is an array that is of the exact length
        # required to hold a record for every revision in the repository.
        # We truncate or extend the array as necessary. self._dirtyoffset is
        # defined to be the start offset at which we need to write the output
        # file. This offset is also adjusted when new entries are calculated
        # for array members.
        cllen = len(repo.changelog)
        wantedlen = cllen * _fnodesrecsize
        rawlen = len(self._raw)

        self._dirtyoffset = None

        if rawlen < wantedlen:
            # New revisions appeared: pad with the "missing" marker and
            # remember that everything from the old end is dirty.
            self._dirtyoffset = rawlen
            self._raw.extend('\xff' * (wantedlen - rawlen))
        elif rawlen > wantedlen:
            # There's no easy way to truncate array instances. This seems
            # slightly less evil than copying a potentially large array slice.
            for i in range(rawlen - wantedlen):
                self._raw.pop()
            self._dirtyoffset = len(self._raw)
    def write(self):
        """Perform all necessary writes to cache file.

        This may no-op if no writes are needed or if a write lock could
        not be obtained.

        Only the tail of the cache starting at self._dirtyoffset is
        written; on success self._dirtyoffset is reset to None.
        """
        if self._dirtyoffset is None:
            # nothing changed since load/last write
            return

        data = self._raw[self._dirtyoffset:]
        if not data:
            return

        repo = self._repo

        try:
            # Non-blocking: skipping the write is cheaper than waiting,
            # since the cache can be regenerated on the next run.
            lock = repo.wlock(wait=False)
        except error.LockError:
            repo.ui.log('tagscache',
                        'not writing .hg/%s because lock cannot be acquired\n' %
                        (_fnodescachefile))
            return

        try:
            f = repo.vfs.open(_fnodescachefile, 'ab')
            try:
                # if the file has been truncated
                # (append mode positions us at the current end of file, so
                # tell() reveals the real on-disk length)
                actualoffset = f.tell()
                if actualoffset < self._dirtyoffset:
                    self._dirtyoffset = actualoffset
                    data = self._raw[self._dirtyoffset:]
                f.seek(self._dirtyoffset)
                # drop any stale bytes past the dirty offset before rewriting
                f.truncate()
                repo.ui.log('tagscache',
                            'writing %d bytes to %s\n' % (
                            len(data), _fnodescachefile))
                f.write(data)
                self._dirtyoffset = None
            finally:
                f.close()
        except (IOError, OSError) as inst:
            # best-effort cache: log and carry on
            repo.ui.log('tagscache', "couldn't write %s: %s\n" % (
                _fnodescachefile, inst))
        finally:
            lock.release()
opener = scmutil.readonlyvfs(opener) revlog.revlog.__init__(self, opener, indexfile) self.revlog2 = revlog2 n = len(self) self.repotiprev = n - 1 self.bundlerevs = set() # used by 'bundle()' revset expression for rev2 in self.revlog2: rev = self.revlog2.index[rev2] # rev numbers - in revlog2, very different from self.rev _start, _csize, _rsize, base, linkrev, p1rev, p2rev, node = rev flags = _start & 0xFFFF if linkmapper is None: # link is to same revlog assert linkrev == rev2 # we never link back link = n else: # rev must be mapped from repo2 cl to unified cl by linkmapper link = linkmapper(linkrev) if linkmapper is not None: # link is to same revlog base = linkmapper(base) if node in self.nodemap: # this happens for the common revlog revisions self.bundlerevs.add(self.nodemap[node]) continue p1node = self.revlog2.node(p1rev) p2node = self.revlog2.node(p2rev) e = (flags, None, None, base, link, self.rev(p1node), self.rev(p2node), node) self.index.insert(-1, e) self.nodemap[node] = n self.bundlerevs.add(n) n += 1 def _chunk(self, rev): if rev <= self.repotiprev: return revlog.revlog._chunk(self, rev) return self.revlog2._chunk(self.node(rev)) def revdiff(self, rev1, rev2): """return or calculate a delta between two revisions""" if rev1 > self.repotiprev and rev2 > self.repotiprev: return self.revlog2.revdiff( self.revlog2.rev(self.node(rev1)), self.revlog2.rev(self.node(rev2))) elif rev1 <= self.repotiprev and rev2 <= self.repotiprev: return self.baserevdiff(rev1, rev2) return mdiff.textdiff(self.revision(self.node(rev1)), self.revision(self.node(rev2))) def revision(self, nodeorrev): """return an uncompressed revision of a given node or revision number. 
    def __init__(self, opener, opener2):
        """Build a changelog presenting the union of two changelogs.

        "opener" is the vfs of the primary repo, "opener2" of the second.
        linkmapper is None because changelog revs link to themselves
        (unionrevlog treats a None linkmapper as "link is to same revlog").
        """
        changelog.changelog.__init__(self, opener)
        linkmapper = None
        changelog2 = changelog.changelog(opener2)
        unionrevlog.__init__(self, opener, self.indexfile, changelog2,
                             linkmapper)
class unionfilelog(unionrevlog, filelog.filelog):
    """Filelog presenting the union of the same file in two repositories."""

    def __init__(self, opener, path, opener2, linkmapper, repo):
        # "linkmapper" maps repo2 changelog revs into the unified
        # changelog numbering (see unionrepository._clrev).
        filelog.filelog.__init__(self, opener, path)
        filelog2 = filelog.filelog(opener2, path)
        unionrevlog.__init__(self, opener, self.indexfile, filelog2,
                             linkmapper)
        self._repo = repo

    def baserevision(self, nodeorrev):
        # Bypass any 'revision' override on subclasses; see the comment in
        # unionrevlog.baserevision for why this indirection exists.
        return filelog.filelog.revision(self, nodeorrev)

    def baserevdiff(self, rev1, rev2):
        # Exists for the same purpose as baserevision.
        return filelog.filelog.revdiff(self, rev1, rev2)

    def iscensored(self, rev):
        """Check if a revision is censored."""
        if rev <= self.repotiprev:
            # revision belongs to the primary repo's filelog
            return filelog.filelog.iscensored(self, rev)
        # otherwise delegate to the second revlog, via the node
        node = self.node(rev)
        return self.revlog2.iscensored(self.revlog2.rev(node))
def instance(ui, path, create):
    """Repository factory for 'union:repo1+repo2' (and plain) paths.

    Union repositories are read-only views, so "create" is rejected.
    When only one path is given, the other side defaults to the main
    repo root of the current working directory.
    """
    if create:
        raise error.Abort(_('cannot create new union repository'))
    mainroot = ui.config("bundle", "mainreporoot", "")
    if not mainroot:
        # try to find the correct path to the working directory repo
        mainroot = cmdutil.findrepo(os.getcwd())
    if mainroot is None:
        mainroot = ''
    if mainroot:
        # Try to make the full path relative so we get a nice, short URL.
        # In particular, we don't want temp dir names in test outputs.
        cwd = os.getcwd()
        if mainroot == cwd:
            mainroot = ''
        else:
            prefix = pathutil.normasprefix(cwd)
            if mainroot.startswith(prefix):
                mainroot = mainroot[len(prefix):]
    if path.startswith('union:'):
        pieces = path.split(":", 1)[1].split("+", 1)
        if len(pieces) == 1:
            repopath, repopath2 = mainroot, pieces[0]
        else:
            repopath, repopath2 = pieces
    else:
        repopath, repopath2 = mainroot, path
    return unionrepository(ui, repopath, repopath2)
Generally, this means finding the earliest revision number that's an ancestor of a or b but not both, except when a or b is a direct descendent of the other, in which case we can return the minimum revnum of a and b. None if no such revision exists. """ # basic idea: # - mark a and b with different sides # - if a parent's children are all on the same side, the parent is # on that side, otherwise it is on no side # - walk the graph in topological order with the help of a heap; # - add unseen parents to side map # - clear side of any parent that has children on different sides # - track number of interesting revs that might still be on a side # - track the lowest interesting rev seen # - quit when interesting revs is zero cl = repo.changelog working = len(cl) # pseudo rev for the working directory if a is None: a = working if b is None: b = working side = {a: -1, b: 1} visit = [-a, -b] heapq.heapify(visit) interesting = len(visit) hascommonancestor = False limit = working while interesting: r = -heapq.heappop(visit) if r == working: parents = [cl.rev(p) for p in repo.dirstate.parents()] else: parents = cl.parentrevs(r) for p in parents: if p < 0: continue if p not in side: # first time we see p; add it to visit side[p] = side[r] if side[p]: interesting += 1 heapq.heappush(visit, -p) elif side[p] and side[p] != side[r]: # p was interesting but now we know better side[p] = 0 interesting -= 1 hascommonancestor = True if side[r]: limit = r # lowest rev visited interesting -= 1 if not hascommonancestor: return None # Consider the following flow (see test-commit-amend.t under issue4405): # 1/ File 'a0' committed # 2/ File renamed from 'a0' to 'a1' in a new commit (call it 'a1') # 3/ Move back to first commit # 4/ Create a new commit via revert to contents of 'a1' (call it 'a1-amend') # 5/ Rename file from 'a1' to 'a2' and commit --amend 'a1-msg' # # During the amend in step five, we will be in this state: # # @ 3 temporary amend commit for a1-amend # | # o 2 a1-amend # | # 
| o 1 a1 # |/ # o 0 a0 # # When _findlimit is called, a and b are revs 3 and 0, so limit will be 2, # yet the filelog has the copy information in rev 1 and we will not look # back far enough unless we also look at the a and b as candidates. # This only occurs when a is a descendent of b or visa-versa. return min(limit, a, b) def _chain(src, dst, a, b): '''chain two sets of copies a->b''' t = a.copy() for k, v in b.iteritems(): if v in t: # found a chain if t[v] != k: # file wasn't renamed back to itself t[k] = t[v] if v not in dst: # chain was a rename, not a copy del t[v] if v in src: # file is a copy of an existing file t[k] = v # remove criss-crossed copies for k, v in t.items(): if k in src and v in dst: del t[k] return t def _tracefile(fctx, am, limit=-1): '''return file context that is the ancestor of fctx present in ancestor manifest am, stopping after the first ancestor lower than limit''' for f in fctx.ancestors(): if am.get(f.path(), None) == f.filenode(): return f if limit >= 0 and f.linkrev() < limit and f.rev() < limit: return None def _dirstatecopies(d): ds = d._repo.dirstate c = ds.copies().copy() for k in c.keys(): if ds[k] not in 'anm': del c[k] return c def _computeforwardmissing(a, b, match=None): """Computes which files are in b but not a. This is its own function so extensions can easily wrap this call to see what files _forwardcopies is about to process. 
def _forwardcopies(a, b, match=None):
    '''find {dst@b: src@a} copy mapping where a is an ancestor of b'''

    # check for working copy
    w = None
    if b.rev() is None:
        # operate on b's first parent and merge dirstate copies back in
        # at the end
        w = b
        b = w.p1()
        if a == b:
            # short-circuit to avoid issues with merge states
            return _dirstatecopies(w)

    # files might have to be traced back to the fctx parent of the last
    # one-side-only changeset, but not further back than that
    limit = _findlimit(a._repo, a.rev(), b.rev())
    if limit is None:
        # no common ancestor found: disable the limit (-1 never triggers
        # _tracefile's early stop)
        limit = -1
    am = a.manifest()

    # find where new files came from
    # we currently don't try to find where old files went, too expensive
    # this means we can miss a case like 'hg rm b; hg cp a b'
    cm = {}
    missing = _computeforwardmissing(a, b, match=match)
    # share one ancestry context across all traced fctxs to keep linkrev
    # adjustment cheap (avoids one DAG walk per file)
    ancestrycontext = a._repo.changelog.ancestors([b.rev()], inclusive=True)
    for f in missing:
        fctx = b[f]
        fctx._ancestrycontext = ancestrycontext
        ofctx = _tracefile(fctx, am, limit)
        if ofctx:
            cm[f] = ofctx.path()

    # combine copies from dirstate if necessary
    if w is not None:
        cm = _chain(a, w, cm, _dirstatecopies(w))

    return cm
def pathcopies(x, y, match=None):
    '''find {dst@y: src@x} copy mapping for directed compare'''
    # degenerate cases: identical or missing contexts carry no copies
    if x == y or not x or not y:
        return {}
    anc = y.ancestor(x)
    if anc == x:
        # x is an ancestor of y: a pure forward walk suffices
        return _forwardcopies(x, y, match=match)
    if anc == y:
        # y is an ancestor of x: walk backward instead
        return _backwardrenames(x, y)
    # general case: go back from x to the common ancestor, then forward
    # to y, and chain the two mappings together
    backward = _backwardrenames(x, anc)
    forward = _forwardcopies(anc, y, match=match)
    return _chain(x, y, backward, forward)
""" rev = ctx.rev() repo = ctx._repo ac = getattr(ctx, '_ancestrycontext', None) if ac is None: revs = [rev] if rev is None: revs = [p.rev() for p in ctx.parents()] ac = repo.changelog.ancestors(revs, inclusive=True) ctx._ancestrycontext = ac def makectx(f, n): if len(n) != 20: # in a working context? if ctx.rev() is None: return ctx.filectx(f) return repo[None][f] fctx = repo.filectx(f, fileid=n) # setup only needed for filectx not create from a changectx fctx._ancestrycontext = ac fctx._descendantrev = rev return fctx return util.lrucachefunc(makectx) def mergecopies(repo, c1, c2, ca): """ Find moves and copies between context c1 and c2 that are relevant for merging. Returns four dicts: "copy", "movewithdir", "diverge", and "renamedelete". "copy" is a mapping from destination name -> source name, where source is in c1 and destination is in c2 or vice-versa. "movewithdir" is a mapping from source name -> destination name, where the file at source present in one context but not the other needs to be moved to destination by the merge process, because the other context moved the directory it is in. "diverge" is a mapping of source name -> list of destination names for divergent renames. "renamedelete" is a mapping of source name -> list of destination names for files deleted in c1 that were renamed in c2 or vice-versa. """ # avoid silly behavior for update from empty dir if not c1 or not c2 or c1 == c2: return {}, {}, {}, {} # avoid silly behavior for parent -> working dir if c2.node() is None and c1.node() == repo.dirstate.p1(): return repo.dirstate.copies(), {}, {}, {} # Copy trace disabling is explicitly below the node == p1 logic above # because the logic above is required for a simple copy to be kept across a # rebase. 
if repo.ui.configbool('experimental', 'disablecopytrace'): return {}, {}, {}, {} limit = _findlimit(repo, c1.rev(), c2.rev()) if limit is None: # no common ancestor, no copies return {}, {}, {}, {} repo.ui.debug(" searching for copies back to rev %d\n" % limit) m1 = c1.manifest() m2 = c2.manifest() ma = ca.manifest() copy1, copy2, = {}, {} movewithdir1, movewithdir2 = {}, {} fullcopy1, fullcopy2 = {}, {} diverge = {} # find interesting file sets from manifests addedinm1 = m1.filesnotin(ma) addedinm2 = m2.filesnotin(ma) u1, u2 = _computenonoverlap(repo, c1, c2, addedinm1, addedinm2) bothnew = sorted(addedinm1 & addedinm2) for f in u1: checkcopies(c1, f, m1, m2, ca, limit, diverge, copy1, fullcopy1) for f in u2: checkcopies(c2, f, m2, m1, ca, limit, diverge, copy2, fullcopy2) copy = dict(copy1.items() + copy2.items()) movewithdir = dict(movewithdir1.items() + movewithdir2.items()) fullcopy = dict(fullcopy1.items() + fullcopy2.items()) renamedelete = {} renamedeleteset = set() divergeset = set() for of, fl in diverge.items(): if len(fl) == 1 or of in c1 or of in c2: del diverge[of] # not actually divergent, or not a rename if of not in c1 and of not in c2: # renamed on one side, deleted on the other side, but filter # out files that have been renamed and then deleted renamedelete[of] = [f for f in fl if f in c1 or f in c2] renamedeleteset.update(fl) # reverse map for below else: divergeset.update(fl) # reverse map for below if bothnew: repo.ui.debug(" unmatched files new in both:\n %s\n" % "\n ".join(bothnew)) bothdiverge, _copy, _fullcopy = {}, {}, {} for f in bothnew: checkcopies(c1, f, m1, m2, ca, limit, bothdiverge, _copy, _fullcopy) checkcopies(c2, f, m2, m1, ca, limit, bothdiverge, _copy, _fullcopy) for of, fl in bothdiverge.items(): if len(fl) == 2 and fl[0] == fl[1]: copy[fl[0]] = of # not actually divergent, just matching renames if fullcopy and repo.ui.debugflag: repo.ui.debug(" all copies found (* = to merge, ! 
= divergent, " "% = renamed and deleted):\n") for f in sorted(fullcopy): note = "" if f in copy: note += "*" if f in divergeset: note += "!" if f in renamedeleteset: note += "%" repo.ui.debug(" src: '%s' -> dst: '%s' %s\n" % (fullcopy[f], f, note)) del divergeset if not fullcopy: return copy, movewithdir, diverge, renamedelete repo.ui.debug(" checking for directory renames\n") # generate a directory move map d1, d2 = c1.dirs(), c2.dirs() # Hack for adding '', which is not otherwise added, to d1 and d2 d1.addpath('/') d2.addpath('/') invalid = set() dirmove = {} # examine each file copy for a potential directory move, which is # when all the files in a directory are moved to a new directory for dst, src in fullcopy.iteritems(): dsrc, ddst = pathutil.dirname(src), pathutil.dirname(dst) if dsrc in invalid: # already seen to be uninteresting continue elif dsrc in d1 and ddst in d1: # directory wasn't entirely moved locally invalid.add(dsrc + "/") elif dsrc in d2 and ddst in d2: # directory wasn't entirely moved remotely invalid.add(dsrc + "/") elif dsrc + "/" in dirmove and dirmove[dsrc + "/"] != ddst + "/": # files from the same directory moved to two different places invalid.add(dsrc + "/") else: # looks good so far dirmove[dsrc + "/"] = ddst + "/" for i in invalid: if i in dirmove: del dirmove[i] del d1, d2, invalid if not dirmove: return copy, movewithdir, diverge, renamedelete for d in dirmove: repo.ui.debug(" discovered dir src: '%s' -> dst: '%s'\n" % (d, dirmove[d])) # check unaccounted nonoverlapping files against directory moves for f in u1 + u2: if f not in fullcopy: for d in dirmove: if f.startswith(d): # new file added in a directory that was moved, move it df = dirmove[d] + f[len(d):] if df not in copy: movewithdir[f] = df repo.ui.debug((" pending file src: '%s' -> " "dst: '%s'\n") % (f, df)) break return copy, movewithdir, diverge, renamedelete def checkcopies(ctx, f, m1, m2, ca, limit, diverge, copy, fullcopy): """ check possible copies of f from m1 to 
def duplicatecopies(repo, rev, fromrev, skiprev=None):
    '''reproduce copies from fromrev to rev in the dirstate

    If skiprev is specified, it's a revision that should be used to
    filter copy records. Any copies that occur between fromrev and
    skiprev will not be duplicated, even if they appear in the set of
    copies between fromrev and rev.
    '''
    exclude = {}
    if (skiprev is not None
        and not repo.ui.configbool('experimental', 'disablecopytrace')):
        # disablecopytrace skips this line, but not the entire function because
        # the line below is O(size of the repo) during a rebase, while the rest
        # of the function is much faster (and is required for carrying copy
        # metadata across the rebase anyway).
        exclude = pathcopies(repo[fromrev], repo[skiprev])
    for dst, src in pathcopies(repo[fromrev], repo[rev]).iteritems():
        # copies.pathcopies returns backward renames, so dst might not
        # actually be in the dirstate
        if dst in exclude:
            continue
        # only record the copy for files tracked by the dirstate
        # (normal/added/merged)
        if repo.dirstate[dst] in "nma":
            repo.dirstate.copy(src, dst)
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Abstraction to simplify socket use for Python < 2.6 This will attempt to use the ssl module and the new socket.create_connection method, but fall back to the old methods if those are unavailable. """ from __future__ import absolute_import import logging import socket logger = logging.getLogger(__name__) try: import ssl # make demandimporters load the module ssl.wrap_socket # pylint: disable=W0104 have_ssl = True except ImportError: import httplib import urllib2 have_ssl = getattr(urllib2, 'HTTPSHandler', False) ssl = False try: create_connection = socket.create_connection except AttributeError: def create_connection(address): """Backport of socket.create_connection from Python 2.6.""" host, port = address msg = "getaddrinfo returns an empty list" sock = None for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM): af, socktype, proto, unused_canonname, sa = res try: sock = socket.socket(af, socktype, proto) logger.info("connect: (%s, %s)", host, port) sock.connect(sa) except socket.error as msg: logger.info('connect fail: %s %s', host, port) if sock: sock.close() sock = None continue break if not sock: raise socket.error(msg) return sock if ssl: wrap_socket = ssl.wrap_socket CERT_NONE = 
ssl.CERT_NONE CERT_OPTIONAL = ssl.CERT_OPTIONAL CERT_REQUIRED = ssl.CERT_REQUIRED else: class FakeSocket(httplib.FakeSocket): """Socket wrapper that supports SSL.""" # Silence lint about this goofy backport class # pylint: disable=W0232,E1101,R0903,R0913,C0111 # backport the behavior from Python 2.6, which is to busy wait # on the socket instead of anything nice. Sigh. # See http://bugs.python.org/issue3890 for more info. def recv(self, buflen=1024, flags=0): """ssl-aware wrapper around socket.recv """ if flags != 0: raise ValueError( "non-zero flags not allowed in calls to recv() on %s" % self.__class__) while True: try: return self._ssl.read(buflen) except socket.sslerror as x: if x.args[0] == socket.SSL_ERROR_WANT_READ: continue else: raise x _PROTOCOL_SSLv23 = 2 CERT_NONE = 0 CERT_OPTIONAL = 1 CERT_REQUIRED = 2 # Disable unused-argument because we're making a dumb wrapper # that's like an upstream method. # # pylint: disable=W0613,R0913 def wrap_socket(sock, keyfile=None, certfile=None, server_side=False, cert_reqs=CERT_NONE, ssl_version=_PROTOCOL_SSLv23, ca_certs=None, do_handshake_on_connect=True, suppress_ragged_eofs=True): """Backport of ssl.wrap_socket from Python 2.6.""" if cert_reqs != CERT_NONE and ca_certs: raise CertificateValidationUnsupported( 'SSL certificate validation requires the ssl module' '(included in Python 2.6 and later.)') sslob = socket.ssl(sock) # borrow httplib's workaround for no ssl.wrap_socket sock = FakeSocket(sock, sslob) return sock # pylint: enable=W0613,R0913 class CertificateValidationUnsupported(Exception): """Exception raised when cert validation is requested but unavailable.""" # no-check-code mercurial-3.7.3/mercurial/httpclient/_readers.py0000644000175000017500000002060512676531525021431 0ustar mpmmpm00000000000000# Copyright 2011, Google Inc. # All rights reserved. 
# # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Reader objects to abstract out different body response types. This module is package-private. It is not expected that these will have any clients outside of httpplus. 
""" from __future__ import absolute_import import httplib import logging logger = logging.getLogger(__name__) class ReadNotReady(Exception): """Raised when read() is attempted but not enough data is loaded.""" class HTTPRemoteClosedError(httplib.HTTPException): """The server closed the remote socket in the middle of a response.""" class AbstractReader(object): """Abstract base class for response readers. Subclasses must implement _load, and should implement _close if it's not an error for the server to close their socket without some termination condition being detected during _load. """ def __init__(self): self._finished = False self._done_chunks = [] self.available_data = 0 def _addchunk(self, data): self._done_chunks.append(data) self.available_data += len(data) def _pushchunk(self, data): self._done_chunks.insert(0, data) self.available_data += len(data) def _popchunk(self): b = self._done_chunks.pop(0) self.available_data -= len(b) return b def done(self): """Returns true if the response body is entirely read.""" return self._finished def read(self, amt): """Read amt bytes from the response body.""" if self.available_data < amt and not self._finished: raise ReadNotReady() blocks = [] need = amt while self._done_chunks: b = self._popchunk() if len(b) > need: nb = b[:need] self._pushchunk(b[need:]) b = nb blocks.append(b) need -= len(b) if need == 0: break result = ''.join(blocks) assert len(result) == amt or (self._finished and len(result) < amt) return result def readto(self, delimstr, blocks = None): """return available data chunks up to the first one in which delimstr occurs. No data will be returned after delimstr -- the chunk in which it occurs will be split and the remainder pushed back onto the available data queue. If blocks is supplied chunks will be added to blocks, otherwise a new list will be allocated. 
""" if blocks is None: blocks = [] while self._done_chunks: b = self._popchunk() i = b.find(delimstr) + len(delimstr) if i: if i < len(b): self._pushchunk(b[i:]) blocks.append(b[:i]) break else: blocks.append(b) return blocks def _load(self, data): # pragma: no cover """Subclasses must implement this. As data is available to be read out of this object, it should be placed into the _done_chunks list. Subclasses should not rely on data remaining in _done_chunks forever, as it may be reaped if the client is parsing data as it comes in. """ raise NotImplementedError def _close(self): """Default implementation of close. The default implementation assumes that the reader will mark the response as finished on the _finished attribute once the entire response body has been read. In the event that this is not true, the subclass should override the implementation of close (for example, close-is-end responses have to set self._finished in the close handler.) """ if not self._finished: raise HTTPRemoteClosedError( 'server appears to have closed the socket mid-response') class AbstractSimpleReader(AbstractReader): """Abstract base class for simple readers that require no response decoding. Examples of such responses are Connection: Close (close-is-end) and responses that specify a content length. """ def _load(self, data): if data: assert not self._finished, ( 'tried to add data (%r) to a closed reader!' 
% data) logger.debug('%s read an additional %d data', self.name, len(data)) # pylint: disable=E1101 self._addchunk(data) class CloseIsEndReader(AbstractSimpleReader): """Reader for responses that specify Connection: Close for length.""" name = 'close-is-end' def _close(self): logger.info('Marking close-is-end reader as closed.') self._finished = True class ContentLengthReader(AbstractSimpleReader): """Reader for responses that specify an exact content length.""" name = 'content-length' def __init__(self, amount): AbstractSimpleReader.__init__(self) self._amount = amount if amount == 0: self._finished = True self._amount_seen = 0 def _load(self, data): AbstractSimpleReader._load(self, data) self._amount_seen += len(data) if self._amount_seen >= self._amount: self._finished = True logger.debug('content-length read complete') class ChunkedReader(AbstractReader): """Reader for chunked transfer encoding responses.""" def __init__(self, eol): AbstractReader.__init__(self) self._eol = eol self._leftover_skip_amt = 0 self._leftover_data = '' def _load(self, data): assert not self._finished, 'tried to add data to a closed reader!' logger.debug('chunked read an additional %d data', len(data)) position = 0 if self._leftover_data: logger.debug( 'chunked reader trying to finish block from leftover data') # TODO: avoid this string concatenation if possible data = self._leftover_data + data position = self._leftover_skip_amt self._leftover_data = '' self._leftover_skip_amt = 0 datalen = len(data) while position < datalen: split = data.find(self._eol, position) if split == -1: self._leftover_data = data self._leftover_skip_amt = position return amt = int(data[position:split], base=16) block_start = split + len(self._eol) # If the whole data chunk plus the eol trailer hasn't # loaded, we'll wait for the next load. 
if block_start + amt + len(self._eol) > len(data): self._leftover_data = data self._leftover_skip_amt = position return if amt == 0: self._finished = True logger.debug('closing chunked reader due to chunk of length 0') return self._addchunk(data[block_start:block_start + amt]) position = block_start + amt + len(self._eol) # no-check-code mercurial-3.7.3/mercurial/httpclient/__init__.py0000644000175000017500000007760612676531525021421 0ustar mpmmpm00000000000000# Copyright 2010, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
"""Improved HTTP/1.1 client library This library contains an HTTPConnection which is similar to the one in httplib, but has several additional features: * supports keepalives natively * uses select() to block for incoming data * notices when the server responds early to a request * implements ssl inline instead of in a different class """ from __future__ import absolute_import # Many functions in this file have too many arguments. # pylint: disable=R0913 import cStringIO import errno import httplib import logging import rfc822 import select import socket from . import ( _readers, socketutil, ) logger = logging.getLogger(__name__) __all__ = ['HTTPConnection', 'HTTPResponse'] HTTP_VER_1_0 = 'HTTP/1.0' HTTP_VER_1_1 = 'HTTP/1.1' OUTGOING_BUFFER_SIZE = 1 << 15 INCOMING_BUFFER_SIZE = 1 << 20 HDR_ACCEPT_ENCODING = 'accept-encoding' HDR_CONNECTION_CTRL = 'connection' HDR_CONTENT_LENGTH = 'content-length' HDR_XFER_ENCODING = 'transfer-encoding' XFER_ENCODING_CHUNKED = 'chunked' CONNECTION_CLOSE = 'close' EOL = '\r\n' _END_HEADERS = EOL * 2 # Based on some searching around, 1 second seems like a reasonable # default here. TIMEOUT_ASSUME_CONTINUE = 1 TIMEOUT_DEFAULT = None class HTTPResponse(object): """Response from an HTTP server. The response will continue to load as available. If you need the complete response before continuing, check the .complete() method. """ def __init__(self, sock, timeout, method): self.sock = sock self.method = method self.raw_response = '' self._headers_len = 0 self.headers = None self.will_close = False self.status_line = '' self.status = None self.continued = False self.http_version = None self.reason = None self._reader = None self._read_location = 0 self._eol = EOL self._timeout = timeout @property def _end_headers(self): return self._eol * 2 def complete(self): """Returns true if this response is completely loaded. 
Note that if this is a connection where complete means the socket is closed, this will nearly always return False, even in cases where all the data has actually been loaded. """ if self._reader: return self._reader.done() def _close(self): if self._reader is not None: # We're a friend of the reader class here. # pylint: disable=W0212 self._reader._close() def getheader(self, header, default=None): return self.headers.getheader(header, default=default) def getheaders(self): return self.headers.items() def readline(self): """Read a single line from the response body. This may block until either a line ending is found or the response is complete. """ blocks = [] while True: self._reader.readto('\n', blocks) if blocks and blocks[-1][-1] == '\n' or self.complete(): break self._select() return ''.join(blocks) def read(self, length=None): """Read data from the response body.""" # if length is None, unbounded read while (not self.complete() # never select on a finished read and (not length # unbounded, so we wait for complete() or length > self._reader.available_data)): self._select() if not length: length = self._reader.available_data r = self._reader.read(length) if self.complete() and self.will_close: self.sock.close() return r def _select(self): r, unused_write, unused_err = select.select( [self.sock], [], [], self._timeout) if not r: # socket was not readable. If the response is not # complete, raise a timeout. if not self.complete(): logger.info('timed out with timeout of %s', self._timeout) raise HTTPTimeoutException('timeout reading data') try: data = self.sock.recv(INCOMING_BUFFER_SIZE) except socket.sslerror as e: if e.args[0] != socket.SSL_ERROR_WANT_READ: raise logger.debug('SSL_ERROR_WANT_READ in _select, should retry later') return True logger.debug('response read %d data during _select', len(data)) # If the socket was readable and no data was read, that means # the socket was closed. 
Inform the reader (if any) so it can # raise an exception if this is an invalid situation. if not data: if self._reader: # We're a friend of the reader class here. # pylint: disable=W0212 self._reader._close() return False else: self._load_response(data) return True # This method gets replaced by _load later, which confuses pylint. def _load_response(self, data): # pylint: disable=E0202 # Being here implies we're not at the end of the headers yet, # since at the end of this method if headers were completely # loaded we replace this method with the load() method of the # reader we created. self.raw_response += data # This is a bogus server with bad line endings if self._eol not in self.raw_response: for bad_eol in ('\n', '\r'): if (bad_eol in self.raw_response # verify that bad_eol is not the end of the incoming data # as this could be a response line that just got # split between \r and \n. and (self.raw_response.index(bad_eol) < (len(self.raw_response) - 1))): logger.info('bogus line endings detected, ' 'using %r for EOL', bad_eol) self._eol = bad_eol break # exit early if not at end of headers if self._end_headers not in self.raw_response or self.headers: return # handle 100-continue response hdrs, body = self.raw_response.split(self._end_headers, 1) unused_http_ver, status = hdrs.split(' ', 1) if status.startswith('100'): self.raw_response = body self.continued = True logger.debug('continue seen, setting body to %r', body) return # arriving here means we should parse response headers # as all headers have arrived completely hdrs, body = self.raw_response.split(self._end_headers, 1) del self.raw_response if self._eol in hdrs: self.status_line, hdrs = hdrs.split(self._eol, 1) else: self.status_line = hdrs hdrs = '' # TODO HTTP < 1.0 support (self.http_version, self.status, self.reason) = self.status_line.split(' ', 2) self.status = int(self.status) if self._eol != EOL: hdrs = hdrs.replace(self._eol, '\r\n') headers = rfc822.Message(cStringIO.StringIO(hdrs)) 
content_len = None if HDR_CONTENT_LENGTH in headers: content_len = int(headers[HDR_CONTENT_LENGTH]) if self.http_version == HTTP_VER_1_0: self.will_close = True elif HDR_CONNECTION_CTRL in headers: self.will_close = ( headers[HDR_CONNECTION_CTRL].lower() == CONNECTION_CLOSE) if (HDR_XFER_ENCODING in headers and headers[HDR_XFER_ENCODING].lower() == XFER_ENCODING_CHUNKED): self._reader = _readers.ChunkedReader(self._eol) logger.debug('using a chunked reader') else: # HEAD responses are forbidden from returning a body, and # it's implausible for a CONNECT response to use # close-is-end logic for an OK response. if (self.method == 'HEAD' or (self.method == 'CONNECT' and content_len is None)): content_len = 0 if content_len is not None: logger.debug('using a content-length reader with length %d', content_len) self._reader = _readers.ContentLengthReader(content_len) else: # Response body had no length specified and is not # chunked, so the end of the body will only be # identifiable by the termination of the socket by the # server. My interpretation of the spec means that we # are correct in hitting this case if # transfer-encoding, content-length, and # connection-control were left unspecified. self._reader = _readers.CloseIsEndReader() logger.debug('using a close-is-end reader') self.will_close = True if body: # We're a friend of the reader class here. # pylint: disable=W0212 self._reader._load(body) logger.debug('headers complete') self.headers = headers # We're a friend of the reader class here. # pylint: disable=W0212 self._load_response = self._reader._load def _foldheaders(headers): """Given some headers, rework them so we can safely overwrite values. >>> _foldheaders({'Accept-Encoding': 'wat'}) {'accept-encoding': ('Accept-Encoding', 'wat')} """ return dict((k.lower(), (k, v)) for k, v in headers.iteritems()) class HTTPConnection(object): """Connection to a single http server. Supports 100-continue and keepalives natively. 
Uses select() for non-blocking socket operations. """ http_version = HTTP_VER_1_1 response_class = HTTPResponse def __init__(self, host, port=None, use_ssl=None, ssl_validator=None, timeout=TIMEOUT_DEFAULT, continue_timeout=TIMEOUT_ASSUME_CONTINUE, proxy_hostport=None, proxy_headers=None, ssl_wrap_socket=None, **ssl_opts): """Create a new HTTPConnection. Args: host: The host to which we'll connect. port: Optional. The port over which we'll connect. Default 80 for non-ssl, 443 for ssl. use_ssl: Optional. Whether to use ssl. Defaults to False if port is not 443, true if port is 443. ssl_validator: a function(socket) to validate the ssl cert timeout: Optional. Connection timeout, default is TIMEOUT_DEFAULT. continue_timeout: Optional. Timeout for waiting on an expected "100 Continue" response. Default is TIMEOUT_ASSUME_CONTINUE. proxy_hostport: Optional. Tuple of (host, port) to use as an http proxy for the connection. Default is to not use a proxy. proxy_headers: Optional dict of header keys and values to send to a proxy when using CONNECT. For compatibility with httplib, the Proxy-Authorization header may be specified in headers for request(), which will clobber any such header specified here if specified. Providing this option and not proxy_hostport will raise an ValueError. ssl_wrap_socket: Optional function to use for wrapping sockets. If unspecified, the one from the ssl module will be used if available, or something that's compatible with it if on a Python older than 2.6. Any extra keyword arguments to this function will be provided to the ssl_wrap_socket method. 
If no ssl """ if port is None and host.count(':') == 1 or ']:' in host: host, port = host.rsplit(':', 1) port = int(port) if '[' in host: host = host[1:-1] if ssl_wrap_socket is not None: self._ssl_wrap_socket = ssl_wrap_socket else: self._ssl_wrap_socket = socketutil.wrap_socket if use_ssl is None and port is None: use_ssl = False port = 80 elif use_ssl is None: use_ssl = (port == 443) elif port is None: port = (use_ssl and 443 or 80) self.port = port if use_ssl and not socketutil.have_ssl: raise Exception('ssl requested but unavailable on this Python') self.ssl = use_ssl self.ssl_opts = ssl_opts self._ssl_validator = ssl_validator self.host = host self.sock = None self._current_response = None self._current_response_taken = False if proxy_hostport is None: self._proxy_host = self._proxy_port = None if proxy_headers: raise ValueError( 'proxy_headers may not be specified unless ' 'proxy_hostport is also specified.') else: self._proxy_headers = {} else: self._proxy_host, self._proxy_port = proxy_hostport self._proxy_headers = _foldheaders(proxy_headers or {}) self.timeout = timeout self.continue_timeout = continue_timeout def _connect(self, proxy_headers): """Connect to the host and port specified in __init__.""" if self.sock: return if self._proxy_host is not None: logger.info('Connecting to http proxy %s:%s', self._proxy_host, self._proxy_port) sock = socketutil.create_connection((self._proxy_host, self._proxy_port)) if self.ssl: data = self._buildheaders('CONNECT', '%s:%d' % (self.host, self.port), proxy_headers, HTTP_VER_1_0) sock.send(data) sock.setblocking(0) r = self.response_class(sock, self.timeout, 'CONNECT') timeout_exc = HTTPTimeoutException( 'Timed out waiting for CONNECT response from proxy') while not r.complete(): try: # We're a friend of the response class, so let # us use the private attribute. 
# pylint: disable=W0212 if not r._select(): if not r.complete(): raise timeout_exc except HTTPTimeoutException: # This raise/except pattern looks goofy, but # _select can raise the timeout as well as the # loop body. I wish it wasn't this convoluted, # but I don't have a better solution # immediately handy. raise timeout_exc if r.status != 200: raise HTTPProxyConnectFailedException( 'Proxy connection failed: %d %s' % (r.status, r.read())) logger.info('CONNECT (for SSL) to %s:%s via proxy succeeded.', self.host, self.port) else: sock = socketutil.create_connection((self.host, self.port)) if self.ssl: # This is the default, but in the case of proxied SSL # requests the proxy logic above will have cleared # blocking mode, so re-enable it just to be safe. sock.setblocking(1) logger.debug('wrapping socket for ssl with options %r', self.ssl_opts) sock = self._ssl_wrap_socket(sock, **self.ssl_opts) if self._ssl_validator: self._ssl_validator(sock) sock.setblocking(0) self.sock = sock def _buildheaders(self, method, path, headers, http_ver): if self.ssl and self.port == 443 or self.port == 80: # default port for protocol, so leave it out hdrhost = self.host else: # include nonstandard port in header if ':' in self.host: # must be IPv6 hdrhost = '[%s]:%d' % (self.host, self.port) else: hdrhost = '%s:%d' % (self.host, self.port) if self._proxy_host and not self.ssl: # When talking to a regular http proxy we must send the # full URI, but in all other cases we must not (although # technically RFC 2616 says servers must accept our # request if we screw up, experimentally few do that # correctly.) 
assert path[0] == '/', 'path must start with a /' path = 'http://%s%s' % (hdrhost, path) outgoing = ['%s %s %s%s' % (method, path, http_ver, EOL)] headers['host'] = ('Host', hdrhost) headers[HDR_ACCEPT_ENCODING] = (HDR_ACCEPT_ENCODING, 'identity') for hdr, val in headers.itervalues(): outgoing.append('%s: %s%s' % (hdr, val, EOL)) outgoing.append(EOL) return ''.join(outgoing) def close(self): """Close the connection to the server. This is a no-op if the connection is already closed. The connection may automatically close if requested by the server or required by the nature of a response. """ if self.sock is None: return self.sock.close() self.sock = None logger.info('closed connection to %s on %s', self.host, self.port) def busy(self): """Returns True if this connection object is currently in use. If a response is still pending, this will return True, even if the request has finished sending. In the future, HTTPConnection may transparently juggle multiple connections to the server, in which case this will be useful to detect if any of those connections is ready for use. """ cr = self._current_response if cr is not None: if self._current_response_taken: if cr.will_close: self.sock = None self._current_response = None return False elif cr.complete(): self._current_response = None return False return True return False def _reconnect(self, where, pheaders): logger.info('reconnecting during %s', where) self.close() self._connect(pheaders) def request(self, method, path, body=None, headers={}, expect_continue=False): """Send a request to the server. For increased flexibility, this does not return the response object. Future versions of HTTPConnection that juggle multiple sockets will be able to send (for example) 5 requests all at once, and then let the requests arrive as data is available. Use the `getresponse()` method to retrieve the response. 
""" if self.busy(): raise httplib.CannotSendRequest( 'Can not send another request before ' 'current response is read!') self._current_response_taken = False logger.info('sending %s request for %s to %s on port %s', method, path, self.host, self.port) hdrs = _foldheaders(headers) if hdrs.get('expect', ('', ''))[1].lower() == '100-continue': expect_continue = True elif expect_continue: hdrs['expect'] = ('Expect', '100-Continue') # httplib compatibility: if the user specified a # proxy-authorization header, that's actually intended for a # proxy CONNECT action, not the real request, but only if # we're going to use a proxy. pheaders = dict(self._proxy_headers) if self._proxy_host and self.ssl: pa = hdrs.pop('proxy-authorization', None) if pa is not None: pheaders['proxy-authorization'] = pa chunked = False if body and HDR_CONTENT_LENGTH not in hdrs: if getattr(body, '__len__', False): hdrs[HDR_CONTENT_LENGTH] = (HDR_CONTENT_LENGTH, len(body)) elif getattr(body, 'read', False): hdrs[HDR_XFER_ENCODING] = (HDR_XFER_ENCODING, XFER_ENCODING_CHUNKED) chunked = True else: raise BadRequestData('body has no __len__() nor read()') # If we're reusing the underlying socket, there are some # conditions where we'll want to retry, so make a note of the # state of self.sock fresh_socket = self.sock is None self._connect(pheaders) outgoing_headers = self._buildheaders( method, path, hdrs, self.http_version) response = None first = True while ((outgoing_headers or body) and not (response and response.complete())): select_timeout = self.timeout out = outgoing_headers or body blocking_on_continue = False if expect_continue and not outgoing_headers and not ( response and (response.headers or response.continued)): logger.info( 'waiting up to %s seconds for' ' continue response from server', self.continue_timeout) select_timeout = self.continue_timeout blocking_on_continue = True out = False if out: w = [self.sock] else: w = [] r, w, x = select.select([self.sock], w, [], select_timeout) # 
if we were expecting a 100 continue and it's been long # enough, just go ahead and assume it's ok. This is the # recommended behavior from the RFC. if r == w == x == []: if blocking_on_continue: expect_continue = False logger.info('no response to continue expectation from ' 'server, optimistically sending request body') else: raise HTTPTimeoutException('timeout sending data') was_first = first # incoming data if r: try: try: data = r[0].recv(INCOMING_BUFFER_SIZE) except socket.sslerror as e: if e.args[0] != socket.SSL_ERROR_WANT_READ: raise logger.debug('SSL_ERROR_WANT_READ while sending ' 'data, retrying...') continue if not data: logger.info('socket appears closed in read') self.sock = None self._current_response = None if response is not None: # We're a friend of the response class, so let # us use the private attribute. # pylint: disable=W0212 response._close() # This if/elif ladder is a bit subtle, # comments in each branch should help. if response is not None and response.complete(): # Server responded completely and then # closed the socket. We should just shut # things down and let the caller get their # response. logger.info('Got an early response, ' 'aborting remaining request.') break elif was_first and response is None: # Most likely a keepalive that got killed # on the server's end. Commonly happens # after getting a really large response # from the server. logger.info( 'Connection appeared closed in read on first' ' request loop iteration, will retry.') self._reconnect('read', pheaders) continue else: # We didn't just send the first data hunk, # and either have a partial response or no # response at all. There's really nothing # meaningful we can do here. 
raise HTTPStateError( 'Connection appears closed after ' 'some request data was written, but the ' 'response was missing or incomplete!') logger.debug('read %d bytes in request()', len(data)) if response is None: response = self.response_class( r[0], self.timeout, method) # We're a friend of the response class, so let us # use the private attribute. # pylint: disable=W0212 response._load_response(data) # Jump to the next select() call so we load more # data if the server is still sending us content. continue except socket.error as e: if e[0] != errno.EPIPE and not was_first: raise # outgoing data if w and out: try: if getattr(out, 'read', False): # pylint guesses the type of out incorrectly here # pylint: disable=E1103 data = out.read(OUTGOING_BUFFER_SIZE) if not data: continue if len(data) < OUTGOING_BUFFER_SIZE: if chunked: body = '0' + EOL + EOL else: body = None if chunked: out = hex(len(data))[2:] + EOL + data + EOL else: out = data amt = w[0].send(out) except socket.error as e: if e[0] == socket.SSL_ERROR_WANT_WRITE and self.ssl: # This means that SSL hasn't flushed its buffer into # the socket yet. # TODO: find a way to block on ssl flushing its buffer # similar to selecting on a raw socket. continue if e[0] == errno.EWOULDBLOCK or e[0] == errno.EAGAIN: continue elif (e[0] not in (errno.ECONNRESET, errno.EPIPE) and not first): raise self._reconnect('write', pheaders) amt = self.sock.send(out) logger.debug('sent %d', amt) first = False if out is body: body = out[amt:] else: outgoing_headers = out[amt:] # close if the server response said to or responded before eating # the whole request if response is None: response = self.response_class(self.sock, self.timeout, method) if not fresh_socket: if not response._select(): # This means the response failed to get any response # data at all, and in all probability the socket was # closed before the server even saw our request. Try # the request again on a fresh socket. 
logger.debug('response._select() failed during request().' ' Assuming request needs to be retried.') self.sock = None # Call this method explicitly to re-try the # request. We don't use self.request() because # some tools (notably Mercurial) expect to be able # to subclass and redefine request(), and they # don't have the same argspec as we do. # # TODO restructure sending of requests to avoid # this recursion return HTTPConnection.request( self, method, path, body=body, headers=headers, expect_continue=expect_continue) data_left = bool(outgoing_headers or body) if data_left: logger.info('stopped sending request early, ' 'will close the socket to be safe.') response.will_close = True if response.will_close: # The socket will be closed by the response, so we disown # the socket self.sock = None self._current_response = response def getresponse(self): """Returns the response to the most recent request.""" if self._current_response is None: raise httplib.ResponseNotReady() r = self._current_response while r.headers is None: # We're a friend of the response class, so let us use the # private attribute. # pylint: disable=W0212 if not r._select() and not r.complete(): raise _readers.HTTPRemoteClosedError() if r.will_close: self.sock = None self._current_response = None elif r.complete(): self._current_response = None else: self._current_response_taken = True return r class HTTPTimeoutException(httplib.HTTPException): """A timeout occurred while waiting on the server.""" class BadRequestData(httplib.HTTPException): """Request body object has neither __len__ nor read.""" class HTTPProxyConnectFailedException(httplib.HTTPException): """Connecting to the HTTP proxy failed.""" class HTTPStateError(httplib.HTTPException): """Invalid internal state encountered.""" # Forward this exception type from _readers since it needs to be part # of the public API. 
HTTPRemoteClosedError = _readers.HTTPRemoteClosedError # no-check-code mercurial-3.7.3/mercurial/templatefilters.py0000644000175000017500000003167012676531525020677 0ustar mpmmpm00000000000000# template-filters.py - common template expansion filters # # Copyright 2005-2008 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import cgi import os import re import time import urllib from . import ( encoding, hbisect, node, templatekw, util, ) def addbreaks(text): """:addbreaks: Any text. Add an XHTML "
" tag before the end of every line except the last. """ return text.replace('\n', '
\n') agescales = [("year", 3600 * 24 * 365, 'Y'), ("month", 3600 * 24 * 30, 'M'), ("week", 3600 * 24 * 7, 'W'), ("day", 3600 * 24, 'd'), ("hour", 3600, 'h'), ("minute", 60, 'm'), ("second", 1, 's')] def age(date, abbrev=False): """:age: Date. Returns a human-readable date/time difference between the given date/time and the current date/time. """ def plural(t, c): if c == 1: return t return t + "s" def fmt(t, c, a): if abbrev: return "%d%s" % (c, a) return "%d %s" % (c, plural(t, c)) now = time.time() then = date[0] future = False if then > now: future = True delta = max(1, int(then - now)) if delta > agescales[0][1] * 30: return 'in the distant future' else: delta = max(1, int(now - then)) if delta > agescales[0][1] * 2: return util.shortdate(date) for t, s, a in agescales: n = delta // s if n >= 2 or s == 1: if future: return '%s from now' % fmt(t, n, a) return '%s ago' % fmt(t, n, a) def basename(path): """:basename: Any text. Treats the text as a path, and returns the last component of the path after splitting by the path separator (ignoring trailing separators). For example, "foo/bar/baz" becomes "baz" and "foo/bar//" becomes "bar". """ return os.path.basename(path) def count(i): """:count: List or text. Returns the length as an integer.""" return len(i) def domain(author): """:domain: Any text. Finds the first string that looks like an email address, and extracts just the domain component. Example: ``User `` becomes ``example.com``. """ f = author.find('@') if f == -1: return '' author = author[f + 1:] f = author.find('>') if f >= 0: author = author[:f] return author def email(text): """:email: Any text. Extracts the first string that looks like an email address. Example: ``User `` becomes ``user@example.com``. """ return util.email(text) def escape(text): """:escape: Any text. Replaces the special XML/XHTML characters "&", "<" and ">" with XML entities, and filters out NUL characters. 
""" return cgi.escape(text.replace('\0', ''), True) para_re = None space_re = None def fill(text, width, initindent='', hangindent=''): '''fill many paragraphs with optional indentation.''' global para_re, space_re if para_re is None: para_re = re.compile('(\n\n|\n\\s*[-*]\\s*)', re.M) space_re = re.compile(r' +') def findparas(): start = 0 while True: m = para_re.search(text, start) if not m: uctext = unicode(text[start:], encoding.encoding) w = len(uctext) while 0 < w and uctext[w - 1].isspace(): w -= 1 yield (uctext[:w].encode(encoding.encoding), uctext[w:].encode(encoding.encoding)) break yield text[start:m.start(0)], m.group(1) start = m.end(1) return "".join([util.wrap(space_re.sub(' ', util.wrap(para, width)), width, initindent, hangindent) + rest for para, rest in findparas()]) def fill68(text): """:fill68: Any text. Wraps the text to fit in 68 columns.""" return fill(text, 68) def fill76(text): """:fill76: Any text. Wraps the text to fit in 76 columns.""" return fill(text, 76) def firstline(text): """:firstline: Any text. Returns the first line of text.""" try: return text.splitlines(True)[0].rstrip('\r\n') except IndexError: return '' def hexfilter(text): """:hex: Any text. Convert a binary Mercurial node identifier into its long hexadecimal representation. """ return node.hex(text) def hgdate(text): """:hgdate: Date. Returns the date as a pair of numbers: "1157407993 25200" (Unix timestamp, timezone offset). """ return "%d %d" % text def isodate(text): """:isodate: Date. Returns the date in ISO 8601 format: "2009-08-18 13:00 +0200". """ return util.datestr(text, '%Y-%m-%d %H:%M %1%2') def isodatesec(text): """:isodatesec: Date. Returns the date in ISO 8601 format, including seconds: "2009-08-18 13:00:13 +0200". See also the rfc3339date filter. 
""" return util.datestr(text, '%Y-%m-%d %H:%M:%S %1%2') def indent(text, prefix): '''indent each non-empty line of text after first with prefix.''' lines = text.splitlines() num_lines = len(lines) endswithnewline = text[-1:] == '\n' def indenter(): for i in xrange(num_lines): l = lines[i] if i and l.strip(): yield prefix yield l if i < num_lines - 1 or endswithnewline: yield '\n' return "".join(indenter()) def json(obj): if obj is None or obj is False or obj is True: return {None: 'null', False: 'false', True: 'true'}[obj] elif isinstance(obj, int) or isinstance(obj, float): return str(obj) elif isinstance(obj, encoding.localstr): u = encoding.fromlocal(obj).decode('utf-8') # can round-trip return '"%s"' % jsonescape(u) elif isinstance(obj, str): # no encoding.fromlocal() because it may abort if obj can't be decoded u = unicode(obj, encoding.encoding, 'replace') return '"%s"' % jsonescape(u) elif isinstance(obj, unicode): return '"%s"' % jsonescape(obj) elif util.safehasattr(obj, 'keys'): out = [] for k, v in sorted(obj.iteritems()): s = '%s: %s' % (json(k), json(v)) out.append(s) return '{' + ', '.join(out) + '}' elif util.safehasattr(obj, '__iter__'): out = [] for i in obj: out.append(json(i)) return '[' + ', '.join(out) + ']' elif util.safehasattr(obj, '__call__'): return json(obj()) else: raise TypeError('cannot encode type %s' % obj.__class__.__name__) def _uescape(c): if 0x20 <= ord(c) < 0x80: return c else: return '\\u%04x' % ord(c) _escapes = [ ('\\', '\\\\'), ('"', '\\"'), ('\t', '\\t'), ('\n', '\\n'), ('\r', '\\r'), ('\f', '\\f'), ('\b', '\\b'), ('<', '\\u003c'), ('>', '\\u003e'), ('\0', '\\u0000') ] def jsonescape(s): for k, v in _escapes: s = s.replace(k, v) return ''.join(_uescape(c) for c in s) def lower(text): """:lower: Any text. Converts the text to lowercase.""" return encoding.lower(text) def nonempty(str): """:nonempty: Any text. Returns '(none)' if the string is empty.""" return str or "(none)" def obfuscate(text): """:obfuscate: Any text. 
Returns the input text rendered as a sequence of XML entities. """ text = unicode(text, encoding.encoding, 'replace') return ''.join(['&#%d;' % ord(c) for c in text]) def permissions(flags): if "l" in flags: return "lrwxrwxrwx" if "x" in flags: return "-rwxr-xr-x" return "-rw-r--r--" def person(author): """:person: Any text. Returns the name before an email address, interpreting it as per RFC 5322. >>> person('foo@bar') 'foo' >>> person('Foo Bar ') 'Foo Bar' >>> person('"Foo Bar" ') 'Foo Bar' >>> person('"Foo \"buz\" Bar" ') 'Foo "buz" Bar' >>> # The following are invalid, but do exist in real-life ... >>> person('Foo "buz" Bar ') 'Foo "buz" Bar' >>> person('"Foo Bar ') 'Foo Bar' """ if '@' not in author: return author f = author.find('<') if f != -1: return author[:f].strip(' "').replace('\\"', '"') f = author.find('@') return author[:f].replace('.', ' ') def revescape(text): """:revescape: Any text. Escapes all "special" characters, except @. Forward slashes are escaped twice to prevent web servers from prematurely unescaping them. For example, "@foo bar/baz" becomes "@foo%20bar%252Fbaz". """ return urllib.quote(text, safe='/@').replace('/', '%252F') def rfc3339date(text): """:rfc3339date: Date. Returns a date using the Internet date format specified in RFC 3339: "2009-08-18T13:00:13+02:00". """ return util.datestr(text, "%Y-%m-%dT%H:%M:%S%1:%2") def rfc822date(text): """:rfc822date: Date. Returns a date using the same format used in email headers: "Tue, 18 Aug 2009 13:00:13 +0200". """ return util.datestr(text, "%a, %d %b %Y %H:%M:%S %1%2") def short(text): """:short: Changeset hash. Returns the short form of a changeset hash, i.e. a 12 hexadecimal digit string. """ return text[:12] def shortbisect(text): """:shortbisect: Any text. Treats `text` as a bisection status, and returns a single-character representing the status (G: good, B: bad, S: skipped, U: untested, I: ignored). Returns single space if `text` is not a valid bisection status. 
""" return hbisect.shortlabel(text) or ' ' def shortdate(text): """:shortdate: Date. Returns a date like "2006-09-18".""" return util.shortdate(text) def splitlines(text): """:splitlines: Any text. Split text into a list of lines.""" return templatekw.showlist('line', text.splitlines(), 'lines') def stringescape(text): return text.encode('string_escape') def stringify(thing): """:stringify: Any type. Turns the value into text by converting values into text and concatenating them. """ if util.safehasattr(thing, '__iter__') and not isinstance(thing, str): return "".join([stringify(t) for t in thing if t is not None]) if thing is None: return "" return str(thing) def stripdir(text): """:stripdir: Treat the text as path and strip a directory level, if possible. For example, "foo" and "foo/bar" becomes "foo". """ dir = os.path.dirname(text) if dir == "": return os.path.basename(text) else: return dir def tabindent(text): """:tabindent: Any text. Returns the text, with every non-empty line except the first starting with a tab character. """ return indent(text, '\t') def upper(text): """:upper: Any text. Converts the text to uppercase.""" return encoding.upper(text) def urlescape(text): """:urlescape: Any text. Escapes all "special" characters. For example, "foo bar" becomes "foo%20bar". """ return urllib.quote(text) def userfilter(text): """:user: Any text. Returns a short representation of a user name or email address.""" return util.shortuser(text) def emailuser(text): """:emailuser: Any text. 
Returns the user portion of an email address.""" return util.emailuser(text) def xmlescape(text): text = (text .replace('&', '&') .replace('<', '<') .replace('>', '>') .replace('"', '"') .replace("'", ''')) # ' invalid in HTML return re.sub('[\x00-\x08\x0B\x0C\x0E-\x1F]', ' ', text) filters = { "addbreaks": addbreaks, "age": age, "basename": basename, "count": count, "domain": domain, "email": email, "escape": escape, "fill68": fill68, "fill76": fill76, "firstline": firstline, "hex": hexfilter, "hgdate": hgdate, "isodate": isodate, "isodatesec": isodatesec, "json": json, "jsonescape": jsonescape, "lower": lower, "nonempty": nonempty, "obfuscate": obfuscate, "permissions": permissions, "person": person, "revescape": revescape, "rfc3339date": rfc3339date, "rfc822date": rfc822date, "short": short, "shortbisect": shortbisect, "shortdate": shortdate, "splitlines": splitlines, "stringescape": stringescape, "stringify": stringify, "stripdir": stripdir, "tabindent": tabindent, "upper": upper, "urlescape": urlescape, "user": userfilter, "emailuser": emailuser, "xmlescape": xmlescape, } def websub(text, websubtable): """:websub: Any text. Only applies to hgweb. Applies the regular expression replacements defined in the websub section. """ if websubtable: for regexp, format in websubtable: text = regexp.sub(format, text) return text # tell hggettext to extract docstrings from these functions: i18nfunctions = filters.values() mercurial-3.7.3/mercurial/httpconnection.py0000644000175000017500000002523512676531525020532 0ustar mpmmpm00000000000000# httpconnection.py - urllib2 handler for new http support # # Copyright 2005, 2006, 2007, 2008 Matt Mackall # Copyright 2006, 2007 Alexis S. L. Carvalho # Copyright 2006 Vadim Gelfer # Copyright 2011 Google, Inc. # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
from __future__ import absolute_import import logging import os import socket import urllib import urllib2 from .i18n import _ from . import ( httpclient, sslutil, util, ) # moved here from url.py to avoid a cycle class httpsendfile(object): """This is a wrapper around the objects returned by python's "open". Its purpose is to send file-like objects via HTTP. It do however not define a __len__ attribute because the length might be more than Py_ssize_t can handle. """ def __init__(self, ui, *args, **kwargs): # We can't just "self._data = open(*args, **kwargs)" here because there # is an "open" function defined in this module that shadows the global # one self.ui = ui self._data = open(*args, **kwargs) self.seek = self._data.seek self.close = self._data.close self.write = self._data.write self.length = os.fstat(self._data.fileno()).st_size self._pos = 0 self._total = self.length // 1024 * 2 def read(self, *args, **kwargs): try: ret = self._data.read(*args, **kwargs) except EOFError: self.ui.progress(_('sending'), None) self._pos += len(ret) # We pass double the max for total because we currently have # to send the bundle twice in the case of a server that # requires authentication. Since we can't know until we try # once whether authentication will be required, just lie to # the user and maybe the push succeeds suddenly at 50%. self.ui.progress(_('sending'), self._pos // 1024, unit=_('kb'), total=self._total) return ret # moved here from url.py to avoid a cycle def readauthforuri(ui, uri, user): # Read configuration config = dict() for key, val in ui.configitems('auth'): if '.' 
not in key: ui.warn(_("ignoring invalid [auth] key '%s'\n") % key) continue group, setting = key.rsplit('.', 1) gdict = config.setdefault(group, dict()) if setting in ('username', 'cert', 'key'): val = util.expandpath(val) gdict[setting] = val # Find the best match scheme, hostpath = uri.split('://', 1) bestuser = None bestlen = 0 bestauth = None for group, auth in config.iteritems(): if user and user != auth.get('username', user): # If a username was set in the URI, the entry username # must either match it or be unset continue prefix = auth.get('prefix') if not prefix: continue p = prefix.split('://', 1) if len(p) > 1: schemes, prefix = [p[0]], p[1] else: schemes = (auth.get('schemes') or 'https').split() if (prefix == '*' or hostpath.startswith(prefix)) and \ (len(prefix) > bestlen or (len(prefix) == bestlen and \ not bestuser and 'username' in auth)) \ and scheme in schemes: bestlen = len(prefix) bestauth = group, auth bestuser = auth.get('username') if user and not bestuser: auth['username'] = user return bestauth # Mercurial (at least until we can remove the old codepath) requires # that the http response object be sufficiently file-like, so we # provide a close() method here. class HTTPResponse(httpclient.HTTPResponse): def close(self): pass class HTTPConnection(httpclient.HTTPConnection): response_class = HTTPResponse def request(self, method, uri, body=None, headers=None): if headers is None: headers = {} if isinstance(body, httpsendfile): body.seek(0) httpclient.HTTPConnection.request(self, method, uri, body=body, headers=headers) _configuredlogging = False LOGFMT = '%(levelname)s:%(name)s:%(lineno)d:%(message)s' # Subclass BOTH of these because otherwise urllib2 "helpfully" # reinserts them since it notices we don't include any subclasses of # them. 
class http2handler(urllib2.HTTPHandler, urllib2.HTTPSHandler): def __init__(self, ui, pwmgr): global _configuredlogging urllib2.AbstractHTTPHandler.__init__(self) self.ui = ui self.pwmgr = pwmgr self._connections = {} # developer config: ui.http2debuglevel loglevel = ui.config('ui', 'http2debuglevel', default=None) if loglevel and not _configuredlogging: _configuredlogging = True logger = logging.getLogger('mercurial.httpclient') logger.setLevel(getattr(logging, loglevel.upper())) handler = logging.StreamHandler() handler.setFormatter(logging.Formatter(LOGFMT)) logger.addHandler(handler) def close_all(self): """Close and remove all connection objects being kept for reuse.""" for openconns in self._connections.values(): for conn in openconns: conn.close() self._connections = {} # shamelessly borrowed from urllib2.AbstractHTTPHandler def do_open(self, http_class, req, use_ssl): """Return an addinfourl object for the request, using http_class. http_class must implement the HTTPConnection API from httplib. The addinfourl return value is a file-like object. It also has methods and attributes including: - info(): return a mimetools.Message object for the headers - geturl(): return the original request URL - code: HTTP status code """ # If using a proxy, the host returned by get_host() is # actually the proxy. On Python 2.6.1, the real destination # hostname is encoded in the URI in the urllib2 request # object. On Python 2.6.5, it's stored in the _tunnel_host # attribute which has no accessor. tunhost = getattr(req, '_tunnel_host', None) host = req.get_host() if tunhost: proxyhost = host host = tunhost elif req.has_proxy(): proxyhost = req.get_host() host = req.get_selector().split('://', 1)[1].split('/', 1)[0] else: proxyhost = None if proxyhost: if ':' in proxyhost: # Note: this means we'll explode if we try and use an # IPv6 http proxy. This isn't a regression, so we # won't worry about it for now. 
proxyhost, proxyport = proxyhost.rsplit(':', 1) else: proxyport = 3128 # squid default proxy = (proxyhost, proxyport) else: proxy = None if not host: raise urllib2.URLError('no host given') connkey = use_ssl, host, proxy allconns = self._connections.get(connkey, []) conns = [c for c in allconns if not c.busy()] if conns: h = conns[0] else: if allconns: self.ui.debug('all connections for %s busy, making a new ' 'one\n' % host) timeout = None if req.timeout is not socket._GLOBAL_DEFAULT_TIMEOUT: timeout = req.timeout h = http_class(host, timeout=timeout, proxy_hostport=proxy) self._connections.setdefault(connkey, []).append(h) headers = dict(req.headers) headers.update(req.unredirected_hdrs) headers = dict( (name.title(), val) for name, val in headers.items()) try: path = req.get_selector() if '://' in path: path = path.split('://', 1)[1].split('/', 1)[1] if path[0] != '/': path = '/' + path h.request(req.get_method(), path, req.data, headers) r = h.getresponse() except socket.error as err: # XXX what error? raise urllib2.URLError(err) # Pick apart the HTTPResponse object to get the addinfourl # object initialized properly. r.recv = r.read resp = urllib.addinfourl(r, r.headers, req.get_full_url()) resp.code = r.status resp.msg = r.reason return resp # httplib always uses the given host/port as the socket connect # target, and then allows full URIs in the request path, which it # then observes and treats as a signal to do proxying instead. def http_open(self, req): if req.get_full_url().startswith('https'): return self.https_open(req) def makehttpcon(*args, **kwargs): k2 = dict(kwargs) k2['use_ssl'] = False return HTTPConnection(*args, **k2) return self.do_open(makehttpcon, req, False) def https_open(self, req): # req.get_full_url() does not contain credentials and we may # need them to match the certificates. 
url = req.get_full_url() user, password = self.pwmgr.find_stored_password(url) res = readauthforuri(self.ui, url, user) if res: group, auth = res self.auth = auth self.ui.debug("using auth.%s.* for authentication\n" % group) else: self.auth = None return self.do_open(self._makesslconnection, req, True) def _makesslconnection(self, host, port=443, *args, **kwargs): keyfile = None certfile = None if args: # key_file keyfile = args.pop(0) if args: # cert_file certfile = args.pop(0) # if the user has specified different key/cert files in # hgrc, we prefer these if self.auth and 'key' in self.auth and 'cert' in self.auth: keyfile = self.auth['key'] certfile = self.auth['cert'] # let host port take precedence if ':' in host and '[' not in host or ']:' in host: host, port = host.rsplit(':', 1) port = int(port) if '[' in host: host = host[1:-1] kwargs['keyfile'] = keyfile kwargs['certfile'] = certfile kwargs.update(sslutil.sslkwargs(self.ui, host)) con = HTTPConnection(host, port, use_ssl=True, ssl_wrap_socket=sslutil.wrapsocket, ssl_validator=sslutil.validator(self.ui, host), **kwargs) return con mercurial-3.7.3/mercurial/posix.py0000644000175000017500000004335612676531524016640 0ustar mpmmpm00000000000000# posix.py - Posix utility function implementations for Mercurial # # Copyright 2005-2009 Matt Mackall and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import errno import fcntl import getpass import grp import os import pwd import re import select import stat import sys import tempfile import unicodedata from .i18n import _ from . import ( encoding, ) posixfile = open normpath = os.path.normpath samestat = os.path.samestat try: oslink = os.link except AttributeError: # Some platforms build Python without os.link on systems that are # vaguely unix-like but don't have hardlink support. 
For those # poor souls, just say we tried and that it failed so we fall back # to copies. def oslink(src, dst): raise OSError(errno.EINVAL, 'hardlinks not supported: %s to %s' % (src, dst)) unlink = os.unlink rename = os.rename removedirs = os.removedirs expandglobs = False umask = os.umask(0) os.umask(umask) def split(p): '''Same as posixpath.split, but faster >>> import posixpath >>> for f in ['/absolute/path/to/file', ... 'relative/path/to/file', ... 'file_alone', ... 'path/to/directory/', ... '/multiple/path//separators', ... '/file_at_root', ... '///multiple_leading_separators_at_root', ... '']: ... assert split(f) == posixpath.split(f), f ''' ht = p.rsplit('/', 1) if len(ht) == 1: return '', p nh = ht[0].rstrip('/') if nh: return nh, ht[1] return ht[0] + '/', ht[1] def openhardlinks(): '''return true if it is safe to hold open file handles to hardlinks''' return True def nlinks(name): '''return number of hardlinks for the given file''' return os.lstat(name).st_nlink def parsepatchoutput(output_line): """parses the output produced by patch and returns the filename""" pf = output_line[14:] if os.sys.platform == 'OpenVMS': if pf[0] == '`': pf = pf[1:-1] # Remove the quotes else: if pf.startswith("'") and pf.endswith("'") and " " in pf: pf = pf[1:-1] # Remove the quotes return pf def sshargs(sshcmd, host, user, port): '''Build argument list for ssh''' args = user and ("%s@%s" % (user, host)) or host return port and ("%s -p %s" % (args, port)) or args def isexec(f): """check whether a file is executable""" return (os.lstat(f).st_mode & 0o100 != 0) def setflags(f, l, x): s = os.lstat(f).st_mode if l: if not stat.S_ISLNK(s): # switch file to link fp = open(f) data = fp.read() fp.close() os.unlink(f) try: os.symlink(data, f) except OSError: # failed to make a link, rewrite file fp = open(f, "w") fp.write(data) fp.close() # no chmod needed at this point return if stat.S_ISLNK(s): # switch link to file data = os.readlink(f) os.unlink(f) fp = open(f, "w") fp.write(data) 
fp.close() s = 0o666 & ~umask # avoid restatting for chmod sx = s & 0o100 if x and not sx: # Turn on +x for every +r bit when making a file executable # and obey umask. os.chmod(f, s | (s & 0o444) >> 2 & ~umask) elif not x and sx: # Turn off all +x bits os.chmod(f, s & 0o666) def copymode(src, dst, mode=None): '''Copy the file mode from the file at path src to dst. If src doesn't exist, we're using mode instead. If mode is None, we're using umask.''' try: st_mode = os.lstat(src).st_mode & 0o777 except OSError as inst: if inst.errno != errno.ENOENT: raise st_mode = mode if st_mode is None: st_mode = ~umask st_mode &= 0o666 os.chmod(dst, st_mode) def checkexec(path): """ Check whether the given path is on a filesystem with UNIX-like exec flags Requires a directory (like /foo/.hg) """ # VFAT on some Linux versions can flip mode but it doesn't persist # a FS remount. Frequently we can detect it if files are created # with exec bit on. try: EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH fh, fn = tempfile.mkstemp(dir=path, prefix='hg-checkexec-') try: os.close(fh) m = os.stat(fn).st_mode & 0o777 new_file_has_exec = m & EXECFLAGS os.chmod(fn, m ^ EXECFLAGS) exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0o777) == m) finally: os.unlink(fn) except (IOError, OSError): # we don't care, the user probably won't be able to commit anyway return False return not (new_file_has_exec or exec_flags_cannot_flip) def checklink(path): """check whether the given path is on a symlink-capable filesystem""" # mktemp is not racy because symlink creation will fail if the # file already exists while True: name = tempfile.mktemp(dir=path, prefix='hg-checklink-') try: fd = tempfile.NamedTemporaryFile(dir=path, prefix='hg-checklink-') try: os.symlink(os.path.basename(fd.name), name) os.unlink(name) return True except OSError as inst: # link creation might race, try again if inst[0] == errno.EEXIST: continue raise finally: fd.close() except AttributeError: return False except OSError as 
inst: # sshfs might report failure while successfully creating the link if inst[0] == errno.EIO and os.path.exists(name): os.unlink(name) return False def checkosfilename(path): '''Check that the base-relative path is a valid filename on this platform. Returns None if the path is ok, or a UI string describing the problem.''' pass # on posix platforms, every path is ok def setbinary(fd): pass def pconvert(path): return path def localpath(path): return path def samefile(fpath1, fpath2): """Returns whether path1 and path2 refer to the same file. This is only guaranteed to work for files, not directories.""" return os.path.samefile(fpath1, fpath2) def samedevice(fpath1, fpath2): """Returns whether fpath1 and fpath2 are on the same device. This is only guaranteed to work for files, not directories.""" st1 = os.lstat(fpath1) st2 = os.lstat(fpath2) return st1.st_dev == st2.st_dev # os.path.normcase is a no-op, which doesn't help us on non-native filesystems def normcase(path): return path.lower() # what normcase does to ASCII strings normcasespec = encoding.normcasespecs.lower # fallback normcase function for non-ASCII strings normcasefallback = normcase if sys.platform == 'darwin': def normcase(path): ''' Normalize a filename for OS X-compatible comparison: - escape-encode invalid characters - decompose to NFD - lowercase - omit ignored characters [200c-200f, 202a-202e, 206a-206f,feff] >>> normcase('UPPER') 'upper' >>> normcase('Caf\xc3\xa9') 'cafe\\xcc\\x81' >>> normcase('\xc3\x89') 'e\\xcc\\x81' >>> normcase('\xb8\xca\xc3\xca\xbe\xc8.JPG') # issue3918 '%b8%ca%c3\\xca\\xbe%c8.jpg' ''' try: return encoding.asciilower(path) # exception for non-ASCII except UnicodeDecodeError: return normcasefallback(path) normcasespec = encoding.normcasespecs.lower def normcasefallback(path): try: u = path.decode('utf-8') except UnicodeDecodeError: # OS X percent-encodes any bytes that aren't valid utf-8 s = '' pos = 0 l = len(path) while pos < l: try: c = encoding.getutf8char(path, pos) 
pos += len(c) except ValueError: c = '%%%02X' % ord(path[pos]) pos += 1 s += c u = s.decode('utf-8') # Decompose then lowercase (HFS+ technote specifies lower) enc = unicodedata.normalize('NFD', u).lower().encode('utf-8') # drop HFS+ ignored characters return encoding.hfsignoreclean(enc) if sys.platform == 'cygwin': # workaround for cygwin, in which mount point part of path is # treated as case sensitive, even though underlying NTFS is case # insensitive. # default mount points cygwinmountpoints = sorted([ "/usr/bin", "/usr/lib", "/cygdrive", ], reverse=True) # use upper-ing as normcase as same as NTFS workaround def normcase(path): pathlen = len(path) if (pathlen == 0) or (path[0] != os.sep): # treat as relative return encoding.upper(path) # to preserve case of mountpoint part for mp in cygwinmountpoints: if not path.startswith(mp): continue mplen = len(mp) if mplen == pathlen: # mount point itself return mp if path[mplen] == os.sep: return mp + encoding.upper(path[mplen:]) return encoding.upper(path) normcasespec = encoding.normcasespecs.other normcasefallback = normcase # Cygwin translates native ACLs to POSIX permissions, # but these translations are not supported by native # tools, so the exec bit tends to be set erroneously. # Therefore, disable executable bit access on Cygwin. 
def checkexec(path): return False # Similarly, Cygwin's symlink emulation is likely to create # problems when Mercurial is used from both Cygwin and native # Windows, with other native tools, or on shared volumes def checklink(path): return False _needsshellquote = None def shellquote(s): if os.sys.platform == 'OpenVMS': return '"%s"' % s global _needsshellquote if _needsshellquote is None: _needsshellquote = re.compile(r'[^a-zA-Z0-9._/+-]').search if s and not _needsshellquote(s): # "s" shouldn't have to be quoted return s else: return "'%s'" % s.replace("'", "'\\''") def quotecommand(cmd): return cmd def popen(command, mode='r'): return os.popen(command, mode) def testpid(pid): '''return False if pid dead, True if running or not sure''' if os.sys.platform == 'OpenVMS': return True try: os.kill(pid, 0) return True except OSError as inst: return inst.errno != errno.ESRCH def explainexit(code): """return a 2-tuple (desc, code) describing a subprocess status (codes from kill are negative - not os.system/wait encoding)""" if code >= 0: return _("exited with status %d") % code, code return _("killed by signal %d") % -code, -code def isowner(st): """Return True if the stat object st is from the current user.""" return st.st_uid == os.getuid() def findexe(command): '''Find executable for command searching like which does. If command is a basename then PATH is searched for command. PATH isn't searched if command is an absolute or relative path. 
If command isn't found None is returned.''' if sys.platform == 'OpenVMS': return command def findexisting(executable): 'Will return executable if existing file' if os.path.isfile(executable) and os.access(executable, os.X_OK): return executable return None if os.sep in command: return findexisting(command) if sys.platform == 'plan9': return findexisting(os.path.join('/bin', command)) for path in os.environ.get('PATH', '').split(os.pathsep): executable = findexisting(os.path.join(path, command)) if executable is not None: return executable return None def setsignalhandler(): pass _wantedkinds = set([stat.S_IFREG, stat.S_IFLNK]) def statfiles(files): '''Stat each file in files. Yield each stat, or None if a file does not exist or has a type we don't care about.''' lstat = os.lstat getkind = stat.S_IFMT for nf in files: try: st = lstat(nf) if getkind(st.st_mode) not in _wantedkinds: st = None except OSError as err: if err.errno not in (errno.ENOENT, errno.ENOTDIR): raise st = None yield st def getuser(): '''return name of current user''' return getpass.getuser() def username(uid=None): """Return the name of the user with the given uid. If uid is None, return the name of the current user.""" if uid is None: uid = os.getuid() try: return pwd.getpwuid(uid)[0] except KeyError: return str(uid) def groupname(gid=None): """Return the name of the group with the given gid. If gid is None, return the name of the current group.""" if gid is None: gid = os.getgid() try: return grp.getgrgid(gid)[0] except KeyError: return str(gid) def groupmembers(name): """Return the list of members of the group with the given name, KeyError if the group does not exist. 
""" return list(grp.getgrnam(name).gr_mem) def spawndetached(args): return os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0), args[0], args) def gethgcmd(): return sys.argv[:1] def termwidth(): try: import array import termios for dev in (sys.stderr, sys.stdout, sys.stdin): try: try: fd = dev.fileno() except AttributeError: continue if not os.isatty(fd): continue try: arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8) width = array.array('h', arri)[1] if width > 0: return width except AttributeError: pass except ValueError: pass except IOError as e: if e[0] == errno.EINVAL: pass else: raise except ImportError: pass return 80 def makedir(path, notindexed): os.mkdir(path) def unlinkpath(f, ignoremissing=False): """unlink and remove the directory if it is empty""" try: os.unlink(f) except OSError as e: if not (ignoremissing and e.errno == errno.ENOENT): raise # try removing directories that might now be empty try: os.removedirs(os.path.dirname(f)) except OSError: pass def lookupreg(key, name=None, scope=None): return None def hidewindow(): """Hide current shell window. Used to hide the window opened when starting asynchronous child process under Windows, unneeded on other systems. """ pass class cachestat(object): def __init__(self, path): self.stat = os.stat(path) def cacheable(self): return bool(self.stat.st_ino) __hash__ = object.__hash__ def __eq__(self, other): try: # Only dev, ino, size, mtime and atime are likely to change. Out # of these, we shouldn't compare atime but should compare the # rest. However, one of the other fields changing indicates # something fishy going on, so return False if anything but atime # changes. 
return (self.stat.st_mode == other.stat.st_mode and self.stat.st_ino == other.stat.st_ino and self.stat.st_dev == other.stat.st_dev and self.stat.st_nlink == other.stat.st_nlink and self.stat.st_uid == other.stat.st_uid and self.stat.st_gid == other.stat.st_gid and self.stat.st_size == other.stat.st_size and self.stat.st_mtime == other.stat.st_mtime and self.stat.st_ctime == other.stat.st_ctime) except AttributeError: return False def __ne__(self, other): return not self == other def executablepath(): return None # available on Windows only def statislink(st): '''check whether a stat result is a symlink''' return st and stat.S_ISLNK(st.st_mode) def statisexec(st): '''check whether a stat result is an executable file''' return st and (st.st_mode & 0o100 != 0) def poll(fds): """block until something happens on any file descriptor This is a generic helper that will check for any activity (read, write. exception) and return the list of touched files. In unsupported cases, it will raise a NotImplementedError""" try: res = select.select(fds, fds, fds) except ValueError: # out of range file descriptor raise NotImplementedError() return sorted(list(set(sum(res, [])))) def readpipe(pipe): """Read all available data from a pipe.""" # We can't fstat() a pipe because Linux will always report 0. # So, we set the pipe to non-blocking mode and read everything # that's available. 
flags = fcntl.fcntl(pipe, fcntl.F_GETFL) flags |= os.O_NONBLOCK oldflags = fcntl.fcntl(pipe, fcntl.F_SETFL, flags) try: chunks = [] while True: try: s = pipe.read() if not s: break chunks.append(s) except IOError: break return ''.join(chunks) finally: fcntl.fcntl(pipe, fcntl.F_SETFL, oldflags) mercurial-3.7.3/mercurial/config.py0000644000175000017500000001413412676531524016733 0ustar mpmmpm00000000000000# config.py - configuration parsing for Mercurial # # Copyright 2009 Matt Mackall and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import errno import os from .i18n import _ from . import ( error, util, ) class config(object): def __init__(self, data=None, includepaths=[]): self._data = {} self._source = {} self._unset = [] self._includepaths = includepaths if data: for k in data._data: self._data[k] = data[k].copy() self._source = data._source.copy() def copy(self): return config(self) def __contains__(self, section): return section in self._data def hasitem(self, section, item): return item in self._data.get(section, {}) def __getitem__(self, section): return self._data.get(section, {}) def __iter__(self): for d in self.sections(): yield d def update(self, src): for s, n in src._unset: if s in self and n in self._data[s]: del self._data[s][n] del self._source[(s, n)] for s in src: if s not in self: self._data[s] = util.sortdict() self._data[s].update(src._data[s]) self._source.update(src._source) def get(self, section, item, default=None): return self._data.get(section, {}).get(item, default) def backup(self, section, item): """return a tuple allowing restore to reinstall a previous value The main reason we need it is because it handles the "no data" case. 
""" try: value = self._data[section][item] source = self.source(section, item) return (section, item, value, source) except KeyError: return (section, item) def source(self, section, item): return self._source.get((section, item), "") def sections(self): return sorted(self._data.keys()) def items(self, section): return self._data.get(section, {}).items() def set(self, section, item, value, source=""): if section not in self: self._data[section] = util.sortdict() self._data[section][item] = value if source: self._source[(section, item)] = source def restore(self, data): """restore data returned by self.backup""" if len(data) == 4: # restore old data section, item, value, source = data self._data[section][item] = value self._source[(section, item)] = source else: # no data before, remove everything section, item = data if section in self._data: self._data[section].pop(item, None) self._source.pop((section, item), None) def parse(self, src, data, sections=None, remap=None, include=None): sectionre = util.re.compile(r'\[([^\[]+)\]') itemre = util.re.compile(r'([^=\s][^=]*?)\s*=\s*(.*\S|)') contre = util.re.compile(r'\s+(\S|\S.*\S)\s*$') emptyre = util.re.compile(r'(;|#|\s*$)') commentre = util.re.compile(r'(;|#)') unsetre = util.re.compile(r'%unset\s+(\S+)') includere = util.re.compile(r'%include\s+(\S|\S.*\S)\s*$') section = "" item = None line = 0 cont = False for l in data.splitlines(True): line += 1 if line == 1 and l.startswith('\xef\xbb\xbf'): # Someone set us up the BOM l = l[3:] if cont: if commentre.match(l): continue m = contre.match(l) if m: if sections and section not in sections: continue v = self.get(section, item) + "\n" + m.group(1) self.set(section, item, v, "%s:%d" % (src, line)) continue item = None cont = False m = includere.match(l) if m and include: expanded = util.expandpath(m.group(1)) includepaths = [os.path.dirname(src)] + self._includepaths for base in includepaths: inc = os.path.normpath(os.path.join(base, expanded)) try: include(inc, 
remap=remap, sections=sections) break except IOError as inst: if inst.errno != errno.ENOENT: raise error.ParseError(_("cannot include %s (%s)") % (inc, inst.strerror), "%s:%s" % (src, line)) continue if emptyre.match(l): continue m = sectionre.match(l) if m: section = m.group(1) if remap: section = remap.get(section, section) if section not in self: self._data[section] = util.sortdict() continue m = itemre.match(l) if m: item = m.group(1) cont = True if sections and section not in sections: continue self.set(section, item, m.group(2), "%s:%d" % (src, line)) continue m = unsetre.match(l) if m: name = m.group(1) if sections and section not in sections: continue if self.get(section, name) is not None: del self._data[section][name] self._unset.append((section, name)) continue raise error.ParseError(l.rstrip(), ("%s:%s" % (src, line))) def read(self, path, fp=None, sections=None, remap=None): if not fp: fp = util.posixfile(path) self.parse(path, fp.read(), sections, remap, self.read) mercurial-3.7.3/mercurial/demandimport.py0000644000175000017500000002271212676531525020153 0ustar mpmmpm00000000000000# demandimport.py - global demand-loading of modules for Mercurial # # Copyright 2006, 2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. ''' demandimport - automatic demandloading of modules To enable this module, do: import demandimport; demandimport.enable() Imports of the following forms will be demand-loaded: import a, b.c import a.b as c from a import b,c # a will be loaded immediately These imports will not be delayed: from a import * b = __import__(a) ''' from __future__ import absolute_import import contextlib import os import sys # __builtin__ in Python 2, builtins in Python 3. 
try: import __builtin__ as builtins except ImportError: import builtins contextmanager = contextlib.contextmanager _origimport = __import__ nothing = object() # Python 3 doesn't have relative imports nor level -1. level = -1 if sys.version_info[0] >= 3: level = 0 _import = _origimport def _hgextimport(importfunc, name, globals, *args, **kwargs): try: return importfunc(name, globals, *args, **kwargs) except ImportError: if not globals: raise # extensions are loaded with "hgext_" prefix hgextname = 'hgext_%s' % name nameroot = hgextname.split('.', 1)[0] contextroot = globals.get('__name__', '').split('.', 1)[0] if nameroot != contextroot: raise # retry to import with "hgext_" prefix return importfunc(hgextname, globals, *args, **kwargs) class _demandmod(object): """module demand-loader and proxy""" def __init__(self, name, globals, locals, level=level): if '.' in name: head, rest = name.split('.', 1) after = [rest] else: head = name after = [] object.__setattr__(self, "_data", (head, globals, locals, after, level, set())) object.__setattr__(self, "_module", None) def _extend(self, name): """add to the list of submodules to load""" self._data[3].append(name) def _addref(self, name): """Record that the named module ``name`` imports this module. References to this proxy class having the name of this module will be replaced at module load time. We assume the symbol inside the importing module is identical to the "head" name of this module. We don't actually know if "as X" syntax is being used to change the symbol name because this information isn't exposed to __import__. """ self._data[5].add(name) def _load(self): if not self._module: head, globals, locals, after, level, modrefs = self._data mod = _hgextimport(_import, head, globals, locals, None, level) # load submodules def subload(mod, p): h, t = p, None if '.' 
in p: h, t = p.split('.', 1) if getattr(mod, h, nothing) is nothing: setattr(mod, h, _demandmod(p, mod.__dict__, mod.__dict__)) elif t: subload(getattr(mod, h), t) for x in after: subload(mod, x) # Replace references to this proxy instance with the actual module. if locals and locals.get(head) == self: locals[head] = mod for modname in modrefs: modref = sys.modules.get(modname, None) if modref and getattr(modref, head, None) == self: setattr(modref, head, mod) object.__setattr__(self, "_module", mod) def __repr__(self): if self._module: return "" % self._data[0] return "" % self._data[0] def __call__(self, *args, **kwargs): raise TypeError("%s object is not callable" % repr(self)) def __getattribute__(self, attr): if attr in ('_data', '_extend', '_load', '_module', '_addref'): return object.__getattribute__(self, attr) self._load() return getattr(self._module, attr) def __setattr__(self, attr, val): self._load() setattr(self._module, attr, val) _pypy = '__pypy__' in sys.builtin_module_names def _demandimport(name, globals=None, locals=None, fromlist=None, level=level): if not locals or name in ignore or fromlist == ('*',): # these cases we can't really delay return _hgextimport(_import, name, globals, locals, fromlist, level) elif not fromlist: # import a [as b] if '.' in name: # a.b base, rest = name.split('.', 1) # email.__init__ loading email.mime if globals and globals.get('__name__', None) == base: return _import(name, globals, locals, fromlist, level) # if a is already demand-loaded, add b to its submodule list if base in locals: if isinstance(locals[base], _demandmod): locals[base]._extend(rest) return locals[base] return _demandmod(name, globals, locals, level) else: # There is a fromlist. # from a import b,c,d # from . import b,c,d # from .a import b,c,d # level == -1: relative and absolute attempted (Python 2 only). # level >= 0: absolute only (Python 2 w/ absolute_import and Python 3). 
# The modern Mercurial convention is to use absolute_import everywhere, # so modern Mercurial code will have level >= 0. # The name of the module the import statement is located in. globalname = globals.get('__name__') def processfromitem(mod, attr): """Process an imported symbol in the import statement. If the symbol doesn't exist in the parent module, it must be a module. We set missing modules up as _demandmod instances. """ symbol = getattr(mod, attr, nothing) if symbol is nothing: symbol = _demandmod(attr, mod.__dict__, locals, level=1) setattr(mod, attr, symbol) # Record the importing module references this symbol so we can # replace the symbol with the actual module instance at load # time. if globalname and isinstance(symbol, _demandmod): symbol._addref(globalname) if level >= 0: # The "from a import b,c,d" or "from .a import b,c,d" # syntax gives errors with some modules for unknown # reasons. Work around the problem. if name: return _hgextimport(_origimport, name, globals, locals, fromlist, level) if _pypy: # PyPy's __import__ throws an exception if invoked # with an empty name and no fromlist. Recreate the # desired behaviour by hand. mn = globalname mod = sys.modules[mn] if getattr(mod, '__path__', nothing) is nothing: mn = mn.rsplit('.', 1)[0] mod = sys.modules[mn] if level > 1: mn = mn.rsplit('.', level - 1)[0] mod = sys.modules[mn] else: mod = _hgextimport(_origimport, name, globals, locals, level=level) for x in fromlist: processfromitem(mod, x) return mod # But, we still need to support lazy loading of standard library and 3rd # party modules. So handle level == -1. 
mod = _hgextimport(_origimport, name, globals, locals) # recurse down the module chain for comp in name.split('.')[1:]: if getattr(mod, comp, nothing) is nothing: setattr(mod, comp, _demandmod(comp, mod.__dict__, mod.__dict__)) mod = getattr(mod, comp) for x in fromlist: processfromitem(mod, x) return mod ignore = [ '__future__', '_hashlib', # ImportError during pkg_resources/__init__.py:fixup_namespace_package '_imp', '_xmlplus', 'fcntl', 'win32com.gen_py', '_winreg', # 2.7 mimetypes needs immediate ImportError 'pythoncom', # imported by tarfile, not available under Windows 'pwd', 'grp', # imported by profile, itself imported by hotshot.stats, # not available under Windows 'resource', # this trips up many extension authors 'gtk', # setuptools' pkg_resources.py expects "from __main__ import x" to # raise ImportError if x not defined '__main__', '_ssl', # conditional imports in the stdlib, issue1964 '_sre', # issue4920 'rfc822', 'mimetools', # setuptools 8 expects this module to explode early when not on windows 'distutils.msvc9compiler' ] def isenabled(): return builtins.__import__ == _demandimport def enable(): "enable global demand-loading of modules" if os.environ.get('HGDEMANDIMPORT') != 'disable': builtins.__import__ = _demandimport def disable(): "disable global demand-loading of modules" builtins.__import__ = _origimport @contextmanager def deactivated(): "context manager for disabling demandimport in 'with' blocks" demandenabled = isenabled() if demandenabled: disable() try: yield finally: if demandenabled: enable() mercurial-3.7.3/mercurial/progress.py0000644000175000017500000002225712676531525017340 0ustar mpmmpm00000000000000# progress.py progress bars related code # # Copyright (C) 2010 Augie Fackler # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import sys import threading import time from .i18n import _ from . 
import encoding def spacejoin(*args): return ' '.join(s for s in args if s) def shouldprint(ui): return not (ui.quiet or ui.plain()) and ( ui._isatty(sys.stderr) or ui.configbool('progress', 'assume-tty')) def fmtremaining(seconds): """format a number of remaining seconds in human readable way This will properly display seconds, minutes, hours, days if needed""" if seconds < 60: # i18n: format XX seconds as "XXs" return _("%02ds") % (seconds) minutes = seconds // 60 if minutes < 60: seconds -= minutes * 60 # i18n: format X minutes and YY seconds as "XmYYs" return _("%dm%02ds") % (minutes, seconds) # we're going to ignore seconds in this case minutes += 1 hours = minutes // 60 minutes -= hours * 60 if hours < 30: # i18n: format X hours and YY minutes as "XhYYm" return _("%dh%02dm") % (hours, minutes) # we're going to ignore minutes in this case hours += 1 days = hours // 24 hours -= days * 24 if days < 15: # i18n: format X days and YY hours as "XdYYh" return _("%dd%02dh") % (days, hours) # we're going to ignore hours in this case days += 1 weeks = days // 7 days -= weeks * 7 if weeks < 55: # i18n: format X weeks and YY days as "XwYYd" return _("%dw%02dd") % (weeks, days) # we're going to ignore days and treat a year as 52 weeks weeks += 1 years = weeks // 52 weeks -= years * 52 # i18n: format X years and YY weeks as "XyYYw" return _("%dy%02dw") % (years, weeks) class progbar(object): def __init__(self, ui): self.ui = ui self._refreshlock = threading.Lock() self.resetstate() def resetstate(self): self.topics = [] self.topicstates = {} self.starttimes = {} self.startvals = {} self.printed = False self.lastprint = time.time() + float(self.ui.config( 'progress', 'delay', default=3)) self.curtopic = None self.lasttopic = None self.indetcount = 0 self.refresh = float(self.ui.config( 'progress', 'refresh', default=0.1)) self.changedelay = max(3 * self.refresh, float(self.ui.config( 'progress', 'changedelay', default=1))) self.order = self.ui.configlist( 'progress', 
'format', default=['topic', 'bar', 'number', 'estimate']) def show(self, now, topic, pos, item, unit, total): if not shouldprint(self.ui): return termwidth = self.width() self.printed = True head = '' needprogress = False tail = '' for indicator in self.order: add = '' if indicator == 'topic': add = topic elif indicator == 'number': if total: add = ('% ' + str(len(str(total))) + 's/%s') % (pos, total) else: add = str(pos) elif indicator.startswith('item') and item: slice = 'end' if '-' in indicator: wid = int(indicator.split('-')[1]) elif '+' in indicator: slice = 'beginning' wid = int(indicator.split('+')[1]) else: wid = 20 if slice == 'end': add = encoding.trim(item, wid, leftside=True) else: add = encoding.trim(item, wid) add += (wid - encoding.colwidth(add)) * ' ' elif indicator == 'bar': add = '' needprogress = True elif indicator == 'unit' and unit: add = unit elif indicator == 'estimate': add = self.estimate(topic, pos, total, now) elif indicator == 'speed': add = self.speed(topic, pos, unit, now) if not needprogress: head = spacejoin(head, add) else: tail = spacejoin(tail, add) if needprogress: used = 0 if head: used += encoding.colwidth(head) + 1 if tail: used += encoding.colwidth(tail) + 1 progwidth = termwidth - used - 3 if total and pos <= total: amt = pos * progwidth // total bar = '=' * (amt - 1) if amt > 0: bar += '>' bar += ' ' * (progwidth - amt) else: progwidth -= 3 self.indetcount += 1 # mod the count by twice the width so we can make the # cursor bounce between the right and left sides amt = self.indetcount % (2 * progwidth) amt -= progwidth bar = (' ' * int(progwidth - abs(amt)) + '<=>' + ' ' * int(abs(amt))) prog = ''.join(('[', bar , ']')) out = spacejoin(head, prog, tail) else: out = spacejoin(head, tail) sys.stderr.write('\r' + encoding.trim(out, termwidth)) self.lasttopic = topic sys.stderr.flush() def clear(self): if not shouldprint(self.ui): return sys.stderr.write('\r%s\r' % (' ' * self.width())) if self.printed: # force immediate 
re-paint of progress bar self.lastprint = 0 def complete(self): if not shouldprint(self.ui): return if self.ui.configbool('progress', 'clear-complete', default=True): self.clear() else: sys.stderr.write('\n') sys.stderr.flush() def width(self): tw = self.ui.termwidth() return min(int(self.ui.config('progress', 'width', default=tw)), tw) def estimate(self, topic, pos, total, now): if total is None: return '' initialpos = self.startvals[topic] target = total - initialpos delta = pos - initialpos if delta > 0: elapsed = now - self.starttimes[topic] # experimental config: progress.estimate if elapsed > float( self.ui.config('progress', 'estimate', default=2)): seconds = (elapsed * (target - delta)) // delta + 1 return fmtremaining(seconds) return '' def speed(self, topic, pos, unit, now): initialpos = self.startvals[topic] delta = pos - initialpos elapsed = now - self.starttimes[topic] if elapsed > float( self.ui.config('progress', 'estimate', default=2)): return _('%d %s/sec') % (delta / elapsed, unit) return '' def _oktoprint(self, now): '''Check if conditions are met to print - e.g. 
changedelay elapsed''' if (self.lasttopic is None # first time we printed # not a topic change or self.curtopic == self.lasttopic # it's been long enough we should print anyway or now - self.lastprint >= self.changedelay): return True else: return False def progress(self, topic, pos, item='', unit='', total=None): now = time.time() self._refreshlock.acquire() try: if pos is None: self.starttimes.pop(topic, None) self.startvals.pop(topic, None) self.topicstates.pop(topic, None) # reset the progress bar if this is the outermost topic if self.topics and self.topics[0] == topic and self.printed: self.complete() self.resetstate() # truncate the list of topics assuming all topics within # this one are also closed if topic in self.topics: self.topics = self.topics[:self.topics.index(topic)] # reset the last topic to the one we just unwound to, # so that higher-level topics will be stickier than # lower-level topics if self.topics: self.lasttopic = self.topics[-1] else: self.lasttopic = None else: if topic not in self.topics: self.starttimes[topic] = now self.startvals[topic] = pos self.topics.append(topic) self.topicstates[topic] = pos, item, unit, total self.curtopic = topic if now - self.lastprint >= self.refresh and self.topics: if self._oktoprint(now): self.lastprint = now self.show(now, topic, *self.topicstates[topic]) finally: self._refreshlock.release() mercurial-3.7.3/mercurial/keepalive.py0000644000175000017500000006225212676531524017437 0ustar mpmmpm00000000000000# This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. 
# # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, see # . # This file is part of urlgrabber, a high-level cross-protocol url-grabber # Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko # Modified by Benoit Boissinot: # - fix for digest auth (inspired from urllib2.py @ Python v2.4) # Modified by Dirkjan Ochtman: # - import md5 function from a local util module # Modified by Augie Fackler: # - add safesend method and use it to prevent broken pipe errors # on large POST requests """An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive. >>> import urllib2 >>> from keepalive import HTTPHandler >>> keepalive_handler = HTTPHandler() >>> opener = urllib2.build_opener(keepalive_handler) >>> urllib2.install_opener(opener) >>> >>> fo = urllib2.urlopen('http://www.python.org') If a connection to a given host is requested, and all of the existing connections are still in use, another connection will be opened. If the handler tries to use an existing connection but it fails in some way, it will be closed and removed from the pool. To remove the handler, simply re-run build_opener with no arguments, and install that opener. You can explicitly close connections by using the close_connection() method of the returned file-like object (described below) or you can use the handler methods: close_connection(host) close_all() open_connections() NOTE: using the close_connection and close_all methods of the handler should be done with care when using multiple threads. 
* there is nothing that prevents another thread from creating new connections immediately after connections are closed * no checks are done to prevent in-use connections from being closed >>> keepalive_handler.close_all() EXTRA ATTRIBUTES AND METHODS Upon a status of 200, the object returned has a few additional attributes and methods, which should not be used if you want to remain consistent with the normal urllib2-returned objects: close_connection() - close the connection to the host readlines() - you know, readlines() status - the return status (i.e. 404) reason - english translation of status (i.e. 'File not found') If you want the best of both worlds, use this inside an AttributeError-catching try: >>> try: status = fo.status >>> except AttributeError: status = None Unfortunately, these are ONLY there if status == 200, so it's not easy to distinguish between non-200 responses. The reason is that urllib2 tries to do clever things with error codes 301, 302, 401, and 407, and it wraps the object upon return. For python versions earlier than 2.4, you can avoid this fancy error handling by setting the module-level global HANDLE_ERRORS to zero. You see, prior to 2.4, it's the HTTP Handler's job to determine what to handle specially, and what to just pass up. HANDLE_ERRORS == 0 means "pass everything up". In python 2.4, however, this job no longer belongs to the HTTP Handler and is now done by a NEW handler, HTTPErrorProcessor. 
Here's the bottom line: python version < 2.4 HANDLE_ERRORS == 1 (default) pass up 200, treat the rest as errors HANDLE_ERRORS == 0 pass everything up, error processing is left to the calling code python version >= 2.4 HANDLE_ERRORS == 1 pass up 200, treat the rest as errors HANDLE_ERRORS == 0 (default) pass everything up, let the other handlers (specifically, HTTPErrorProcessor) decide what to do In practice, setting the variable either way makes little difference in python 2.4, so for the most consistent behavior across versions, you probably just want to use the defaults, which will give you exceptions on errors. """ # $Id: keepalive.py,v 1.14 2006/04/04 21:00:32 mstenner Exp $ from __future__ import absolute_import, print_function import errno import httplib import socket import sys import thread import urllib2 DEBUG = None if sys.version_info < (2, 4): HANDLE_ERRORS = 1 else: HANDLE_ERRORS = 0 class ConnectionManager(object): """ The connection manager must be able to: * keep track of all existing """ def __init__(self): self._lock = thread.allocate_lock() self._hostmap = {} # map hosts to a list of connections self._connmap = {} # map connections to host self._readymap = {} # map connection to ready state def add(self, host, connection, ready): self._lock.acquire() try: if host not in self._hostmap: self._hostmap[host] = [] self._hostmap[host].append(connection) self._connmap[connection] = host self._readymap[connection] = ready finally: self._lock.release() def remove(self, connection): self._lock.acquire() try: try: host = self._connmap[connection] except KeyError: pass else: del self._connmap[connection] del self._readymap[connection] self._hostmap[host].remove(connection) if not self._hostmap[host]: del self._hostmap[host] finally: self._lock.release() def set_ready(self, connection, ready): try: self._readymap[connection] = ready except KeyError: pass def get_ready_conn(self, host): conn = None self._lock.acquire() try: if host in self._hostmap: for c in 
self._hostmap[host]: if self._readymap[c]: self._readymap[c] = 0 conn = c break finally: self._lock.release() return conn def get_all(self, host=None): if host: return list(self._hostmap.get(host, [])) else: return dict(self._hostmap) class KeepAliveHandler(object): def __init__(self): self._cm = ConnectionManager() #### Connection Management def open_connections(self): """return a list of connected hosts and the number of connections to each. [('foo.com:80', 2), ('bar.org', 1)]""" return [(host, len(li)) for (host, li) in self._cm.get_all().items()] def close_connection(self, host): """close connection(s) to host is the host:port spec, as in 'www.cnn.com:8080' as passed in. no error occurs if there is no connection to that host.""" for h in self._cm.get_all(host): self._cm.remove(h) h.close() def close_all(self): """close all open connections""" for host, conns in self._cm.get_all().iteritems(): for h in conns: self._cm.remove(h) h.close() def _request_closed(self, request, host, connection): """tells us that this request is now closed and that the connection is ready for another request""" self._cm.set_ready(connection, 1) def _remove_connection(self, host, connection, close=0): if close: connection.close() self._cm.remove(connection) #### Transaction Execution def http_open(self, req): return self.do_open(HTTPConnection, req) def do_open(self, http_class, req): host = req.get_host() if not host: raise urllib2.URLError('no host given') try: h = self._cm.get_ready_conn(host) while h: r = self._reuse_connection(h, req, host) # if this response is non-None, then it worked and we're # done. Break out, skipping the else block. if r: break # connection is bad - possibly closed by server # discard it and ask for the next free connection h.close() self._cm.remove(h) h = self._cm.get_ready_conn(host) else: # no (working) free connections were found. Create a new one. 
h = http_class(host) if DEBUG: DEBUG.info("creating new connection to %s (%d)", host, id(h)) self._cm.add(host, h, 0) self._start_transaction(h, req) r = h.getresponse() except (socket.error, httplib.HTTPException) as err: raise urllib2.URLError(err) # if not a persistent connection, don't try to reuse it if r.will_close: self._cm.remove(h) if DEBUG: DEBUG.info("STATUS: %s, %s", r.status, r.reason) r._handler = self r._host = host r._url = req.get_full_url() r._connection = h r.code = r.status r.headers = r.msg r.msg = r.reason if r.status == 200 or not HANDLE_ERRORS: return r else: return self.parent.error('http', req, r, r.status, r.msg, r.headers) def _reuse_connection(self, h, req, host): """start the transaction with a re-used connection return a response object (r) upon success or None on failure. This DOES not close or remove bad connections in cases where it returns. However, if an unexpected exception occurs, it will close and remove the connection before re-raising. """ try: self._start_transaction(h, req) r = h.getresponse() # note: just because we got something back doesn't mean it # worked. We'll check the version below, too. except (socket.error, httplib.HTTPException): r = None except: # re-raises # adding this block just in case we've missed # something we will still raise the exception, but # lets try and close the connection and remove it # first. We previously got into a nasty loop # where an exception was uncaught, and so the # connection stayed open. On the next try, the # same exception was raised, etc. The trade-off is # that it's now possible this call will raise # a DIFFERENT exception if DEBUG: DEBUG.error("unexpected exception - closing " "connection to %s (%d)", host, id(h)) self._cm.remove(h) h.close() raise if r is None or r.version == 9: # httplib falls back to assuming HTTP 0.9 if it gets a # bad header back. This is most likely to happen if # the socket has been closed by the server since we # last used the connection. 
if DEBUG: DEBUG.info("failed to re-use connection to %s (%d)", host, id(h)) r = None else: if DEBUG: DEBUG.info("re-using connection to %s (%d)", host, id(h)) return r def _start_transaction(self, h, req): # What follows mostly reimplements HTTPConnection.request() # except it adds self.parent.addheaders in the mix. headers = req.headers.copy() if sys.version_info >= (2, 4): headers.update(req.unredirected_hdrs) headers.update(self.parent.addheaders) headers = dict((n.lower(), v) for n, v in headers.items()) skipheaders = {} for n in ('host', 'accept-encoding'): if n in headers: skipheaders['skip_' + n.replace('-', '_')] = 1 try: if req.has_data(): data = req.get_data() h.putrequest('POST', req.get_selector(), **skipheaders) if 'content-type' not in headers: h.putheader('Content-type', 'application/x-www-form-urlencoded') if 'content-length' not in headers: h.putheader('Content-length', '%d' % len(data)) else: h.putrequest('GET', req.get_selector(), **skipheaders) except (socket.error) as err: raise urllib2.URLError(err) for k, v in headers.items(): h.putheader(k, v) h.endheaders() if req.has_data(): h.send(data) class HTTPHandler(KeepAliveHandler, urllib2.HTTPHandler): pass class HTTPResponse(httplib.HTTPResponse): # we need to subclass HTTPResponse in order to # 1) add readline() and readlines() methods # 2) add close_connection() methods # 3) add info() and geturl() methods # in order to add readline(), read must be modified to deal with a # buffer. example: readline must read a buffer and then spit back # one line at a time. The only real alternative is to read one # BYTE at a time (ick). Once something has been read, it can't be # put back (ok, maybe it can, but that's even uglier than this), # so if you THEN do a normal read, you must first take stuff from # the buffer. # the read method wraps the original to accommodate buffering, # although read() never adds to the buffer. 
# Both readline and readlines have been stolen with almost no # modification from socket.py def __init__(self, sock, debuglevel=0, strict=0, method=None): httplib.HTTPResponse.__init__(self, sock, debuglevel, method) self.fileno = sock.fileno self.code = None self._rbuf = '' self._rbufsize = 8096 self._handler = None # inserted by the handler later self._host = None # (same) self._url = None # (same) self._connection = None # (same) _raw_read = httplib.HTTPResponse.read def close(self): if self.fp: self.fp.close() self.fp = None if self._handler: self._handler._request_closed(self, self._host, self._connection) def close_connection(self): self._handler._remove_connection(self._host, self._connection, close=1) self.close() def info(self): return self.headers def geturl(self): return self._url def read(self, amt=None): # the _rbuf test is only in this first if for speed. It's not # logically necessary if self._rbuf and not amt is None: L = len(self._rbuf) if amt > L: amt -= L else: s = self._rbuf[:amt] self._rbuf = self._rbuf[amt:] return s s = self._rbuf + self._raw_read(amt) self._rbuf = '' return s # stolen from Python SVN #68532 to fix issue1088 def _read_chunked(self, amt): chunk_left = self.chunk_left value = '' # XXX This accumulates chunks by repeated string concatenation, # which is not efficient as the number or size of chunks gets big. 
while True: if chunk_left is None: line = self.fp.readline() i = line.find(';') if i >= 0: line = line[:i] # strip chunk-extensions try: chunk_left = int(line, 16) except ValueError: # close the connection as protocol synchronization is # probably lost self.close() raise httplib.IncompleteRead(value) if chunk_left == 0: break if amt is None: value += self._safe_read(chunk_left) elif amt < chunk_left: value += self._safe_read(amt) self.chunk_left = chunk_left - amt return value elif amt == chunk_left: value += self._safe_read(amt) self._safe_read(2) # toss the CRLF at the end of the chunk self.chunk_left = None return value else: value += self._safe_read(chunk_left) amt -= chunk_left # we read the whole chunk, get another self._safe_read(2) # toss the CRLF at the end of the chunk chunk_left = None # read and discard trailer up to the CRLF terminator ### note: we shouldn't have any trailers! while True: line = self.fp.readline() if not line: # a vanishingly small number of sites EOF without # sending the trailer break if line == '\r\n': break # we read everything; close the "file" self.close() return value def readline(self, limit=-1): i = self._rbuf.find('\n') while i < 0 and not (0 < limit <= len(self._rbuf)): new = self._raw_read(self._rbufsize) if not new: break i = new.find('\n') if i >= 0: i = i + len(self._rbuf) self._rbuf = self._rbuf + new if i < 0: i = len(self._rbuf) else: i = i + 1 if 0 <= limit < len(self._rbuf): i = limit data, self._rbuf = self._rbuf[:i], self._rbuf[i:] return data def readlines(self, sizehint=0): total = 0 list = [] while True: line = self.readline() if not line: break list.append(line) total += len(line) if sizehint and total >= sizehint: break return list def safesend(self, str): """Send `str' to the server. Shamelessly ripped off from httplib to patch a bad behavior. 
""" # _broken_pipe_resp is an attribute we set in this function # if the socket is closed while we're sending data but # the server sent us a response before hanging up. # In that case, we want to pretend to send the rest of the # outgoing data, and then let the user use getresponse() # (which we wrap) to get this last response before # opening a new socket. if getattr(self, '_broken_pipe_resp', None) is not None: return if self.sock is None: if self.auto_open: self.connect() else: raise httplib.NotConnected # send the data to the server. if we get a broken pipe, then close # the socket. we want to reconnect when somebody tries to send again. # # NOTE: we DO propagate the error, though, because we cannot simply # ignore the error... the caller will know if they can retry. if self.debuglevel > 0: print("send:", repr(str)) try: blocksize = 8192 read = getattr(str, 'read', None) if read is not None: if self.debuglevel > 0: print("sending a read()able") data = read(blocksize) while data: self.sock.sendall(data) data = read(blocksize) else: self.sock.sendall(str) except socket.error as v: reraise = True if v[0] == errno.EPIPE: # Broken pipe if self._HTTPConnection__state == httplib._CS_REQ_SENT: self._broken_pipe_resp = None self._broken_pipe_resp = self.getresponse() reraise = False self.close() if reraise: raise def wrapgetresponse(cls): """Wraps getresponse in cls with a broken-pipe sane version. """ def safegetresponse(self): # In safesend() we might set the _broken_pipe_resp # attribute, in which case the socket has already # been closed and we just need to give them the response # back. Otherwise, we use the normal response path. 
r = getattr(self, '_broken_pipe_resp', None) if r is not None: return r return cls.getresponse(self) safegetresponse.__doc__ = cls.getresponse.__doc__ return safegetresponse class HTTPConnection(httplib.HTTPConnection): # use the modified response class response_class = HTTPResponse send = safesend getresponse = wrapgetresponse(httplib.HTTPConnection) ######################################################################### ##### TEST FUNCTIONS ######################################################################### def error_handler(url): global HANDLE_ERRORS orig = HANDLE_ERRORS keepalive_handler = HTTPHandler() opener = urllib2.build_opener(keepalive_handler) urllib2.install_opener(opener) pos = {0: 'off', 1: 'on'} for i in (0, 1): print(" fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i)) HANDLE_ERRORS = i try: fo = urllib2.urlopen(url) fo.read() fo.close() try: status, reason = fo.status, fo.reason except AttributeError: status, reason = None, None except IOError as e: print(" EXCEPTION: %s" % e) raise else: print(" status = %s, reason = %s" % (status, reason)) HANDLE_ERRORS = orig hosts = keepalive_handler.open_connections() print("open connections:", hosts) keepalive_handler.close_all() def continuity(url): from . 
import util md5 = util.md5 format = '%25s: %s' # first fetch the file with the normal http handler opener = urllib2.build_opener() urllib2.install_opener(opener) fo = urllib2.urlopen(url) foo = fo.read() fo.close() m = md5(foo) print(format % ('normal urllib', m.hexdigest())) # now install the keepalive handler and try again opener = urllib2.build_opener(HTTPHandler()) urllib2.install_opener(opener) fo = urllib2.urlopen(url) foo = fo.read() fo.close() m = md5(foo) print(format % ('keepalive read', m.hexdigest())) fo = urllib2.urlopen(url) foo = '' while True: f = fo.readline() if f: foo = foo + f else: break fo.close() m = md5(foo) print(format % ('keepalive readline', m.hexdigest())) def comp(N, url): print(' making %i connections to:\n %s' % (N, url)) sys.stdout.write(' first using the normal urllib handlers') # first use normal opener opener = urllib2.build_opener() urllib2.install_opener(opener) t1 = fetch(N, url) print(' TIME: %.3f s' % t1) sys.stdout.write(' now using the keepalive handler ') # now install the keepalive handler and try again opener = urllib2.build_opener(HTTPHandler()) urllib2.install_opener(opener) t2 = fetch(N, url) print(' TIME: %.3f s' % t2) print(' improvement factor: %.2f' % (t1 / t2)) def fetch(N, url, delay=0): import time lens = [] starttime = time.time() for i in range(N): if delay and i > 0: time.sleep(delay) fo = urllib2.urlopen(url) foo = fo.read() fo.close() lens.append(len(foo)) diff = time.time() - starttime j = 0 for i in lens[1:]: j = j + 1 if not i == lens[0]: print("WARNING: inconsistent length on read %i: %i" % (j, i)) return diff def test_timeout(url): global DEBUG dbbackup = DEBUG class FakeLogger(object): def debug(self, msg, *args): print(msg % args) info = warning = error = debug DEBUG = FakeLogger() print(" fetching the file to establish a connection") fo = urllib2.urlopen(url) data1 = fo.read() fo.close() i = 20 print(" waiting %i seconds for the server to close the connection" % i) while i > 0: 
sys.stdout.write('\r %2i' % i) sys.stdout.flush() time.sleep(1) i -= 1 sys.stderr.write('\r') print(" fetching the file a second time") fo = urllib2.urlopen(url) data2 = fo.read() fo.close() if data1 == data2: print(' data are identical') else: print(' ERROR: DATA DIFFER') DEBUG = dbbackup def test(url, N=10): print("checking error handler (do this on a non-200)") try: error_handler(url) except IOError: print("exiting - exception will prevent further tests") sys.exit() print('') print("performing continuity test (making sure stuff isn't corrupted)") continuity(url) print('') print("performing speed comparison") comp(N, url) print('') print("performing dropped-connection check") test_timeout(url) if __name__ == '__main__': import time try: N = int(sys.argv[1]) url = sys.argv[2] except (IndexError, ValueError): print("%s " % sys.argv[0]) else: test(url, N) mercurial-3.7.3/mercurial/fileset.py0000644000175000017500000004226412676531525017127 0ustar mpmmpm00000000000000# fileset.py - file set queries for mercurial # # Copyright 2010 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import re from .i18n import _ from . 
import ( error, merge, parser, util, ) elements = { # token-type: binding-strength, primary, prefix, infix, suffix "(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None), "-": (5, None, ("negate", 19), ("minus", 5), None), "not": (10, None, ("not", 10), None, None), "!": (10, None, ("not", 10), None, None), "and": (5, None, None, ("and", 5), None), "&": (5, None, None, ("and", 5), None), "or": (4, None, None, ("or", 4), None), "|": (4, None, None, ("or", 4), None), "+": (4, None, None, ("or", 4), None), ",": (2, None, None, ("list", 2), None), ")": (0, None, None, None, None), "symbol": (0, "symbol", None, None, None), "string": (0, "string", None, None, None), "end": (0, None, None, None, None), } keywords = set(['and', 'or', 'not']) globchars = ".*{}[]?/\\_" def tokenize(program): pos, l = 0, len(program) while pos < l: c = program[pos] if c.isspace(): # skip inter-token whitespace pass elif c in "(),-|&+!": # handle simple operators yield (c, None, pos) elif (c in '"\'' or c == 'r' and program[pos:pos + 2] in ("r'", 'r"')): # handle quoted strings if c == 'r': pos += 1 c = program[pos] decode = lambda x: x else: decode = parser.unescapestr pos += 1 s = pos while pos < l: # find closing quote d = program[pos] if d == '\\': # skip over escaped characters pos += 2 continue if d == c: yield ('string', decode(program[s:pos]), s) break pos += 1 else: raise error.ParseError(_("unterminated string"), s) elif c.isalnum() or c in globchars or ord(c) > 127: # gather up a symbol/keyword s = pos pos += 1 while pos < l: # find end of symbol d = program[pos] if not (d.isalnum() or d in globchars or ord(d) > 127): break pos += 1 sym = program[s:pos] if sym in keywords: # operator keywords yield (sym, None, s) else: yield ('symbol', sym, s) pos -= 1 else: raise error.ParseError(_("syntax error"), pos) pos += 1 yield ('end', None, pos) def parse(expr): p = parser.parser(elements) tree, pos = p.parse(tokenize(expr)) if pos != len(expr): raise error.ParseError(_("invalid 
token"), pos) return tree def getstring(x, err): if x and (x[0] == 'string' or x[0] == 'symbol'): return x[1] raise error.ParseError(err) def getset(mctx, x): if not x: raise error.ParseError(_("missing argument")) return methods[x[0]](mctx, *x[1:]) def stringset(mctx, x): m = mctx.matcher([x]) return [f for f in mctx.subset if m(f)] def andset(mctx, x, y): return getset(mctx.narrow(getset(mctx, x)), y) def orset(mctx, x, y): # needs optimizing xl = getset(mctx, x) yl = getset(mctx, y) return xl + [f for f in yl if f not in xl] def notset(mctx, x): s = set(getset(mctx, x)) return [r for r in mctx.subset if r not in s] def minusset(mctx, x, y): xl = getset(mctx, x) yl = set(getset(mctx, y)) return [f for f in xl if f not in yl] def listset(mctx, a, b): raise error.ParseError(_("can't use a list in this context"), hint=_('see hg help "filesets.x or y"')) # symbols are callable like: # fun(mctx, x) # with: # mctx - current matchctx instance # x - argument in tree form symbols = {} # filesets using matchctx.status() _statuscallers = set() # filesets using matchctx.existing() _existingcallers = set() def predicate(decl, callstatus=False, callexisting=False): """Return a decorator for fileset predicate function 'decl' argument is the declaration (including argument list like 'adds(pattern)') or the name (for internal use only) of predicate. Optional 'callstatus' argument indicates whether predicate implies 'matchctx.status()' at runtime or not (False, by default). Optional 'callexisting' argument indicates whether predicate implies 'matchctx.existing()' at runtime or not (False, by default). 
""" def decorator(func): i = decl.find('(') if i > 0: name = decl[:i] else: name = decl symbols[name] = func if callstatus: _statuscallers.add(name) if callexisting: _existingcallers.add(name) if func.__doc__: func.__doc__ = "``%s``\n %s" % (decl, func.__doc__.strip()) return func return decorator @predicate('modified()', callstatus=True) def modified(mctx, x): """File that is modified according to :hg:`status`. """ # i18n: "modified" is a keyword getargs(x, 0, 0, _("modified takes no arguments")) s = mctx.status().modified return [f for f in mctx.subset if f in s] @predicate('added()', callstatus=True) def added(mctx, x): """File that is added according to :hg:`status`. """ # i18n: "added" is a keyword getargs(x, 0, 0, _("added takes no arguments")) s = mctx.status().added return [f for f in mctx.subset if f in s] @predicate('removed()', callstatus=True) def removed(mctx, x): """File that is removed according to :hg:`status`. """ # i18n: "removed" is a keyword getargs(x, 0, 0, _("removed takes no arguments")) s = mctx.status().removed return [f for f in mctx.subset if f in s] @predicate('deleted()', callstatus=True) def deleted(mctx, x): """Alias for ``missing()``. """ # i18n: "deleted" is a keyword getargs(x, 0, 0, _("deleted takes no arguments")) s = mctx.status().deleted return [f for f in mctx.subset if f in s] @predicate('missing()', callstatus=True) def missing(mctx, x): """File that is missing according to :hg:`status`. """ # i18n: "missing" is a keyword getargs(x, 0, 0, _("missing takes no arguments")) s = mctx.status().deleted return [f for f in mctx.subset if f in s] @predicate('unknown()', callstatus=True) def unknown(mctx, x): """File that is unknown according to :hg:`status`. These files will only be considered if this predicate is used. 
""" # i18n: "unknown" is a keyword getargs(x, 0, 0, _("unknown takes no arguments")) s = mctx.status().unknown return [f for f in mctx.subset if f in s] @predicate('ignored()', callstatus=True) def ignored(mctx, x): """File that is ignored according to :hg:`status`. These files will only be considered if this predicate is used. """ # i18n: "ignored" is a keyword getargs(x, 0, 0, _("ignored takes no arguments")) s = mctx.status().ignored return [f for f in mctx.subset if f in s] @predicate('clean()', callstatus=True) def clean(mctx, x): """File that is clean according to :hg:`status`. """ # i18n: "clean" is a keyword getargs(x, 0, 0, _("clean takes no arguments")) s = mctx.status().clean return [f for f in mctx.subset if f in s] def func(mctx, a, b): if a[0] == 'symbol' and a[1] in symbols: funcname = a[1] enabled = mctx._existingenabled mctx._existingenabled = funcname in _existingcallers try: return symbols[funcname](mctx, b) finally: mctx._existingenabled = enabled keep = lambda fn: getattr(fn, '__doc__', None) is not None syms = [s for (s, fn) in symbols.items() if keep(fn)] raise error.UnknownIdentifier(a[1], syms) def getlist(x): if not x: return [] if x[0] == 'list': return getlist(x[1]) + [x[2]] return [x] def getargs(x, min, max, err): l = getlist(x) if len(l) < min or len(l) > max: raise error.ParseError(err) return l @predicate('binary()', callexisting=True) def binary(mctx, x): """File that appears to be binary (contains NUL bytes). """ # i18n: "binary" is a keyword getargs(x, 0, 0, _("binary takes no arguments")) return [f for f in mctx.existing() if util.binary(mctx.ctx[f].data())] @predicate('exec()', callexisting=True) def exec_(mctx, x): """File that is marked as executable. """ # i18n: "exec" is a keyword getargs(x, 0, 0, _("exec takes no arguments")) return [f for f in mctx.existing() if mctx.ctx.flags(f) == 'x'] @predicate('symlink()', callexisting=True) def symlink(mctx, x): """File that is marked as a symlink. 
""" # i18n: "symlink" is a keyword getargs(x, 0, 0, _("symlink takes no arguments")) return [f for f in mctx.existing() if mctx.ctx.flags(f) == 'l'] @predicate('resolved()') def resolved(mctx, x): """File that is marked resolved according to :hg:`resolve -l`. """ # i18n: "resolved" is a keyword getargs(x, 0, 0, _("resolved takes no arguments")) if mctx.ctx.rev() is not None: return [] ms = merge.mergestate.read(mctx.ctx.repo()) return [f for f in mctx.subset if f in ms and ms[f] == 'r'] @predicate('unresolved()') def unresolved(mctx, x): """File that is marked unresolved according to :hg:`resolve -l`. """ # i18n: "unresolved" is a keyword getargs(x, 0, 0, _("unresolved takes no arguments")) if mctx.ctx.rev() is not None: return [] ms = merge.mergestate.read(mctx.ctx.repo()) return [f for f in mctx.subset if f in ms and ms[f] == 'u'] @predicate('hgignore()') def hgignore(mctx, x): """File that matches the active .hgignore pattern. """ # i18n: "hgignore" is a keyword getargs(x, 0, 0, _("hgignore takes no arguments")) ignore = mctx.ctx.repo().dirstate._ignore return [f for f in mctx.subset if ignore(f)] @predicate('portable()') def portable(mctx, x): """File that has a portable name. (This doesn't include filenames with case collisions.) """ # i18n: "portable" is a keyword getargs(x, 0, 0, _("portable takes no arguments")) checkwinfilename = util.checkwinfilename return [f for f in mctx.subset if checkwinfilename(f) is None] @predicate('grep(regex)', callexisting=True) def grep(mctx, x): """File contains the given regular expression. """ try: # i18n: "grep" is a keyword r = re.compile(getstring(x, _("grep requires a pattern"))) except re.error as e: raise error.ParseError(_('invalid match pattern: %s') % e) return [f for f in mctx.existing() if r.search(mctx.ctx[f].data())] def _sizetomax(s): try: s = s.strip().lower() for k, v in util._sizeunits: if s.endswith(k): # max(4k) = 5k - 1, max(4.5k) = 4.6k - 1 n = s[:-len(k)] inc = 1.0 if "." 
in n: inc /= 10 ** len(n.split(".")[1]) return int((float(n) + inc) * v) - 1 # no extension, this is a precise value return int(s) except ValueError: raise error.ParseError(_("couldn't parse size: %s") % s) @predicate('size(expression)', callexisting=True) def size(mctx, x): """File size matches the given expression. Examples: - 1k (files from 1024 to 2047 bytes) - < 20k (files less than 20480 bytes) - >= .5MB (files at least 524288 bytes) - 4k - 1MB (files from 4096 bytes to 1048576 bytes) """ # i18n: "size" is a keyword expr = getstring(x, _("size requires an expression")).strip() if '-' in expr: # do we have a range? a, b = expr.split('-', 1) a = util.sizetoint(a) b = util.sizetoint(b) m = lambda x: x >= a and x <= b elif expr.startswith("<="): a = util.sizetoint(expr[2:]) m = lambda x: x <= a elif expr.startswith("<"): a = util.sizetoint(expr[1:]) m = lambda x: x < a elif expr.startswith(">="): a = util.sizetoint(expr[2:]) m = lambda x: x >= a elif expr.startswith(">"): a = util.sizetoint(expr[1:]) m = lambda x: x > a elif expr[0].isdigit or expr[0] == '.': a = util.sizetoint(expr) b = _sizetomax(expr) m = lambda x: x >= a and x <= b else: raise error.ParseError(_("couldn't parse size: %s") % expr) return [f for f in mctx.existing() if m(mctx.ctx[f].size())] @predicate('encoding(name)', callexisting=True) def encoding(mctx, x): """File can be successfully decoded with the given character encoding. May not be useful for encodings other than ASCII and UTF-8. """ # i18n: "encoding" is a keyword enc = getstring(x, _("encoding requires an encoding name")) s = [] for f in mctx.existing(): d = mctx.ctx[f].data() try: d.decode(enc) except LookupError: raise error.Abort(_("unknown encoding '%s'") % enc) except UnicodeDecodeError: continue s.append(f) return s @predicate('eol(style)', callexisting=True) def eol(mctx, x): """File contains newlines of the given style (dos, unix, mac). Binary files are excluded, files with mixed line endings match multiple styles. 
""" # i18n: "eol" is a keyword enc = getstring(x, _("eol requires a style name")) s = [] for f in mctx.existing(): d = mctx.ctx[f].data() if util.binary(d): continue if (enc == 'dos' or enc == 'win') and '\r\n' in d: s.append(f) elif enc == 'unix' and re.search('(? and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import from . import ( util, ) class funcregistrar(object): """Base of decorator to register a fuction for specific purpose The least derived class can be defined by overriding 'table' and 'formatdoc', for example:: symbols = {} class keyword(funcregistrar): table = symbols formatdoc = ":%s: %s" @keyword('bar') def barfunc(*args, **kwargs): '''Explanation of bar keyword .... ''' pass In this case: - 'barfunc' is registered as 'bar' in 'symbols' - online help uses ":bar: Explanation of bar keyword" """ def __init__(self, decl): """'decl' is a name or more descriptive string of a function Specification of 'decl' depends on registration purpose. """ self.decl = decl table = None def __call__(self, func): """Execute actual registration for specified function """ name = self.getname() if func.__doc__ and not util.safehasattr(func, '_origdoc'): doc = func.__doc__.strip() func._origdoc = doc if callable(self.formatdoc): func.__doc__ = self.formatdoc(doc) else: # convenient shortcut for simple format func.__doc__ = self.formatdoc % (self.decl, doc) self.table[name] = func self.extraaction(name, func) return func def getname(self): """Return the name of the registered function from self.decl Derived class should override this, if it allows more descriptive 'decl' string than just a name. 
""" return self.decl def parsefuncdecl(self): """Parse function declaration and return the name of function in it """ i = self.decl.find('(') if i > 0: return self.decl[:i] else: return self.decl def formatdoc(self, doc): """Return formatted document of the registered function for help 'doc' is '__doc__.strip()' of the registered function. If this is overridden by non-callable object in derived class, such value is treated as "format string" and used to format document by 'self.formatdoc % (self.decl, doc)' for convenience. """ raise NotImplementedError() def extraaction(self, name, func): """Execute exra action for registered function, if needed """ pass class delayregistrar(object): """Decorator to delay actual registration until uisetup or so For example, the decorator class to delay registration by 'keyword' funcregistrar can be defined as below:: class extkeyword(delayregistrar): registrar = keyword """ def __init__(self): self._list = [] registrar = None def __call__(self, *args, **kwargs): """Return the decorator to delay actual registration until setup """ assert self.registrar is not None def decorator(func): # invocation of self.registrar() here can detect argument # mismatching immediately self._list.append((func, self.registrar(*args, **kwargs))) return func return decorator def setup(self): """Execute actual registration """ while self._list: func, decorator = self._list.pop(0) decorator(func) mercurial-3.7.3/mercurial/bookmarks.py0000644000175000017500000005023612676531525017462 0ustar mpmmpm00000000000000# Mercurial bookmark support code # # Copyright 2008 David Soria Parra # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import errno import os from .i18n import _ from .node import ( bin, hex, ) from . 
import ( encoding, lock as lockmod, obsolete, util, ) def _getbkfile(repo): """Hook so that extensions that mess with the store can hook bm storage. For core, this just handles wether we should see pending bookmarks or the committed ones. Other extensions (like share) may need to tweak this behavior further. """ bkfile = None if 'HG_PENDING' in os.environ: try: bkfile = repo.vfs('bookmarks.pending') except IOError as inst: if inst.errno != errno.ENOENT: raise if bkfile is None: bkfile = repo.vfs('bookmarks') return bkfile class bmstore(dict): """Storage for bookmarks. This object should do all bookmark-related reads and writes, so that it's fairly simple to replace the storage underlying bookmarks without having to clone the logic surrounding bookmarks. This type also should manage the active bookmark, if any. This particular bmstore implementation stores bookmarks as {hash}\s{name}\n (the same format as localtags) in .hg/bookmarks. The mapping is stored as {name: nodeid}. """ def __init__(self, repo): dict.__init__(self) self._repo = repo try: bkfile = _getbkfile(repo) for line in bkfile: line = line.strip() if not line: continue if ' ' not in line: repo.ui.warn(_('malformed line in .hg/bookmarks: %r\n') % line) continue sha, refspec = line.split(' ', 1) refspec = encoding.tolocal(refspec) try: self[refspec] = repo.changelog.lookup(sha) except LookupError: pass except IOError as inst: if inst.errno != errno.ENOENT: raise self._clean = True self._active = _readactive(repo, self) self._aclean = True @property def active(self): return self._active @active.setter def active(self, mark): if mark is not None and mark not in self: raise AssertionError('bookmark %s does not exist!' 
% mark) self._active = mark self._aclean = False def __setitem__(self, *args, **kwargs): self._clean = False return dict.__setitem__(self, *args, **kwargs) def __delitem__(self, key): self._clean = False return dict.__delitem__(self, key) def recordchange(self, tr): """record that bookmarks have been changed in a transaction The transaction is then responsible for updating the file content.""" tr.addfilegenerator('bookmarks', ('bookmarks',), self._write, location='plain') tr.hookargs['bookmark_moved'] = '1' def write(self): '''Write bookmarks Write the given bookmark => hash dictionary to the .hg/bookmarks file in a format equal to those of localtags. We also store a backup of the previous state in undo.bookmarks that can be copied back on rollback. ''' msg = 'bm.write() is deprecated, use bm.recordchange(transaction)' self._repo.ui.deprecwarn(msg, '3.7') # TODO: writing the active bookmark should probably also use a # transaction. self._writeactive() if self._clean: return repo = self._repo if (repo.ui.configbool('devel', 'all-warnings') or repo.ui.configbool('devel', 'check-locks')): l = repo._wlockref and repo._wlockref() if l is None or not l.held: repo.ui.develwarn('bookmarks write with no wlock') tr = repo.currenttransaction() if tr: self.recordchange(tr) # invalidatevolatilesets() is omitted because this doesn't # write changes out actually return self._writerepo(repo) repo.invalidatevolatilesets() def _writerepo(self, repo): """Factored out for extensibility""" rbm = repo._bookmarks if rbm.active not in self: rbm.active = None rbm._writeactive() with repo.wlock(): file_ = repo.vfs('bookmarks', 'w', atomictemp=True) try: self._write(file_) except: # re-raises file_.discard() raise finally: file_.close() def _writeactive(self): if self._aclean: return with self._repo.wlock(): if self._active is not None: f = self._repo.vfs('bookmarks.current', 'w', atomictemp=True) try: f.write(encoding.fromlocal(self._active)) finally: f.close() else: try: 
self._repo.vfs.unlink('bookmarks.current') except OSError as inst: if inst.errno != errno.ENOENT: raise self._aclean = True def _write(self, fp): for name, node in self.iteritems(): fp.write("%s %s\n" % (hex(node), encoding.fromlocal(name))) self._clean = True def _readactive(repo, marks): """ Get the active bookmark. We can have an active bookmark that updates itself as we commit. This function returns the name of that bookmark. It is stored in .hg/bookmarks.current """ mark = None try: file = repo.vfs('bookmarks.current') except IOError as inst: if inst.errno != errno.ENOENT: raise return None try: # No readline() in osutil.posixfile, reading everything is # cheap. # Note that it's possible for readlines() here to raise # IOError, since we might be reading the active mark over # static-http which only tries to load the file when we try # to read from it. mark = encoding.tolocal((file.readlines() or [''])[0]) if mark == '' or mark not in marks: mark = None except IOError as inst: if inst.errno != errno.ENOENT: raise return None finally: file.close() return mark def activate(repo, mark): """ Set the given bookmark to be 'active', meaning that this bookmark will follow new commits that are made. The name is recorded in .hg/bookmarks.current """ repo._bookmarks.active = mark repo._bookmarks._writeactive() def deactivate(repo): """ Unset the active bookmark in this repository. """ repo._bookmarks.active = None repo._bookmarks._writeactive() def isactivewdirparent(repo): """ Tell whether the 'active' bookmark (the one that follows new commits) points to one of the parents of the current working directory (wdir). While this is normally the case, it can on occasion be false; for example, immediately after a pull, the active bookmark can be moved to point to a place different than the wdir. This is solved by running `hg update`. 
""" mark = repo._activebookmark marks = repo._bookmarks parents = [p.node() for p in repo[None].parents()] return (mark in marks and marks[mark] in parents) def deletedivergent(repo, deletefrom, bm): '''Delete divergent versions of bm on nodes in deletefrom. Return True if at least one bookmark was deleted, False otherwise.''' deleted = False marks = repo._bookmarks divergent = [b for b in marks if b.split('@', 1)[0] == bm.split('@', 1)[0]] for mark in divergent: if mark == '@' or '@' not in mark: # can't be divergent by definition continue if mark and marks[mark] in deletefrom: if mark != bm: del marks[mark] deleted = True return deleted def calculateupdate(ui, repo, checkout): '''Return a tuple (targetrev, movemarkfrom) indicating the rev to check out and where to move the active bookmark from, if needed.''' movemarkfrom = None if checkout is None: activemark = repo._activebookmark if isactivewdirparent(repo): movemarkfrom = repo['.'].node() elif activemark: ui.status(_("updating to active bookmark %s\n") % activemark) checkout = activemark return (checkout, movemarkfrom) def update(repo, parents, node): deletefrom = parents marks = repo._bookmarks update = False active = marks.active if not active: return False if marks[active] in parents: new = repo[node] divs = [repo[b] for b in marks if b.split('@', 1)[0] == active.split('@', 1)[0]] anc = repo.changelog.ancestors([new.rev()]) deletefrom = [b.node() for b in divs if b.rev() in anc or b == new] if validdest(repo, repo[marks[active]], new): marks[active] = new.node() update = True if deletedivergent(repo, deletefrom, active): update = True if update: lock = tr = None try: lock = repo.lock() tr = repo.transaction('bookmark') marks.recordchange(tr) tr.close() finally: lockmod.release(tr, lock) return update def listbookmarks(repo): # We may try to list bookmarks on a repo type that does not # support it (e.g., statichttprepository). 
marks = getattr(repo, '_bookmarks', {}) d = {} hasnode = repo.changelog.hasnode for k, v in marks.iteritems(): # don't expose local divergent bookmarks if hasnode(v) and ('@' not in k or k.endswith('@')): d[k] = hex(v) return d def pushbookmark(repo, key, old, new): w = l = tr = None try: w = repo.wlock() l = repo.lock() tr = repo.transaction('bookmarks') marks = repo._bookmarks existing = hex(marks.get(key, '')) if existing != old and existing != new: return False if new == '': del marks[key] else: if new not in repo: return False marks[key] = repo[new].node() marks.recordchange(tr) tr.close() return True finally: lockmod.release(tr, l, w) def compare(repo, srcmarks, dstmarks, srchex=None, dsthex=None, targets=None): '''Compare bookmarks between srcmarks and dstmarks This returns tuple "(addsrc, adddst, advsrc, advdst, diverge, differ, invalid)", each are list of bookmarks below: :addsrc: added on src side (removed on dst side, perhaps) :adddst: added on dst side (removed on src side, perhaps) :advsrc: advanced on src side :advdst: advanced on dst side :diverge: diverge :differ: changed, but changeset referred on src is unknown on dst :invalid: unknown on both side :same: same on both side Each elements of lists in result tuple is tuple "(bookmark name, changeset ID on source side, changeset ID on destination side)". Each changeset IDs are 40 hexadecimal digit string or None. Changeset IDs of tuples in "addsrc", "adddst", "differ" or "invalid" list may be unknown for repo. This function expects that "srcmarks" and "dstmarks" return changeset ID in 40 hexadecimal digit string for specified bookmark. If not so (e.g. bmstore "repo._bookmarks" returning binary value), "srchex" or "dsthex" should be specified to convert into such form. If "targets" is specified, only bookmarks listed in it are examined. 
''' if not srchex: srchex = lambda x: x if not dsthex: dsthex = lambda x: x if targets: bset = set(targets) else: srcmarkset = set(srcmarks) dstmarkset = set(dstmarks) bset = srcmarkset | dstmarkset results = ([], [], [], [], [], [], [], []) addsrc = results[0].append adddst = results[1].append advsrc = results[2].append advdst = results[3].append diverge = results[4].append differ = results[5].append invalid = results[6].append same = results[7].append for b in sorted(bset): if b not in srcmarks: if b in dstmarks: adddst((b, None, dsthex(dstmarks[b]))) else: invalid((b, None, None)) elif b not in dstmarks: addsrc((b, srchex(srcmarks[b]), None)) else: scid = srchex(srcmarks[b]) dcid = dsthex(dstmarks[b]) if scid == dcid: same((b, scid, dcid)) elif scid in repo and dcid in repo: sctx = repo[scid] dctx = repo[dcid] if sctx.rev() < dctx.rev(): if validdest(repo, sctx, dctx): advdst((b, scid, dcid)) else: diverge((b, scid, dcid)) else: if validdest(repo, dctx, sctx): advsrc((b, scid, dcid)) else: diverge((b, scid, dcid)) else: # it is too expensive to examine in detail, in this case differ((b, scid, dcid)) return results def _diverge(ui, b, path, localmarks, remotenode): '''Return appropriate diverged bookmark for specified ``path`` This returns None, if it is failed to assign any divergent bookmark name. This reuses already existing one with "@number" suffix, if it refers ``remotenode``. 
''' if b == '@': b = '' # try to use an @pathalias suffix # if an @pathalias already exists, we overwrite (update) it if path.startswith("file:"): path = util.url(path).path for p, u in ui.configitems("paths"): if u.startswith("file:"): u = util.url(u).path if path == u: return '%s@%s' % (b, p) # assign a unique "@number" suffix newly for x in range(1, 100): n = '%s@%d' % (b, x) if n not in localmarks or localmarks[n] == remotenode: return n return None def updatefromremote(ui, repo, remotemarks, path, trfunc, explicit=()): ui.debug("checking for updated bookmarks\n") localmarks = repo._bookmarks (addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same ) = compare(repo, remotemarks, localmarks, dsthex=hex) status = ui.status warn = ui.warn if ui.configbool('ui', 'quietbookmarkmove', False): status = warn = ui.debug explicit = set(explicit) changed = [] for b, scid, dcid in addsrc: if scid in repo: # add remote bookmarks for changes we already have changed.append((b, bin(scid), status, _("adding remote bookmark %s\n") % (b))) elif b in explicit: explicit.remove(b) ui.warn(_("remote bookmark %s points to locally missing %s\n") % (b, scid[:12])) for b, scid, dcid in advsrc: changed.append((b, bin(scid), status, _("updating bookmark %s\n") % (b))) # remove normal movement from explicit set explicit.difference_update(d[0] for d in changed) for b, scid, dcid in diverge: if b in explicit: explicit.discard(b) changed.append((b, bin(scid), status, _("importing bookmark %s\n") % (b))) else: snode = bin(scid) db = _diverge(ui, b, path, localmarks, snode) if db: changed.append((db, snode, warn, _("divergent bookmark %s stored as %s\n") % (b, db))) else: warn(_("warning: failed to assign numbered name " "to divergent bookmark %s\n") % (b)) for b, scid, dcid in adddst + advdst: if b in explicit: explicit.discard(b) changed.append((b, bin(scid), status, _("importing bookmark %s\n") % (b))) for b, scid, dcid in differ: if b in explicit: explicit.remove(b) ui.warn(_("remote 
bookmark %s points to locally missing %s\n") % (b, scid[:12])) if changed: tr = trfunc() for b, node, writer, msg in sorted(changed): localmarks[b] = node writer(msg) localmarks.recordchange(tr) def incoming(ui, repo, other): '''Show bookmarks incoming from other to repo ''' ui.status(_("searching for changed bookmarks\n")) r = compare(repo, other.listkeys('bookmarks'), repo._bookmarks, dsthex=hex) addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = r incomings = [] if ui.debugflag: getid = lambda id: id else: getid = lambda id: id[:12] if ui.verbose: def add(b, id, st): incomings.append(" %-25s %s %s\n" % (b, getid(id), st)) else: def add(b, id, st): incomings.append(" %-25s %s\n" % (b, getid(id))) for b, scid, dcid in addsrc: # i18n: "added" refers to a bookmark add(b, scid, _('added')) for b, scid, dcid in advsrc: # i18n: "advanced" refers to a bookmark add(b, scid, _('advanced')) for b, scid, dcid in diverge: # i18n: "diverged" refers to a bookmark add(b, scid, _('diverged')) for b, scid, dcid in differ: # i18n: "changed" refers to a bookmark add(b, scid, _('changed')) if not incomings: ui.status(_("no changed bookmarks found\n")) return 1 for s in sorted(incomings): ui.write(s) return 0 def outgoing(ui, repo, other): '''Show bookmarks outgoing from repo to other ''' ui.status(_("searching for changed bookmarks\n")) r = compare(repo, repo._bookmarks, other.listkeys('bookmarks'), srchex=hex) addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = r outgoings = [] if ui.debugflag: getid = lambda id: id else: getid = lambda id: id[:12] if ui.verbose: def add(b, id, st): outgoings.append(" %-25s %s %s\n" % (b, getid(id), st)) else: def add(b, id, st): outgoings.append(" %-25s %s\n" % (b, getid(id))) for b, scid, dcid in addsrc: # i18n: "added refers to a bookmark add(b, scid, _('added')) for b, scid, dcid in adddst: # i18n: "deleted" refers to a bookmark add(b, ' ' * 40, _('deleted')) for b, scid, dcid in advsrc: # i18n: "advanced" refers to a 
bookmark add(b, scid, _('advanced')) for b, scid, dcid in diverge: # i18n: "diverged" refers to a bookmark add(b, scid, _('diverged')) for b, scid, dcid in differ: # i18n: "changed" refers to a bookmark add(b, scid, _('changed')) if not outgoings: ui.status(_("no changed bookmarks found\n")) return 1 for s in sorted(outgoings): ui.write(s) return 0 def summary(repo, other): '''Compare bookmarks between repo and other for "hg summary" output This returns "(# of incoming, # of outgoing)" tuple. ''' r = compare(repo, other.listkeys('bookmarks'), repo._bookmarks, dsthex=hex) addsrc, adddst, advsrc, advdst, diverge, differ, invalid, same = r return (len(addsrc), len(adddst)) def validdest(repo, old, new): """Is the new bookmark destination a valid update from the old one""" repo = repo.unfiltered() if old == new: # Old == new -> nothing to update. return False elif not old: # old is nullrev, anything is valid. # (new != nullrev has been excluded by the previous check) return True elif repo.obsstore: return new.node() in obsolete.foreground(repo, [old.node()]) else: # still an independent clause as it is lazier (and therefore faster) return old.descendant(new) mercurial-3.7.3/mercurial/py3kcompat.py0000644000175000017500000000415412676531525017562 0ustar mpmmpm00000000000000# py3kcompat.py - compatibility definitions for running hg in py3k # # Copyright 2010 Renato Cunha # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import builtins import numbers Number = numbers.Number def bytesformatter(format, args): '''Custom implementation of a formatter for bytestrings. This function currently relies on the string formatter to do the formatting and always returns bytes objects. >>> bytesformatter(20, 10) 0 >>> bytesformatter('unicode %s, %s!', ('string', 'foo')) b'unicode string, foo!' 
>>> bytesformatter(b'test %s', 'me') b'test me' >>> bytesformatter('test %s', 'me') b'test me' >>> bytesformatter(b'test %s', b'me') b'test me' >>> bytesformatter('test %s', b'me') b'test me' >>> bytesformatter('test %d: %s', (1, b'result')) b'test 1: result' ''' # The current implementation just converts from bytes to unicode, do # what's needed and then convert the results back to bytes. # Another alternative is to use the Python C API implementation. if isinstance(format, Number): # If the fixer erroneously passes a number remainder operation to # bytesformatter, we just return the correct operation return format % args if isinstance(format, bytes): format = format.decode('utf-8', 'surrogateescape') if isinstance(args, bytes): args = args.decode('utf-8', 'surrogateescape') if isinstance(args, tuple): newargs = [] for arg in args: if isinstance(arg, bytes): arg = arg.decode('utf-8', 'surrogateescape') newargs.append(arg) args = tuple(newargs) ret = format % args return ret.encode('utf-8', 'surrogateescape') builtins.bytesformatter = bytesformatter origord = builtins.ord def fakeord(char): if isinstance(char, int): return char return origord(char) builtins.ord = fakeord if __name__ == '__main__': import doctest doctest.testmod() mercurial-3.7.3/mercurial/peer.py0000644000175000017500000000766012676531525016430 0ustar mpmmpm00000000000000# peer.py - repository base classes for mercurial # # Copyright 2005, 2006 Matt Mackall # Copyright 2006 Vadim Gelfer # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import from .i18n import _ from . 
import ( error, util, ) # abstract batching support class future(object): '''placeholder for a value to be set later''' def set(self, value): if util.safehasattr(self, 'value'): raise error.RepoError("future is already set") self.value = value class batcher(object): '''base class for batches of commands submittable in a single request All methods invoked on instances of this class are simply queued and return a a future for the result. Once you call submit(), all the queued calls are performed and the results set in their respective futures. ''' def __init__(self): self.calls = [] def __getattr__(self, name): def call(*args, **opts): resref = future() self.calls.append((name, args, opts, resref,)) return resref return call def submit(self): pass class localbatch(batcher): '''performs the queued calls directly''' def __init__(self, local): batcher.__init__(self) self.local = local def submit(self): for name, args, opts, resref in self.calls: resref.set(getattr(self.local, name)(*args, **opts)) def batchable(f): '''annotation for batchable methods Such methods must implement a coroutine as follows: @batchable def sample(self, one, two=None): # Handle locally computable results first: if not one: yield "a local result", None # Build list of encoded arguments suitable for your wire protocol: encargs = [('one', encode(one),), ('two', encode(two),)] # Create future for injection of encoded result: encresref = future() # Return encoded arguments and future: yield encargs, encresref # Assuming the future to be filled with the result from the batched # request now. Decode it: yield decode(encresref.value) The decorator returns a function which wraps this coroutine as a plain method, but adds the original method as an attribute called "batchable", which is used by remotebatch to split the call into separate encoding and decoding phases. 
''' def plain(*args, **opts): batchable = f(*args, **opts) encargsorres, encresref = batchable.next() if not encresref: return encargsorres # a local result in this case self = args[0] encresref.set(self._submitone(f.func_name, encargsorres)) return batchable.next() setattr(plain, 'batchable', f) return plain class peerrepository(object): def batch(self): return localbatch(self) def capable(self, name): '''tell whether repo supports named capability. return False if not supported. if boolean capability, return True. if string capability, return string.''' caps = self._capabilities() if name in caps: return True name_eq = name + '=' for cap in caps: if cap.startswith(name_eq): return cap[len(name_eq):] return False def requirecap(self, name, purpose): '''raise an exception if the given capability is not present''' if not self.capable(name): raise error.CapabilityError( _('cannot %s; remote repository does not ' 'support the %r capability') % (purpose, name)) def local(self): '''return peer as a localrepo, or None''' return None def peer(self): return self def canpush(self): return True def close(self): pass mercurial-3.7.3/mercurial/revset.py0000644000175000017500000035215212676531525017004 0ustar mpmmpm00000000000000# revset.py - revision set queries for mercurial # # Copyright 2010 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import heapq import re from .i18n import _ from . 
import ( destutil, encoding, error, hbisect, match as matchmod, node, obsolete as obsmod, parser, pathutil, phases, registrar, repoview, util, ) def _revancestors(repo, revs, followfirst): """Like revlog.ancestors(), but supports followfirst.""" if followfirst: cut = 1 else: cut = None cl = repo.changelog def iterate(): revs.sort(reverse=True) irevs = iter(revs) h = [] inputrev = next(irevs, None) if inputrev is not None: heapq.heappush(h, -inputrev) seen = set() while h: current = -heapq.heappop(h) if current == inputrev: inputrev = next(irevs, None) if inputrev is not None: heapq.heappush(h, -inputrev) if current not in seen: seen.add(current) yield current for parent in cl.parentrevs(current)[:cut]: if parent != node.nullrev: heapq.heappush(h, -parent) return generatorset(iterate(), iterasc=False) def _revdescendants(repo, revs, followfirst): """Like revlog.descendants() but supports followfirst.""" if followfirst: cut = 1 else: cut = None def iterate(): cl = repo.changelog # XXX this should be 'parentset.min()' assuming 'parentset' is a # smartset (and if it is not, it should.) first = min(revs) nullrev = node.nullrev if first == nullrev: # Are there nodes with a null first parent and a non-null # second one? Maybe. Do we care? Probably not. for i in cl: yield i else: seen = set(revs) for i in cl.revs(first + 1): for x in cl.parentrevs(i)[:cut]: if x != nullrev and x in seen: seen.add(i) yield i break return generatorset(iterate(), iterasc=True) def _reachablerootspure(repo, minroot, roots, heads, includepath): """return (heads(:: and ::)) If includepath is True, return (::).""" if not roots: return [] parentrevs = repo.changelog.parentrevs roots = set(roots) visit = list(heads) reachable = set() seen = {} # prefetch all the things! 
(because python is slow) reached = reachable.add dovisit = visit.append nextvisit = visit.pop # open-code the post-order traversal due to the tiny size of # sys.getrecursionlimit() while visit: rev = nextvisit() if rev in roots: reached(rev) if not includepath: continue parents = parentrevs(rev) seen[rev] = parents for parent in parents: if parent >= minroot and parent not in seen: dovisit(parent) if not reachable: return baseset() if not includepath: return reachable for rev in sorted(seen): for parent in seen[rev]: if parent in reachable: reached(rev) return reachable def reachableroots(repo, roots, heads, includepath=False): """return (heads(:: and ::)) If includepath is True, return (::).""" if not roots: return baseset() minroot = roots.min() roots = list(roots) heads = list(heads) try: revs = repo.changelog.reachableroots(minroot, heads, roots, includepath) except AttributeError: revs = _reachablerootspure(repo, minroot, roots, heads, includepath) revs = baseset(revs) revs.sort() return revs elements = { # token-type: binding-strength, primary, prefix, infix, suffix "(": (21, None, ("group", 1, ")"), ("func", 1, ")"), None), "##": (20, None, None, ("_concat", 20), None), "~": (18, None, None, ("ancestor", 18), None), "^": (18, None, None, ("parent", 18), ("parentpost", 18)), "-": (5, None, ("negate", 19), ("minus", 5), None), "::": (17, None, ("dagrangepre", 17), ("dagrange", 17), ("dagrangepost", 17)), "..": (17, None, ("dagrangepre", 17), ("dagrange", 17), ("dagrangepost", 17)), ":": (15, "rangeall", ("rangepre", 15), ("range", 15), ("rangepost", 15)), "not": (10, None, ("not", 10), None, None), "!": (10, None, ("not", 10), None, None), "and": (5, None, None, ("and", 5), None), "&": (5, None, None, ("and", 5), None), "%": (5, None, None, ("only", 5), ("onlypost", 5)), "or": (4, None, None, ("or", 4), None), "|": (4, None, None, ("or", 4), None), "+": (4, None, None, ("or", 4), None), "=": (3, None, None, ("keyvalue", 3), None), ",": (2, None, None, ("list", 
2), None), ")": (0, None, None, None, None), "symbol": (0, "symbol", None, None, None), "string": (0, "string", None, None, None), "end": (0, None, None, None, None), } keywords = set(['and', 'or', 'not']) # default set of valid characters for the initial letter of symbols _syminitletters = set(c for c in [chr(i) for i in xrange(256)] if c.isalnum() or c in '._@' or ord(c) > 127) # default set of valid characters for non-initial letters of symbols _symletters = set(c for c in [chr(i) for i in xrange(256)] if c.isalnum() or c in '-._/@' or ord(c) > 127) def tokenize(program, lookup=None, syminitletters=None, symletters=None): ''' Parse a revset statement into a stream of tokens ``syminitletters`` is the set of valid characters for the initial letter of symbols. By default, character ``c`` is recognized as valid for initial letter of symbols, if ``c.isalnum() or c in '._@' or ord(c) > 127``. ``symletters`` is the set of valid characters for non-initial letters of symbols. By default, character ``c`` is recognized as valid for non-initial letters of symbols, if ``c.isalnum() or c in '-._/@' or ord(c) > 127``. 
Check that @ is a valid unquoted token character (issue3686): >>> list(tokenize("@::")) [('symbol', '@', 0), ('::', None, 1), ('end', None, 3)] ''' if syminitletters is None: syminitletters = _syminitletters if symletters is None: symletters = _symletters if program and lookup: # attempt to parse old-style ranges first to deal with # things like old-tag which contain query metacharacters parts = program.split(':', 1) if all(lookup(sym) for sym in parts if sym): if parts[0]: yield ('symbol', parts[0], 0) if len(parts) > 1: s = len(parts[0]) yield (':', None, s) if parts[1]: yield ('symbol', parts[1], s + 1) yield ('end', None, len(program)) return pos, l = 0, len(program) while pos < l: c = program[pos] if c.isspace(): # skip inter-token whitespace pass elif c == ':' and program[pos:pos + 2] == '::': # look ahead carefully yield ('::', None, pos) pos += 1 # skip ahead elif c == '.' and program[pos:pos + 2] == '..': # look ahead carefully yield ('..', None, pos) pos += 1 # skip ahead elif c == '#' and program[pos:pos + 2] == '##': # look ahead carefully yield ('##', None, pos) pos += 1 # skip ahead elif c in "():=,-|&+!~^%": # handle simple operators yield (c, None, pos) elif (c in '"\'' or c == 'r' and program[pos:pos + 2] in ("r'", 'r"')): # handle quoted strings if c == 'r': pos += 1 c = program[pos] decode = lambda x: x else: decode = parser.unescapestr pos += 1 s = pos while pos < l: # find closing quote d = program[pos] if d == '\\': # skip over escaped characters pos += 2 continue if d == c: yield ('string', decode(program[s:pos]), s) break pos += 1 else: raise error.ParseError(_("unterminated string"), s) # gather up a symbol/keyword elif c in syminitletters: s = pos pos += 1 while pos < l: # find end of symbol d = program[pos] if d not in symletters: break if d == '.' and program[pos - 1] == '.': # special case for .. 
# NOTE(review): the lines below are the tail of tokenize(), whose header is
# above this chunk; reproduced as-is (fragment of an incomplete definition).
                    pos -= 1
                    break
                pos += 1
            sym = program[s:pos]
            if sym in keywords: # operator keywords
                yield (sym, None, s)
            elif '-' in sym:
                # some jerk gave us foo-bar-baz, try to check if it's a symbol
                if lookup and lookup(sym):
                    # looks like a real symbol
                    yield ('symbol', sym, s)
                else:
                    # looks like an expression
                    parts = sym.split('-')
                    for p in parts[:-1]:
                        if p: # possible consecutive -
                            yield ('symbol', p, s)
                        s += len(p)
                        yield ('-', None, pos)
                        s += 1
                    if parts[-1]: # possible trailing -
                        yield ('symbol', parts[-1], s)
            else:
                yield ('symbol', sym, s)
            pos -= 1
        else:
            raise error.ParseError(_("syntax error in revset '%s'") %
                                   program, pos)
        pos += 1
    yield ('end', None, pos)

def parseerrordetail(inst):
    """Compose error message from specified ParseError object
    """
    # inst.args is (message,) or (message, location); include the location
    # when present.
    if len(inst.args) > 1:
        return _('at %s: %s') % (inst.args[1], inst.args[0])
    else:
        return inst.args[0]

# helpers

def getstring(x, err):
    # Extract the string payload from a ('string', value, ...) or
    # ('symbol', value, ...) parse-tree node; raise ParseError(err) otherwise.
    if x and (x[0] == 'string' or x[0] == 'symbol'):
        return x[1]
    raise error.ParseError(err)

def getlist(x):
    # Flatten a parse-tree node into a list of argument nodes: None -> [],
    # a ('list', a, b, ...) node -> [a, b, ...], anything else -> [x].
    if not x:
        return []
    if x[0] == 'list':
        return list(x[1:])
    return [x]

def getargs(x, min, max, err):
    # As getlist(), but enforce an argument-count range; max < 0 means
    # "no upper bound". Raises ParseError(err) on violation.
    l = getlist(x)
    if len(l) < min or (max >= 0 and len(l) > max):
        raise error.ParseError(err)
    return l

def getargsdict(x, funcname, keys):
    # Map positional/keyword revset arguments onto the space-separated
    # parameter names in ``keys`` (delegates to parser.buildargsdict).
    return parser.buildargsdict(getlist(x), funcname, keys.split(),
                                keyvaluenode='keyvalue', keynode='symbol')

def isvalidsymbol(tree):
    """Examine whether specified ``tree`` is valid ``symbol`` or not
    """
    return tree[0] == 'symbol' and len(tree) > 1

def getsymbol(tree):
    """Get symbol name from valid ``symbol`` in ``tree``

    This assumes that ``tree`` is already examined by ``isvalidsymbol``.
    """
    return tree[1]

def isvalidfunc(tree):
    """Examine whether specified ``tree`` is valid ``func`` or not
    """
    return tree[0] == 'func' and len(tree) > 1 and isvalidsymbol(tree[1])

def getfuncname(tree):
    """Get function name from valid ``func`` in ``tree``

    This assumes that ``tree`` is already examined by ``isvalidfunc``.
    """
    return getsymbol(tree[1])

def getfuncargs(tree):
    """Get list of function arguments from valid ``func`` in ``tree``

    This assumes that ``tree`` is already examined by ``isvalidfunc``.
    """
    if len(tree) > 2:
        return getlist(tree[2])
    else:
        return []

def getset(repo, subset, x):
    # Evaluate parse-tree node ``x`` against ``subset`` by dispatching on the
    # node type through the ``methods`` table; always returns a smartset
    # (wrapping plain lists in a baseset, with an optional devel warning).
    if not x:
        raise error.ParseError(_("missing argument"))
    s = methods[x[0]](repo, subset, *x[1:])
    if util.safehasattr(s, 'isascending'):
        return s
    if (repo.ui.configbool('devel', 'all-warnings')
            or repo.ui.configbool('devel', 'old-revset')):
        # else case should not happen, because all non-func are internal,
        # ignoring for now.
        if x[0] == 'func' and x[1][0] == 'symbol' and x[1][1] in symbols:
            repo.ui.develwarn('revset "%s" use list instead of smartset, '
                              '(upgrade your code)' % x[1][1])
    return baseset(s)

def _getrevsource(repo, r):
    # Return the revision recorded as the graft/transplant/rebase source of
    # ``r`` in its extra metadata, or None if absent or unresolvable.
    extra = repo[r].extra()
    for label in ('source', 'transplant_source', 'rebase_source'):
        if label in extra:
            try:
                return repo[extra[label]].rev()
            except error.RepoLookupError:
                pass
    return None

# operator methods

def stringset(repo, subset, x):
    # Resolve a bare revision identifier; nullrev only matches against a
    # fullreposet.
    x = repo[x].rev()
    if (x in subset
        or x == node.nullrev and isinstance(subset, fullreposet)):
        return baseset([x])
    return baseset()

def rangeset(repo, subset, x, y):
    # ``x:y`` — span from the first rev of x to the last rev of y, in either
    # direction, with special-casing for the working-directory pseudo-rev.
    m = getset(repo, fullreposet(repo), x)
    n = getset(repo, fullreposet(repo), y)

    if not m or not n:
        return baseset()
    m, n = m.first(), n.last()

    if m == n:
        r = baseset([m])
    elif n == node.wdirrev:
        r = spanset(repo, m, len(repo)) + baseset([n])
    elif m == node.wdirrev:
        r = baseset([m]) + spanset(repo, len(repo) - 1, n - 1)
    elif m < n:
        r = spanset(repo, m, n + 1)
    else:
        r = spanset(repo, m, n - 1)
    # XXX We should combine with subset first: 'subset & baseset(...)'. This is
    # necessary to ensure we preserve the order in subset.
    #
    # This has performance implication, carrying the sorting over when possible
    # would be more efficient.
    return r & subset

def dagrange(repo, subset, x, y):
    # ``x::y`` — revisions reachable from x that are ancestors of y.
    r = fullreposet(repo)
    xs = reachableroots(repo, getset(repo, r, x), getset(repo, r, y),
                         includepath=True)
    # XXX We should combine with subset first: 'subset & baseset(...)'. This is
    # necessary to ensure we preserve the order in subset.
    return xs & subset

def andset(repo, subset, x, y):
    # ``x and y`` — evaluate y against the result of x.
    return getset(repo, getset(repo, subset, x), y)

def orset(repo, subset, *xs):
    # ``x or y or ...`` — divide and conquer to build a balanced union.
    assert xs
    if len(xs) == 1:
        return getset(repo, subset, xs[0])
    p = len(xs) // 2
    a = orset(repo, subset, *xs[:p])
    b = orset(repo, subset, *xs[p:])
    return a + b

def notset(repo, subset, x):
    # ``not x`` — set difference against subset.
    return subset - getset(repo, subset, x)

def listset(repo, subset, *xs):
    # A bare list is not valid outside function arguments.
    raise error.ParseError(_("can't use a list in this context"),
                           hint=_('see hg help "revsets.x or y"'))

def keyvaluepair(repo, subset, k, v):
    # A bare key=value pair is only valid inside function arguments.
    raise error.ParseError(_("can't use a key-value pair in this context"))

def func(repo, subset, a, b):
    # Dispatch a revset function call to its registered predicate, or raise
    # UnknownIdentifier listing the documented (public) predicate names.
    if a[0] == 'symbol' and a[1] in symbols:
        return symbols[a[1]](repo, subset, b)

    keep = lambda fn: getattr(fn, '__doc__', None) is not None

    syms = [s for (s, fn) in symbols.items() if keep(fn)]
    raise error.UnknownIdentifier(a[1], syms)

# functions

# symbols are callables like:
#   fn(repo, subset, x)
# with:
#   repo - current repository instance
#   subset - of revisions to be examined
#   x - argument in tree form
symbols = {}

# symbols which can't be used for a DoS attack for any given input
# (e.g. those which accept regexes as plain strings shouldn't be included)
# functions that just return a lot of changesets (like all) don't count here
safesymbols = set()

class predicate(registrar.funcregistrar):
    """Decorator to register revset predicate

    Usage::

        @predicate('mypredicate(arg1, arg2[, arg3])')
        def mypredicatefunc(repo, subset, x):
            '''Explanation of this revset predicate ....
            '''
            pass

    The first string argument of the constructor is used also in
    online help.

    Use 'extpredicate' instead of this to register revset predicate in
    extensions.
    """
    table = symbols
    formatdoc = "``%s``\n    %s"
    getname = registrar.funcregistrar.parsefuncdecl

    def __init__(self, decl, safe=False):
        """'safe' indicates whether a predicate is safe for DoS attack
        """
        super(predicate, self).__init__(decl)
        self.safe = safe

    def extraaction(self, name, func):
        # record DoS-safe predicates in the module-level safesymbols set
        if self.safe:
            safesymbols.add(name)

class extpredicate(registrar.delayregistrar):
    """Decorator to register revset predicate in extensions

    Usage::

        revsetpredicate = revset.extpredicate()

        @revsetpredicate('mypredicate(arg1, arg2[, arg3])')
        def mypredicatefunc(repo, subset, x):
            '''Explanation of this revset predicate ....
            '''
            pass

        def uisetup(ui):
            revsetpredicate.setup()

    'revsetpredicate' instance above can be used to decorate multiple
    functions, and 'setup()' on it registers all such functions at
    once.
    """
    registrar = predicate

@predicate('_destupdate')
def _destupdate(repo, subset, x):
    # experimental revset for update destination
    # NOTE(review): funcname passed to getargsdict is 'limit' — looks like a
    # copy/paste slip (only affects error messages); confirm against upstream.
    args = getargsdict(x, 'limit', 'clean check')
    return subset & baseset([destutil.destupdate(repo, **args)[0]])

@predicate('_destmerge')
def _destmerge(repo, subset, x):
    # experimental revset for merge destination
    getargs(x, 0, 0, _("_mergedefaultdest takes no arguments"))
    return subset & baseset([destutil.destmerge(repo)])

@predicate('adds(pattern)', safe=True)
def adds(repo, subset, x):
    """Changesets that add a file matching pattern.

    The pattern without explicit kind like ``glob:`` is expected to be
    relative to the current directory and match against a file or a
    directory.
    """
    # i18n: "adds" is a keyword
    pat = getstring(x, _("adds requires a pattern"))
    return checkstatus(repo, subset, pat, 1)

@predicate('ancestor(*changeset)', safe=True)
def ancestor(repo, subset, x):
    """A greatest common ancestor of the changesets.

    Accepts 0 or more changesets.
    Will return empty list when passed no args.
    Greatest common ancestor of a single changeset is that changeset.
    """
    # i18n: "ancestor" is a keyword
    l = getlist(x)
    rl = fullreposet(repo)
    anc = None

    # (getset(repo, rl, i) for i in l) generates a list of lists
    for revs in (getset(repo, rl, i) for i in l):
        for r in revs:
            if anc is None:
                anc = repo[r]
            else:
                anc = anc.ancestor(repo[r])

    if anc is not None and anc.rev() in subset:
        return baseset([anc.rev()])
    return baseset()

def _ancestors(repo, subset, x, followfirst=False):
    # shared implementation of ancestors()/_firstancestors()
    heads = getset(repo, fullreposet(repo), x)
    if not heads:
        return baseset()
    s = _revancestors(repo, heads, followfirst)
    return subset & s

@predicate('ancestors(set)', safe=True)
def ancestors(repo, subset, x):
    """Changesets that are ancestors of a changeset in set.
    """
    return _ancestors(repo, subset, x)

@predicate('_firstancestors', safe=True)
def _firstancestors(repo, subset, x):
    # ``_firstancestors(set)``
    # Like ``ancestors(set)`` but follows only the first parents.
    return _ancestors(repo, subset, x, followfirst=True)

def ancestorspec(repo, subset, x, n):
    """``set~n``
    Changesets that are the Nth ancestor (first parents only) of a changeset
    in set.
    """
    try:
        n = int(n[1])
    except (TypeError, ValueError):
        raise error.ParseError(_("~ expects a number"))
    ps = set()
    cl = repo.changelog
    for r in getset(repo, fullreposet(repo), x):
        for i in range(n):
            r = cl.parentrevs(r)[0]
        ps.add(r)
    return subset & ps

@predicate('author(string)', safe=True)
def author(repo, subset, x):
    """Alias for ``user(string)``.
    """
    # i18n: "author" is a keyword
    n = encoding.lower(getstring(x, _("author requires a string")))
    kind, pattern, matcher = _substringmatcher(n)
    return subset.filter(lambda x: matcher(encoding.lower(repo[x].user())))

@predicate('bisect(string)', safe=True)
def bisect(repo, subset, x):
    """Changesets marked in the specified bisect status:

    - ``good``, ``bad``, ``skip``: csets explicitly marked as good/bad/skip
    - ``goods``, ``bads``      : csets topologically good/bad
    - ``range``              : csets taking part in the bisection
    - ``pruned``             : csets that are goods, bads or skipped
    - ``untested``           : csets whose fate is yet unknown
    - ``ignored``            : csets ignored due to DAG topology
    - ``current``            : the cset currently being bisected
    """
    # i18n: "bisect" is a keyword
    status = getstring(x, _("bisect requires a string")).lower()
    state = set(hbisect.get(repo, status))
    return subset & state

# Backward-compatibility
# - no help entry so that we do not advertise it any more
@predicate('bisected', safe=True)
def bisected(repo, subset, x):
    return bisect(repo, subset, x)

@predicate('bookmark([name])', safe=True)
def bookmark(repo, subset, x):
    """The named bookmark or all bookmarks.

    If `name` starts with `re:`, the remainder of the name is treated as
    a regular expression. To match a bookmark that actually starts with `re:`,
    use the prefix `literal:`.
    """
    # i18n: "bookmark" is a keyword
    args = getargs(x, 0, 1, _('bookmark takes one or no arguments'))
    if args:
        bm = getstring(args[0],
                       # i18n: "bookmark" is a keyword
                       _('the argument to bookmark must be a string'))
        kind, pattern, matcher = util.stringmatcher(bm)
        bms = set()
        if kind == 'literal':
            bmrev = repo._bookmarks.get(pattern, None)
            if not bmrev:
                raise error.RepoLookupError(_("bookmark '%s' does not exist")
                                            % pattern)
            bms.add(repo[bmrev].rev())
        else:
            matchrevs = set()
            for name, bmrev in repo._bookmarks.iteritems():
                if matcher(name):
                    matchrevs.add(bmrev)
            if not matchrevs:
                raise error.RepoLookupError(_("no bookmarks exist"
                                              " that match '%s'") % pattern)
            for bmrev in matchrevs:
                bms.add(repo[bmrev].rev())
    else:
        bms = set([repo[r].rev()
                   for r in repo._bookmarks.values()])
    bms -= set([node.nullrev])
    return subset & bms

@predicate('branch(string or set)', safe=True)
def branch(repo, subset, x):
    """
    All changesets belonging to the given branch or the branches of the given
    changesets.

    If `string` starts with `re:`, the remainder of the name is treated as
    a regular expression. To match a branch that actually starts with `re:`,
    use the prefix `literal:`.
    """
    getbi = repo.revbranchcache().branchinfo

    try:
        b = getstring(x, '')
    except error.ParseError:
        # not a string, but another revspec, e.g. tip()
        pass
    else:
        kind, pattern, matcher = util.stringmatcher(b)
        if kind == 'literal':
            # note: falls through to the revspec case if no branch with
            # this name exists and pattern kind is not specified explicitly
            if pattern in repo.branchmap():
                return subset.filter(lambda r: matcher(getbi(r)[0]))
            if b.startswith('literal:'):
                raise error.RepoLookupError(_("branch '%s' does not exist")
                                            % pattern)
        else:
            return subset.filter(lambda r: matcher(getbi(r)[0]))

    s = getset(repo, fullreposet(repo), x)
    b = set()
    for r in s:
        b.add(getbi(r)[0])
    c = s.__contains__
    return subset.filter(lambda r: c(r) or getbi(r)[0] in b)

@predicate('bumped()', safe=True)
def bumped(repo, subset, x):
    """Mutable changesets marked as successors of public changesets.

    Only non-public and non-obsolete changesets can be `bumped`.
    """
    # i18n: "bumped" is a keyword
    getargs(x, 0, 0, _("bumped takes no arguments"))
    bumped = obsmod.getrevs(repo, 'bumped')
    return subset & bumped

@predicate('bundle()', safe=True)
def bundle(repo, subset, x):
    """Changesets in the bundle.

    Bundle must be specified by the -R option."""

    try:
        bundlerevs = repo.changelog.bundlerevs
    except AttributeError:
        raise error.Abort(_("no bundle provided - specify with -R"))
    return subset & bundlerevs

def checkstatus(repo, subset, pat, field):
    # Filter subset to changesets whose status tuple entry ``field`` (0 =
    # modified, 1 = added, as used by modifies()/adds()) matches ``pat``;
    # single-literal patterns take a fast path via ctx.files().
    hasset = matchmod.patkind(pat) == 'set'

    mcache = [None]
    def matches(x):
        c = repo[x]
        if not mcache[0] or hasset:
            mcache[0] = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
        m = mcache[0]
        fname = None
        if not m.anypats() and len(m.files()) == 1:
            fname = m.files()[0]
        if fname is not None:
            if fname not in c.files():
                return False
        else:
            for f in c.files():
                if m(f):
                    break
            else:
                return False
        files = repo.status(c.p1().node(), c.node())[field]
        if fname is not None:
            if fname in files:
                return True
        else:
            for f in files:
                if m(f):
                    return True

    return subset.filter(matches)

def _children(repo, narrow, parentset):
    # Collect revs in ``narrow`` that have a parent in ``parentset``.
    if not parentset:
        return baseset()
    cs = set()
    pr = repo.changelog.parentrevs
    minrev = parentset.min()
    for r in narrow:
        if r <= minrev:
            continue
        for p in pr(r):
            if p in parentset:
                cs.add(r)
    # XXX using a set to feed the baseset is wrong. Sets are not ordered.
    # This does not break because of other fullreposet misbehavior.
    return baseset(cs)

@predicate('children(set)', safe=True)
def children(repo, subset, x):
    """Child changesets of changesets in set.
    """
    s = getset(repo, fullreposet(repo), x)
    cs = _children(repo, subset, s)
    return subset & cs

@predicate('closed()', safe=True)
def closed(repo, subset, x):
    """Changeset is closed.
    """
    # i18n: "closed" is a keyword
    getargs(x, 0, 0, _("closed takes no arguments"))
    return subset.filter(lambda r: repo[r].closesbranch())

@predicate('contains(pattern)')
def contains(repo, subset, x):
    """The revision's manifest contains a file matching pattern (but might not
    modify it). See :hg:`help patterns` for information about file patterns.

    The pattern without explicit kind like ``glob:`` is expected to be
    relative to the current directory and match against a file exactly
    for efficiency.
    """
    # i18n: "contains" is a keyword
    pat = getstring(x, _("contains requires a pattern"))

    def matches(x):
        if not matchmod.patkind(pat):
            pats = pathutil.canonpath(repo.root, repo.getcwd(), pat)
            if pats in repo[x]:
                return True
        else:
            c = repo[x]
            m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
            for f in c.manifest():
                if m(f):
                    return True
        return False

    return subset.filter(matches)

@predicate('converted([id])', safe=True)
def converted(repo, subset, x):
    """Changesets converted from the given identifier in the old repository if
    present, or all converted changesets if no identifier is specified.
    """

    # There is exactly no chance of resolving the revision, so do a simple
    # string compare and hope for the best
    rev = None
    # i18n: "converted" is a keyword
    l = getargs(x, 0, 1, _('converted takes one or no arguments'))
    if l:
        # i18n: "converted" is a keyword
        rev = getstring(l[0], _('converted requires a revision'))

    def _matchvalue(r):
        source = repo[r].extra().get('convert_revision', None)
        return source is not None and (rev is None or source.startswith(rev))

    return subset.filter(lambda r: _matchvalue(r))

@predicate('date(interval)', safe=True)
def date(repo, subset, x):
    """Changesets within the interval, see :hg:`help dates`.
    """
    # i18n: "date" is a keyword
    ds = getstring(x, _("date requires a string"))
    dm = util.matchdate(ds)
    return subset.filter(lambda x: dm(repo[x].date()[0]))

@predicate('desc(string)', safe=True)
def desc(repo, subset, x):
    """Search commit message for string. The match is case-insensitive.
    """
    # i18n: "desc" is a keyword
    ds = encoding.lower(getstring(x, _("desc requires a string")))

    def matches(x):
        c = repo[x]
        return ds in encoding.lower(c.description())

    return subset.filter(matches)

def _descendants(repo, subset, x, followfirst=False):
    # shared implementation of descendants()/_firstdescendants()
    roots = getset(repo, fullreposet(repo), x)
    if not roots:
        return baseset()
    s = _revdescendants(repo, roots, followfirst)

    # Both sets need to be ascending in order to lazily return the union
    # in the correct order.
    base = subset & roots
    desc = subset & s
    result = base + desc
    if subset.isascending():
        result.sort()
    elif subset.isdescending():
        result.sort(reverse=True)
    else:
        result = subset & result
    return result

@predicate('descendants(set)', safe=True)
def descendants(repo, subset, x):
    """Changesets which are descendants of changesets in set.
    """
    return _descendants(repo, subset, x)

@predicate('_firstdescendants', safe=True)
def _firstdescendants(repo, subset, x):
    # ``_firstdescendants(set)``
    # Like ``descendants(set)`` but follows only the first parents.
    return _descendants(repo, subset, x, followfirst=True)

@predicate('destination([set])', safe=True)
def destination(repo, subset, x):
    """Changesets that were created by a graft, transplant or rebase operation,
    with the given revisions specified as the source.  Omitting the optional set
    is the same as passing all().
    """
    if x is not None:
        sources = getset(repo, fullreposet(repo), x)
    else:
        sources = fullreposet(repo)

    dests = set()

    # subset contains all of the possible destinations that can be returned, so
    # iterate over them and see if their source(s) were provided in the arg set.
    # Even if the immediate src of r is not in the arg set, src's source (or
    # further back) may be.  Scanning back further than the immediate src allows
    # transitive transplants and rebases to yield the same results as transitive
    # grafts.
    for r in subset:
        src = _getrevsource(repo, r)
        lineage = None

        while src is not None:
            if lineage is None:
                lineage = list()

            lineage.append(r)

            # The visited lineage is a match if the current source is in the arg
            # set.  Since every candidate dest is visited by way of iterating
            # subset, any dests further back in the lineage will be tested by a
            # different iteration over subset.  Likewise, if the src was already
            # selected, the current lineage can be selected without going back
            # further.
            if src in sources or src in dests:
                dests.update(lineage)
                break

            r = src
            src = _getrevsource(repo, r)

    return subset.filter(dests.__contains__)

@predicate('divergent()', safe=True)
def divergent(repo, subset, x):
    """
    Final successors of changesets with an alternative set of final successors.
    """
    # i18n: "divergent" is a keyword
    getargs(x, 0, 0, _("divergent takes no arguments"))
    divergent = obsmod.getrevs(repo, 'divergent')
    return subset & divergent

@predicate('extinct()', safe=True)
def extinct(repo, subset, x):
    """Obsolete changesets with obsolete descendants only.
    """
    # i18n: "extinct" is a keyword
    getargs(x, 0, 0, _("extinct takes no arguments"))
    extincts = obsmod.getrevs(repo, 'extinct')
    return subset & extincts

@predicate('extra(label, [value])', safe=True)
def extra(repo, subset, x):
    """Changesets with the given label in the extra metadata, with the given
    optional value.

    If `value` starts with `re:`, the remainder of the value is treated as
    a regular expression. To match a value that actually starts with `re:`,
    use the prefix `literal:`.
    """
    args = getargsdict(x, 'extra', 'label value')
    if 'label' not in args:
        # i18n: "extra" is a keyword
        raise error.ParseError(_('extra takes at least 1 argument'))
    # i18n: "extra" is a keyword
    label = getstring(args['label'], _('first argument to extra must be '
                                       'a string'))
    value = None

    if 'value' in args:
        # i18n: "extra" is a keyword
        value = getstring(args['value'], _('second argument to extra must be '
                                           'a string'))
        kind, value, matcher = util.stringmatcher(value)

    def _matchvalue(r):
        extra = repo[r].extra()
        return label in extra and (value is None or matcher(extra[label]))

    return subset.filter(lambda r: _matchvalue(r))

@predicate('filelog(pattern)', safe=True)
def filelog(repo, subset, x):
    """Changesets connected to the specified filelog.

    For performance reasons, visits only revisions mentioned in the file-level
    filelog, rather than filtering through all changesets (much faster, but
    doesn't include deletes or duplicate changes). For a slower, more accurate
    result, use ``file()``.

    The pattern without explicit kind like ``glob:`` is expected to be
    relative to the current directory and match against a file exactly
    for efficiency.

    If some linkrev points to revisions filtered by the current repoview, we'll
    work around it to return a non-filtered value.
    """

    # i18n: "filelog" is a keyword
    pat = getstring(x, _("filelog requires a pattern"))
    s = set()
    cl = repo.changelog

    if not matchmod.patkind(pat):
        f = pathutil.canonpath(repo.root, repo.getcwd(), pat)
        files = [f]
    else:
        m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=repo[None])
        files = (f for f in repo[None] if m(f))

    for f in files:
        fl = repo.file(f)
        known = {}
        scanpos = 0
        for fr in list(fl):
            fn = fl.node(fr)
            if fn in known:
                s.add(known[fn])
                continue

            lr = fl.linkrev(fr)
            if lr in cl:
                s.add(lr)
            elif scanpos is not None:
                # lowest matching changeset is filtered, scan further
                # ahead in changelog
                start = max(lr, scanpos) + 1
                scanpos = None
                for r in cl.revs(start):
                    # minimize parsing of non-matching entries
                    if f in cl.revision(r) and f in cl.readfiles(r):
                        try:
                            # try to use manifest delta fastpath
                            n = repo[r].filenode(f)
                            if n not in known:
                                if n == fn:
                                    s.add(r)
                                    scanpos = r
                                    break
                                else:
                                    known[n] = r
                        except error.ManifestLookupError:
                            # deletion in changelog
                            continue

    return subset & s

@predicate('first(set, [n])', safe=True)
def first(repo, subset, x):
    """An alias for limit().
    """
    return limit(repo, subset, x)

def _follow(repo, subset, x, name, followfirst=False):
    # shared implementation of follow()/_followfirst(); with a pattern,
    # follow the matched files' filelog ancestors from '.', otherwise
    # follow the changelog ancestors of '.'.
    l = getargs(x, 0, 1, _("%s takes no arguments or a pattern") % name)
    c = repo['.']
    if l:
        x = getstring(l[0], _("%s expected a pattern") % name)
        matcher = matchmod.match(repo.root, repo.getcwd(), [x],
                                 ctx=repo[None], default='path')

        s = set()
        for fname in c:
            if matcher(fname):
                fctx = c[fname]
                s = s.union(set(c.rev() for c in fctx.ancestors(followfirst)))
                # include the revision responsible for the most recent version
                s.add(fctx.introrev())
    else:
        s = _revancestors(repo, baseset([c.rev()]), followfirst)

    return subset & s

@predicate('follow([pattern])', safe=True)
def follow(repo, subset, x):
    """
    An alias for ``::.`` (ancestors of the working directory's first parent).
    If pattern is specified, the histories of files matching given
    pattern is followed, including copies.
    """
    return _follow(repo, subset, x, 'follow')

@predicate('_followfirst', safe=True)
def _followfirst(repo, subset, x):
    # ``followfirst([pattern])``
    # Like ``follow([pattern])`` but follows only the first parent of
    # every revisions or files revisions.
    return _follow(repo, subset, x, '_followfirst', followfirst=True)

@predicate('all()', safe=True)
def getall(repo, subset, x):
    """All changesets, the same as ``0:tip``.
    """
    # i18n: "all" is a keyword
    getargs(x, 0, 0, _("all takes no arguments"))
    return subset & spanset(repo)  # drop "null" if any

@predicate('grep(regex)')
def grep(repo, subset, x):
    """Like ``keyword(string)`` but accepts a regex. Use ``grep(r'...')``
    to ensure special escape characters are handled correctly. Unlike
    ``keyword(string)``, the match is case-sensitive.
    """
    try:
        # i18n: "grep" is a keyword
        gr = re.compile(getstring(x, _("grep requires a string")))
    except re.error as e:
        raise error.ParseError(_('invalid match pattern: %s') % e)

    def matches(x):
        c = repo[x]
        for e in c.files() + [c.user(), c.description()]:
            if gr.search(e):
                return True
        return False

    return subset.filter(matches)

@predicate('_matchfiles', safe=True)
def _matchfiles(repo, subset, x):
    # _matchfiles takes a revset list of prefixed arguments:
    #
    #   [p:foo, i:bar, x:baz]
    #
    # builds a match object from them and filters subset. Allowed
    # prefixes are 'p:' for regular patterns, 'i:' for include
    # patterns and 'x:' for exclude patterns. Use 'r:' prefix to pass
    # a revision identifier, or the empty string to reference the
    # working directory, from which the match object is
    # initialized. Use 'd:' to set the default matching mode, default
    # to 'glob'. At most one 'r:' and 'd:' argument can be passed.

    # i18n: "_matchfiles" is a keyword
    l = getargs(x, 1, -1, _("_matchfiles requires at least one argument"))
    pats, inc, exc = [], [], []
    rev, default = None, None
    for arg in l:
        # i18n: "_matchfiles" is a keyword
        s = getstring(arg, _("_matchfiles requires string arguments"))
        prefix, value = s[:2], s[2:]
        if prefix == 'p:':
            pats.append(value)
        elif prefix == 'i:':
            inc.append(value)
        elif prefix == 'x:':
            exc.append(value)
        elif prefix == 'r:':
            if rev is not None:
                # i18n: "_matchfiles" is a keyword
                raise error.ParseError(_('_matchfiles expected at most one '
                                         'revision'))
            if value != '': # empty means working directory; leave rev as None
                rev = value
        elif prefix == 'd:':
            if default is not None:
                # i18n: "_matchfiles" is a keyword
                raise error.ParseError(_('_matchfiles expected at most one '
                                         'default mode'))
            default = value
        else:
            # i18n: "_matchfiles" is a keyword
            raise error.ParseError(_('invalid _matchfiles prefix: %s') % prefix)
    if not default:
        default = 'glob'

    m = matchmod.match(repo.root, repo.getcwd(), pats, include=inc,
                       exclude=exc, ctx=repo[rev], default=default)

    # This directly read the changelog data as creating changectx for all
    # revisions is quite expensive.
    getfiles = repo.changelog.readfiles
    wdirrev = node.wdirrev
    def matches(x):
        if x == wdirrev:
            files = repo[x].files()
        else:
            files = getfiles(x)
        for f in files:
            if m(f):
                return True
        return False

    return subset.filter(matches)

@predicate('file(pattern)', safe=True)
def hasfile(repo, subset, x):
    """Changesets affecting files matched by pattern.

    For a faster but less accurate result, consider using ``filelog()``
    instead.

    This predicate uses ``glob:`` as the default kind of pattern.
    """
    # i18n: "file" is a keyword
    pat = getstring(x, _("file requires a pattern"))
    return _matchfiles(repo, subset, ('string', 'p:' + pat))

@predicate('head()', safe=True)
def head(repo, subset, x):
    """Changeset is a named branch head.
    """
    # i18n: "head" is a keyword
    getargs(x, 0, 0, _("head takes no arguments"))
    hs = set()
    cl = repo.changelog
    for b, ls in repo.branchmap().iteritems():
        hs.update(cl.rev(h) for h in ls)
    # XXX using a set to feed the baseset is wrong. Sets are not ordered.
    # This does not break because of other fullreposet misbehavior.
    #
    # XXX We should combine with subset first: 'subset & baseset(...)'. This is
    # necessary to ensure we preserve the order in subset.
    return baseset(hs) & subset

@predicate('heads(set)', safe=True)
def heads(repo, subset, x):
    """Members of set with no children in set.
    """
    s = getset(repo, subset, x)
    ps = parents(repo, subset, x)
    return s - ps

@predicate('hidden()', safe=True)
def hidden(repo, subset, x):
    """Hidden changesets.
    """
    # i18n: "hidden" is a keyword
    getargs(x, 0, 0, _("hidden takes no arguments"))
    hiddenrevs = repoview.filterrevs(repo, 'visible')
    return subset & hiddenrevs

@predicate('keyword(string)', safe=True)
def keyword(repo, subset, x):
    """Search commit message, user name, and names of changed files for
    string. The match is case-insensitive.
    """
    # i18n: "keyword" is a keyword
    kw = encoding.lower(getstring(x, _("keyword requires a string")))

    def matches(r):
        c = repo[r]
        return any(kw in encoding.lower(t)
                   for t in c.files() + [c.user(), c.description()])

    return subset.filter(matches)

@predicate('limit(set[, n[, offset]])', safe=True)
def limit(repo, subset, x):
    """First n members of set, defaulting to 1, starting from offset.
    """
    args = getargsdict(x, 'limit', 'set n offset')
    if 'set' not in args:
        # i18n: "limit" is a keyword
        raise error.ParseError(_("limit requires one to three arguments"))
    try:
        lim, ofs = 1, 0
        if 'n' in args:
            # i18n: "limit" is a keyword
            lim = int(getstring(args['n'], _("limit requires a number")))
        if 'offset' in args:
            # i18n: "limit" is a keyword
            ofs = int(getstring(args['offset'], _("limit requires a number")))
        if ofs < 0:
            raise error.ParseError(_("negative offset"))
    except (TypeError, ValueError):
        # i18n: "limit" is a keyword
        raise error.ParseError(_("limit expects a number"))
    os = getset(repo, fullreposet(repo), args['set'])
    result = []
    it = iter(os)
    for x in xrange(ofs):
        y = next(it, None)
        if y is None:
            break
    for x in xrange(lim):
        y = next(it, None)
        if y is None:
            break
        elif y in subset:
            result.append(y)
    return baseset(result)

@predicate('last(set, [n])', safe=True)
def last(repo, subset, x):
    """Last n members of set, defaulting to 1.
    """
    # i18n: "last" is a keyword
    l = getargs(x, 1, 2, _("last requires one or two arguments"))
    try:
        lim = 1
        if len(l) == 2:
            # i18n: "last" is a keyword
            lim = int(getstring(l[1], _("last requires a number")))
    except (TypeError, ValueError):
        # i18n: "last" is a keyword
        raise error.ParseError(_("last expects a number"))
    os = getset(repo, fullreposet(repo), l[0])
    os.reverse()
    result = []
    it = iter(os)
    for x in xrange(lim):
        y = next(it, None)
        if y is None:
            break
        elif y in subset:
            result.append(y)
    return baseset(result)

@predicate('max(set)', safe=True)
def maxrev(repo, subset, x):
    """Changeset with highest revision number in set.
    """
    os = getset(repo, fullreposet(repo), x)
    try:
        m = os.max()
        if m in subset:
            return baseset([m])
    except ValueError:
        # os.max() throws a ValueError when the collection is empty.
        # Same as python's max().
        pass
    return baseset()

@predicate('merge()', safe=True)
def merge(repo, subset, x):
    """Changeset is a merge changeset.
    """
    # i18n: "merge" is a keyword
    getargs(x, 0, 0, _("merge takes no arguments"))
    cl = repo.changelog
    return subset.filter(lambda r: cl.parentrevs(r)[1] != -1)

@predicate('branchpoint()', safe=True)
def branchpoint(repo, subset, x):
    """Changesets with more than one child.
    """
    # i18n: "branchpoint" is a keyword
    getargs(x, 0, 0, _("branchpoint takes no arguments"))
    cl = repo.changelog
    if not subset:
        return baseset()
    # XXX this should be 'parentset.min()' assuming 'parentset' is a smartset
    # (and if it is not, it should.)
    baserev = min(subset)
    parentscount = [0]*(len(repo) - baserev)
    for r in cl.revs(start=baserev + 1):
        for p in cl.parentrevs(r):
            if p >= baserev:
                parentscount[p - baserev] += 1
    return subset.filter(lambda r: parentscount[r - baserev] > 1)

@predicate('min(set)', safe=True)
def minrev(repo, subset, x):
    """Changeset with lowest revision number in set.
    """
    os = getset(repo, fullreposet(repo), x)
    try:
        m = os.min()
        if m in subset:
            return baseset([m])
    except ValueError:
        # os.min() throws a ValueError when the collection is empty.
        # Same as python's min().
        pass
    return baseset()

@predicate('modifies(pattern)', safe=True)
def modifies(repo, subset, x):
    """Changesets modifying files matched by pattern.

    The pattern without explicit kind like ``glob:`` is expected to be
    relative to the current directory and match against a file or a
    directory.
    """
    # i18n: "modifies" is a keyword
    pat = getstring(x, _("modifies requires a pattern"))
    return checkstatus(repo, subset, pat, 0)

@predicate('named(namespace)')
def named(repo, subset, x):
    """The changesets in a given namespace.

    If `namespace` starts with `re:`, the remainder of the string is treated as
    a regular expression. To match a namespace that actually starts with `re:`,
    use the prefix `literal:`.
    """
    # i18n: "named" is a keyword
    args = getargs(x, 1, 1, _('named requires a namespace argument'))

    ns = getstring(args[0],
                   # i18n: "named" is a keyword
                   _('the argument to named must be a string'))
    kind, pattern, matcher = util.stringmatcher(ns)
    namespaces = set()
    if kind == 'literal':
        if pattern not in repo.names:
            raise error.RepoLookupError(_("namespace '%s' does not exist")
                                        % ns)
        namespaces.add(repo.names[pattern])
    else:
        for name, ns in repo.names.iteritems():
            if matcher(name):
                namespaces.add(ns)
        if not namespaces:
            raise error.RepoLookupError(_("no namespace exists"
                                          " that match '%s'") % pattern)

    names = set()
    for ns in namespaces:
        for name in ns.listnames(repo):
            if name not in ns.deprecated:
                names.update(repo[n].rev() for n in ns.nodes(repo, name))

    names -= set([node.nullrev])
    return subset & names

@predicate('id(string)', safe=True)
def node_(repo, subset, x):
    """Revision non-ambiguously specified by the given hex string prefix.
    """
    # i18n: "id" is a keyword
    l = getargs(x, 1, 1, _("id requires one argument"))
    # i18n: "id" is a keyword
    n = getstring(l[0], _("id requires a string"))
    if len(n) == 40:
        try:
            rn = repo.changelog.rev(node.bin(n))
        except (LookupError, TypeError):
            rn = None
    else:
        rn = None
        pm = repo.changelog._partialmatch(n)
        if pm is not None:
            rn = repo.changelog.rev(pm)

    if rn is None:
        return baseset()
    result = baseset([rn])
    return result & subset

@predicate('obsolete()', safe=True)
def obsolete(repo, subset, x):
    """Mutable changeset with a newer version."""
    # i18n: "obsolete" is a keyword
    getargs(x, 0, 0, _("obsolete takes no arguments"))
    obsoletes = obsmod.getrevs(repo, 'obsolete')
    return subset & obsoletes

@predicate('only(set, [set])', safe=True)
def only(repo, subset, x):
    """Changesets that are ancestors of the first set that are not ancestors
    of any other head in the repo. If a second set is specified, the result
    is ancestors of the first set that are not ancestors of the second set
    (i.e. ::<set1> - ::<set2>).
    """
    cl = repo.changelog
    # i18n: "only" is a keyword
    args = getargs(x, 1, 2, _('only takes one or two arguments'))
    include = getset(repo, fullreposet(repo), args[0])
    if len(args) == 1:
        if not include:
            return baseset()

        descendants = set(_revdescendants(repo, include, False))
        exclude = [rev for rev in cl.headrevs()
            if not rev in descendants and not rev in include]
    else:
        exclude = getset(repo, fullreposet(repo), args[1])

    results = set(cl.findmissingrevs(common=exclude, heads=include))
    # XXX we should turn this into a baseset instead of a set, smartset may do
    # some optimisations from the fact this is a baseset.
    return subset & results

@predicate('origin([set])', safe=True)
def origin(repo, subset, x):
    """
    Changesets that were specified as a source for the grafts, transplants or
    rebases that created the given revisions.  Omitting the optional set is the
    same as passing all().  If a changeset created by these operations is itself
    specified as a source for one of these operations, only the source changeset
    for the first operation is selected.
    """
    if x is not None:
        dests = getset(repo, fullreposet(repo), x)
    else:
        dests = fullreposet(repo)

    def _firstsrc(rev):
        src = _getrevsource(repo, rev)
        if src is None:
            return None

        while True:
            prev = _getrevsource(repo, src)

            if prev is None:
                return src
            src = prev

    o = set([_firstsrc(r) for r in dests])
    o -= set([None])
    # XXX we should turn this into a baseset instead of a set, smartset may do
    # some optimisations from the fact this is a baseset.
    return subset & o

@predicate('outgoing([path])', safe=True)
def outgoing(repo, subset, x):
    """Changesets not found in the specified destination repository, or the
    default push location.
    """
    # Avoid cycles.
    from . import (
        discovery,
        hg,
    )
    # i18n: "outgoing" is a keyword
    l = getargs(x, 0, 1, _("outgoing takes one or no arguments"))
    # i18n: "outgoing" is a keyword
    dest = l and getstring(l[0], _("outgoing requires a repository path")) or ''
    dest = repo.ui.expandpath(dest or 'default-push', dest or 'default')
    dest, branches = hg.parseurl(dest)
    revs, checkout = hg.addbranchrevs(repo, repo, branches, [])
    if revs:
        revs = [repo.lookup(rev) for rev in revs]
    other = hg.peer(repo, {}, dest)
    repo.ui.pushbuffer()
    outgoing = discovery.findcommonoutgoing(repo, other, onlyheads=revs)
    repo.ui.popbuffer()
    cl = repo.changelog
    o = set([cl.rev(r) for r in outgoing.missing])
    return subset & o

@predicate('p1([set])', safe=True)
def p1(repo, subset, x):
    """First parent of changesets in set, or the working directory.
    """
    if x is None:
        p = repo[x].p1().rev()
        if p >= 0:
            return subset & baseset([p])
        return baseset()

    ps = set()
    cl = repo.changelog
    for r in getset(repo, fullreposet(repo), x):
        ps.add(cl.parentrevs(r)[0])
    ps -= set([node.nullrev])
    # XXX we should turn this into a baseset instead of a set, smartset may do
    # some optimisations from the fact this is a baseset.
    return subset & ps

@predicate('p2([set])', safe=True)
def p2(repo, subset, x):
    """Second parent of changesets in set, or the working directory.
    """
    if x is None:
        ps = repo[x].parents()
        try:
            p = ps[1].rev()
            if p >= 0:
                return subset & baseset([p])
            return baseset()
        except IndexError:
            return baseset()

    ps = set()
    cl = repo.changelog
    for r in getset(repo, fullreposet(repo), x):
        ps.add(cl.parentrevs(r)[1])
    ps -= set([node.nullrev])
    # XXX we should turn this into a baseset instead of a set, smartset may do
    # some optimisations from the fact this is a baseset.
    return subset & ps

@predicate('parents([set])', safe=True)
def parents(repo, subset, x):
    """
    The set of all parents for all changesets in set, or the working directory.
    """
    if x is None:
        ps = set(p.rev() for p in repo[x].parents())
    else:
        ps = set()
        cl = repo.changelog
        up = ps.update
        parentrevs = cl.parentrevs
        for r in getset(repo, fullreposet(repo), x):
            if r == node.wdirrev:
                up(p.rev() for p in repo[r].parents())
            else:
                up(parentrevs(r))
    ps -= set([node.nullrev])
    return subset & ps

def _phase(repo, subset, target):
    """helper to select all rev in phase <target>"""
    repo._phasecache.loadphaserevs(repo) # ensure phase's sets are loaded
    if repo._phasecache._phasesets:
        s = repo._phasecache._phasesets[target] - repo.changelog.filteredrevs
        s = baseset(s)
        s.sort() # set are non ordered, so we enforce ascending
        return subset & s
    else:
        # fall back to checking each revision's phase individually
        phase = repo._phasecache.phase
        condition = lambda r: phase(repo, r) == target
        return subset.filter(condition, cache=False)

@predicate('draft()', safe=True)
def draft(repo, subset, x):
    """Changeset in draft phase."""
    # i18n: "draft" is a keyword
    getargs(x, 0, 0, _("draft takes no arguments"))
    target = phases.draft
    return _phase(repo, subset, target)

@predicate('secret()', safe=True)
def secret(repo, subset, x):
    """Changeset in secret phase."""
    # i18n: "secret" is a keyword
    getargs(x, 0, 0, _("secret takes no arguments"))
    target = phases.secret
    return _phase(repo, subset, target)

# NOTE(review): header of parentspec(); the rest of this definition lies
# beyond this chunk (incomplete fragment, reproduced as-is).
def parentspec(repo, subset,
x, n): """``set^0`` The set. ``set^1`` (or ``set^``), ``set^2`` First or second parent, respectively, of all changesets in set. """ try: n = int(n[1]) if n not in (0, 1, 2): raise ValueError except (TypeError, ValueError): raise error.ParseError(_("^ expects a number 0, 1, or 2")) ps = set() cl = repo.changelog for r in getset(repo, fullreposet(repo), x): if n == 0: ps.add(r) elif n == 1: ps.add(cl.parentrevs(r)[0]) elif n == 2: parents = cl.parentrevs(r) if len(parents) > 1: ps.add(parents[1]) return subset & ps @predicate('present(set)', safe=True) def present(repo, subset, x): """An empty set, if any revision in set isn't found; otherwise, all revisions in set. If any of specified revisions is not present in the local repository, the query is normally aborted. But this predicate allows the query to continue even in such cases. """ try: return getset(repo, subset, x) except error.RepoLookupError: return baseset() # for internal use @predicate('_notpublic', safe=True) def _notpublic(repo, subset, x): getargs(x, 0, 0, "_notpublic takes no arguments") repo._phasecache.loadphaserevs(repo) # ensure phase's sets are loaded if repo._phasecache._phasesets: s = set() for u in repo._phasecache._phasesets[1:]: s.update(u) s = baseset(s - repo.changelog.filteredrevs) s.sort() return subset & s else: phase = repo._phasecache.phase target = phases.public condition = lambda r: phase(repo, r) != target return subset.filter(condition, cache=False) @predicate('public()', safe=True) def public(repo, subset, x): """Changeset in public phase.""" # i18n: "public" is a keyword getargs(x, 0, 0, _("public takes no arguments")) phase = repo._phasecache.phase target = phases.public condition = lambda r: phase(repo, r) == target return subset.filter(condition, cache=False) @predicate('remote([id [,path]])', safe=True) def remote(repo, subset, x): """Local revision that corresponds to the given identifier in a remote repository, if present. Here, the '.' 
identifier is a synonym for the current local branch. """ from . import hg # avoid start-up nasties # i18n: "remote" is a keyword l = getargs(x, 0, 2, _("remote takes zero, one, or two arguments")) q = '.' if len(l) > 0: # i18n: "remote" is a keyword q = getstring(l[0], _("remote requires a string id")) if q == '.': q = repo['.'].branch() dest = '' if len(l) > 1: # i18n: "remote" is a keyword dest = getstring(l[1], _("remote requires a repository path")) dest = repo.ui.expandpath(dest or 'default') dest, branches = hg.parseurl(dest) revs, checkout = hg.addbranchrevs(repo, repo, branches, []) if revs: revs = [repo.lookup(rev) for rev in revs] other = hg.peer(repo, {}, dest) n = other.lookup(q) if n in repo: r = repo[n].rev() if r in subset: return baseset([r]) return baseset() @predicate('removes(pattern)', safe=True) def removes(repo, subset, x): """Changesets which remove files matching pattern. The pattern without explicit kind like ``glob:`` is expected to be relative to the current directory and match against a file or a directory. """ # i18n: "removes" is a keyword pat = getstring(x, _("removes requires a pattern")) return checkstatus(repo, subset, pat, 2) @predicate('rev(number)', safe=True) def rev(repo, subset, x): """Revision with the given numeric identifier. """ # i18n: "rev" is a keyword l = getargs(x, 1, 1, _("rev requires one argument")) try: # i18n: "rev" is a keyword l = int(getstring(l[0], _("rev requires a number"))) except (TypeError, ValueError): # i18n: "rev" is a keyword raise error.ParseError(_("rev expects a number")) if l not in repo.changelog and l != node.nullrev: return baseset() return subset & baseset([l]) @predicate('matching(revision [, field])', safe=True) def matching(repo, subset, x): """Changesets in which a given set of fields match the set of fields in the selected revision or set. To match more than one field pass the list of fields to match separated by spaces (e.g. ``author description``). 
Valid fields are most regular revision fields and some special fields. Regular revision fields are ``description``, ``author``, ``branch``, ``date``, ``files``, ``phase``, ``parents``, ``substate``, ``user`` and ``diff``. Note that ``author`` and ``user`` are synonyms. ``diff`` refers to the contents of the revision. Two revisions matching their ``diff`` will also match their ``files``. Special fields are ``summary`` and ``metadata``: ``summary`` matches the first line of the description. ``metadata`` is equivalent to matching ``description user date`` (i.e. it matches the main metadata fields). ``metadata`` is the default field which is used when no fields are specified. You can match more than one field at a time. """ # i18n: "matching" is a keyword l = getargs(x, 1, 2, _("matching takes 1 or 2 arguments")) revs = getset(repo, fullreposet(repo), l[0]) fieldlist = ['metadata'] if len(l) > 1: fieldlist = getstring(l[1], # i18n: "matching" is a keyword _("matching requires a string " "as its second argument")).split() # Make sure that there are no repeated fields, # expand the 'special' 'metadata' field type # and check the 'files' whenever we check the 'diff' fields = [] for field in fieldlist: if field == 'metadata': fields += ['user', 'description', 'date'] elif field == 'diff': # a revision matching the diff must also match the files # since matching the diff is very costly, make sure to # also match the files first fields += ['files', 'diff'] else: if field == 'author': field = 'user' fields.append(field) fields = set(fields) if 'summary' in fields and 'description' in fields: # If a revision matches its description it also matches its summary fields.discard('summary') # We may want to match more than one field # Not all fields take the same amount of time to be matched # Sort the selected fields in order of increasing matching cost fieldorder = ['phase', 'parents', 'user', 'date', 'branch', 'summary', 'files', 'description', 'substate', 'diff'] def 
fieldkeyfunc(f): try: return fieldorder.index(f) except ValueError: # assume an unknown field is very costly return len(fieldorder) fields = list(fields) fields.sort(key=fieldkeyfunc) # Each field will be matched with its own "getfield" function # which will be added to the getfieldfuncs array of functions getfieldfuncs = [] _funcs = { 'user': lambda r: repo[r].user(), 'branch': lambda r: repo[r].branch(), 'date': lambda r: repo[r].date(), 'description': lambda r: repo[r].description(), 'files': lambda r: repo[r].files(), 'parents': lambda r: repo[r].parents(), 'phase': lambda r: repo[r].phase(), 'substate': lambda r: repo[r].substate, 'summary': lambda r: repo[r].description().splitlines()[0], 'diff': lambda r: list(repo[r].diff(git=True),) } for info in fields: getfield = _funcs.get(info, None) if getfield is None: raise error.ParseError( # i18n: "matching" is a keyword _("unexpected field name passed to matching: %s") % info) getfieldfuncs.append(getfield) # convert the getfield array of functions into a "getinfo" function # which returns an array of field values (or a single value if there # is only one field to match) getinfo = lambda r: [f(r) for f in getfieldfuncs] def matches(x): for rev in revs: target = getinfo(rev) match = True for n, f in enumerate(getfieldfuncs): if target[n] != f(x): match = False if match: return True return False return subset.filter(matches) @predicate('reverse(set)', safe=True) def reverse(repo, subset, x): """Reverse order of set. """ l = getset(repo, subset, x) l.reverse() return l @predicate('roots(set)', safe=True) def roots(repo, subset, x): """Changesets in set with no parent changeset in set. """ s = getset(repo, fullreposet(repo), x) parents = repo.changelog.parentrevs def filter(r): for p in parents(r): if 0 <= p and p in s: return False return True return subset & s.filter(filter) @predicate('sort(set[, [-]key...])', safe=True) def sort(repo, subset, x): """Sort set by keys. 
The default sort order is ascending, specify a key as ``-key`` to sort in descending order. The keys can be: - ``rev`` for the revision number, - ``branch`` for the branch name, - ``desc`` for the commit message (description), - ``user`` for user name (``author`` can be used as an alias), - ``date`` for the commit date """ # i18n: "sort" is a keyword l = getargs(x, 1, 2, _("sort requires one or two arguments")) keys = "rev" if len(l) == 2: # i18n: "sort" is a keyword keys = getstring(l[1], _("sort spec must be a string")) s = l[0] keys = keys.split() l = [] def invert(s): return "".join(chr(255 - ord(c)) for c in s) revs = getset(repo, subset, s) if keys == ["rev"]: revs.sort() return revs elif keys == ["-rev"]: revs.sort(reverse=True) return revs for r in revs: c = repo[r] e = [] for k in keys: if k == 'rev': e.append(r) elif k == '-rev': e.append(-r) elif k == 'branch': e.append(c.branch()) elif k == '-branch': e.append(invert(c.branch())) elif k == 'desc': e.append(c.description()) elif k == '-desc': e.append(invert(c.description())) elif k in 'user author': e.append(c.user()) elif k in '-user -author': e.append(invert(c.user())) elif k == 'date': e.append(c.date()[0]) elif k == '-date': e.append(-c.date()[0]) else: raise error.ParseError(_("unknown sort key %r") % k) e.append(r) l.append(e) l.sort() return baseset([e[-1] for e in l]) @predicate('subrepo([pattern])') def subrepo(repo, subset, x): """Changesets that add, modify or remove the given subrepo. If no subrepo pattern is named, any subrepo changes are returned. 
""" # i18n: "subrepo" is a keyword args = getargs(x, 0, 1, _('subrepo takes at most one argument')) if len(args) != 0: pat = getstring(args[0], _("subrepo requires a pattern")) m = matchmod.exact(repo.root, repo.root, ['.hgsubstate']) def submatches(names): k, p, m = util.stringmatcher(pat) for name in names: if m(name): yield name def matches(x): c = repo[x] s = repo.status(c.p1().node(), c.node(), match=m) if len(args) == 0: return s.added or s.modified or s.removed if s.added: return any(submatches(c.substate.keys())) if s.modified: subs = set(c.p1().substate.keys()) subs.update(c.substate.keys()) for path in submatches(subs): if c.p1().substate.get(path) != c.substate.get(path): return True if s.removed: return any(submatches(c.p1().substate.keys())) return False return subset.filter(matches) def _substringmatcher(pattern): kind, pattern, matcher = util.stringmatcher(pattern) if kind == 'literal': matcher = lambda s: pattern in s return kind, pattern, matcher @predicate('tag([name])', safe=True) def tag(repo, subset, x): """The specified tag by name, or all tagged revisions if no name is given. If `name` starts with `re:`, the remainder of the name is treated as a regular expression. To match a tag that actually starts with `re:`, use the prefix `literal:`. 
""" # i18n: "tag" is a keyword args = getargs(x, 0, 1, _("tag takes one or no arguments")) cl = repo.changelog if args: pattern = getstring(args[0], # i18n: "tag" is a keyword _('the argument to tag must be a string')) kind, pattern, matcher = util.stringmatcher(pattern) if kind == 'literal': # avoid resolving all tags tn = repo._tagscache.tags.get(pattern, None) if tn is None: raise error.RepoLookupError(_("tag '%s' does not exist") % pattern) s = set([repo[tn].rev()]) else: s = set([cl.rev(n) for t, n in repo.tagslist() if matcher(t)]) else: s = set([cl.rev(n) for t, n in repo.tagslist() if t != 'tip']) return subset & s @predicate('tagged', safe=True) def tagged(repo, subset, x): return tag(repo, subset, x) @predicate('unstable()', safe=True) def unstable(repo, subset, x): """Non-obsolete changesets with obsolete ancestors. """ # i18n: "unstable" is a keyword getargs(x, 0, 0, _("unstable takes no arguments")) unstables = obsmod.getrevs(repo, 'unstable') return subset & unstables @predicate('user(string)', safe=True) def user(repo, subset, x): """User name contains string. The match is case-insensitive. If `string` starts with `re:`, the remainder of the string is treated as a regular expression. To match a user that actually contains `re:`, use the prefix `literal:`. """ return author(repo, subset, x) # experimental @predicate('wdir', safe=True) def wdir(repo, subset, x): # i18n: "wdir" is a keyword getargs(x, 0, 0, _("wdir takes no arguments")) if node.wdirrev in subset or isinstance(subset, fullreposet): return baseset([node.wdirrev]) return baseset() # for internal use @predicate('_list', safe=True) def _list(repo, subset, x): s = getstring(x, "internal error") if not s: return baseset() # remove duplicates here. it's difficult for caller to deduplicate sets # because different symbols can point to the same rev. 
cl = repo.changelog ls = [] seen = set() for t in s.split('\0'): try: # fast path for integer revision r = int(t) if str(r) != t or r not in cl: raise ValueError revs = [r] except ValueError: revs = stringset(repo, subset, t) for r in revs: if r in seen: continue if (r in subset or r == node.nullrev and isinstance(subset, fullreposet)): ls.append(r) seen.add(r) return baseset(ls) # for internal use @predicate('_intlist', safe=True) def _intlist(repo, subset, x): s = getstring(x, "internal error") if not s: return baseset() ls = [int(r) for r in s.split('\0')] s = subset return baseset([r for r in ls if r in s]) # for internal use @predicate('_hexlist', safe=True) def _hexlist(repo, subset, x): s = getstring(x, "internal error") if not s: return baseset() cl = repo.changelog ls = [cl.rev(node.bin(r)) for r in s.split('\0')] s = subset return baseset([r for r in ls if r in s]) methods = { "range": rangeset, "dagrange": dagrange, "string": stringset, "symbol": stringset, "and": andset, "or": orset, "not": notset, "list": listset, "keyvalue": keyvaluepair, "func": func, "ancestor": ancestorspec, "parent": parentspec, "parentpost": p1, } def optimize(x, small): if x is None: return 0, x smallbonus = 1 if small: smallbonus = .5 op = x[0] if op == 'minus': return optimize(('and', x[1], ('not', x[2])), small) elif op == 'only': return optimize(('func', ('symbol', 'only'), ('list', x[1], x[2])), small) elif op == 'onlypost': return optimize(('func', ('symbol', 'only'), x[1]), small) elif op == 'dagrangepre': return optimize(('func', ('symbol', 'ancestors'), x[1]), small) elif op == 'dagrangepost': return optimize(('func', ('symbol', 'descendants'), x[1]), small) elif op == 'rangeall': return optimize(('range', ('string', '0'), ('string', 'tip')), small) elif op == 'rangepre': return optimize(('range', ('string', '0'), x[1]), small) elif op == 'rangepost': return optimize(('range', x[1], ('string', 'tip')), small) elif op == 'negate': return optimize(('string', '-' + 
getstring(x[1], _("can't negate that"))), small) elif op in 'string symbol negate': return smallbonus, x # single revisions are small elif op == 'and': wa, ta = optimize(x[1], True) wb, tb = optimize(x[2], True) # (::x and not ::y)/(not ::y and ::x) have a fast path def isonly(revs, bases): return ( revs is not None and revs[0] == 'func' and getstring(revs[1], _('not a symbol')) == 'ancestors' and bases is not None and bases[0] == 'not' and bases[1][0] == 'func' and getstring(bases[1][1], _('not a symbol')) == 'ancestors') w = min(wa, wb) if isonly(ta, tb): return w, ('func', ('symbol', 'only'), ('list', ta[2], tb[1][2])) if isonly(tb, ta): return w, ('func', ('symbol', 'only'), ('list', tb[2], ta[1][2])) if wa > wb: return w, (op, tb, ta) return w, (op, ta, tb) elif op == 'or': # fast path for machine-generated expression, that is likely to have # lots of trivial revisions: 'a + b + c()' to '_list(a b) + c()' ws, ts, ss = [], [], [] def flushss(): if not ss: return if len(ss) == 1: w, t = ss[0] else: s = '\0'.join(t[1] for w, t in ss) y = ('func', ('symbol', '_list'), ('string', s)) w, t = optimize(y, False) ws.append(w) ts.append(t) del ss[:] for y in x[1:]: w, t = optimize(y, False) if t is not None and (t[0] == 'string' or t[0] == 'symbol'): ss.append((w, t)) continue flushss() ws.append(w) ts.append(t) flushss() if len(ts) == 1: return ws[0], ts[0] # 'or' operation is fully optimized out # we can't reorder trees by weight because it would change the order. 
# ("sort(a + b)" == "sort(b + a)", but "a + b" != "b + a") # ts = tuple(t for w, t in sorted(zip(ws, ts), key=lambda wt: wt[0])) return max(ws), (op,) + tuple(ts) elif op == 'not': # Optimize not public() to _notpublic() because we have a fast version if x[1] == ('func', ('symbol', 'public'), None): newsym = ('func', ('symbol', '_notpublic'), None) o = optimize(newsym, not small) return o[0], o[1] else: o = optimize(x[1], not small) return o[0], (op, o[1]) elif op == 'parentpost': o = optimize(x[1], small) return o[0], (op, o[1]) elif op == 'group': return optimize(x[1], small) elif op in 'dagrange range parent ancestorspec': if op == 'parent': # x^:y means (x^) : y, not x ^ (:y) post = ('parentpost', x[1]) if x[2][0] == 'dagrangepre': return optimize(('dagrange', post, x[2][1]), small) elif x[2][0] == 'rangepre': return optimize(('range', post, x[2][1]), small) wa, ta = optimize(x[1], small) wb, tb = optimize(x[2], small) return wa + wb, (op, ta, tb) elif op == 'list': ws, ts = zip(*(optimize(y, small) for y in x[1:])) return sum(ws), (op,) + ts elif op == 'func': f = getstring(x[1], _("not a symbol")) wa, ta = optimize(x[2], small) if f in ("author branch closed date desc file grep keyword " "outgoing user"): w = 10 # slow elif f in "modifies adds removes": w = 30 # slower elif f == "contains": w = 100 # very slow elif f == "ancestor": w = 1 * smallbonus elif f in "reverse limit first _intlist": w = 0 elif f in "sort": w = 10 # assume most sorts look at changelog else: w = 1 return w + wa, (op, x[1], ta) return 1, x _aliasarg = ('func', ('symbol', '_aliasarg')) def _getaliasarg(tree): """If tree matches ('func', ('symbol', '_aliasarg'), ('string', X)) return X, None otherwise. """ if (len(tree) == 3 and tree[:2] == _aliasarg and tree[2][0] == 'string'): return tree[2][1] return None def _checkaliasarg(tree, known=None): """Check tree contains no _aliasarg construct or only ones which value is in known. Used to avoid alias placeholders injection. 
""" if isinstance(tree, tuple): arg = _getaliasarg(tree) if arg is not None and (not known or arg not in known): raise error.UnknownIdentifier('_aliasarg', []) for t in tree: _checkaliasarg(t, known) # the set of valid characters for the initial letter of symbols in # alias declarations and definitions _aliassyminitletters = set(c for c in [chr(i) for i in xrange(256)] if c.isalnum() or c in '._@$' or ord(c) > 127) def _tokenizealias(program, lookup=None): """Parse alias declaration/definition into a stream of tokens This allows symbol names to use also ``$`` as an initial letter (for backward compatibility), and callers of this function should examine whether ``$`` is used also for unexpected symbols or not. """ return tokenize(program, lookup=lookup, syminitletters=_aliassyminitletters) def _parsealiasdecl(decl): """Parse alias declaration ``decl`` This returns ``(name, tree, args, errorstr)`` tuple: - ``name``: of declared alias (may be ``decl`` itself at error) - ``tree``: parse result (or ``None`` at error) - ``args``: list of alias argument names (or None for symbol declaration) - ``errorstr``: detail about detected error (or None) >>> _parsealiasdecl('foo') ('foo', ('symbol', 'foo'), None, None) >>> _parsealiasdecl('$foo') ('$foo', None, None, "'$' not for alias arguments") >>> _parsealiasdecl('foo::bar') ('foo::bar', None, None, 'invalid format') >>> _parsealiasdecl('foo bar') ('foo bar', None, None, 'at 4: invalid token') >>> _parsealiasdecl('foo()') ('foo', ('func', ('symbol', 'foo')), [], None) >>> _parsealiasdecl('$foo()') ('$foo()', None, None, "'$' not for alias arguments") >>> _parsealiasdecl('foo($1, $2)') ('foo', ('func', ('symbol', 'foo')), ['$1', '$2'], None) >>> _parsealiasdecl('foo(bar_bar, baz.baz)') ('foo', ('func', ('symbol', 'foo')), ['bar_bar', 'baz.baz'], None) >>> _parsealiasdecl('foo($1, $2, nested($1, $2))') ('foo($1, $2, nested($1, $2))', None, None, 'invalid argument list') >>> _parsealiasdecl('foo(bar($1, $2))') ('foo(bar($1, $2))', 
None, None, 'invalid argument list') >>> _parsealiasdecl('foo("string")') ('foo("string")', None, None, 'invalid argument list') >>> _parsealiasdecl('foo($1, $2') ('foo($1, $2', None, None, 'at 10: unexpected token: end') >>> _parsealiasdecl('foo("string') ('foo("string', None, None, 'at 5: unterminated string') >>> _parsealiasdecl('foo($1, $2, $1)') ('foo', None, None, 'argument names collide with each other') """ p = parser.parser(elements) try: tree, pos = p.parse(_tokenizealias(decl)) if (pos != len(decl)): raise error.ParseError(_('invalid token'), pos) tree = parser.simplifyinfixops(tree, ('list',)) if isvalidsymbol(tree): # "name = ...." style name = getsymbol(tree) if name.startswith('$'): return (decl, None, None, _("'$' not for alias arguments")) return (name, ('symbol', name), None, None) if isvalidfunc(tree): # "name(arg, ....) = ...." style name = getfuncname(tree) if name.startswith('$'): return (decl, None, None, _("'$' not for alias arguments")) args = [] for arg in getfuncargs(tree): if not isvalidsymbol(arg): return (decl, None, None, _("invalid argument list")) args.append(getsymbol(arg)) if len(args) != len(set(args)): return (name, None, None, _("argument names collide with each other")) return (name, ('func', ('symbol', name)), args, None) return (decl, None, None, _("invalid format")) except error.ParseError as inst: return (decl, None, None, parseerrordetail(inst)) def _parsealiasdefn(defn, args): """Parse alias definition ``defn`` This function also replaces alias argument references in the specified definition by ``_aliasarg(ARGNAME)``. ``args`` is a list of alias argument names, or None if the alias is declared as a symbol. This returns "tree" as parsing result. >>> args = ['$1', '$2', 'foo'] >>> print prettyformat(_parsealiasdefn('$1 or foo', args)) (or (func ('symbol', '_aliasarg') ('string', '$1')) (func ('symbol', '_aliasarg') ('string', 'foo'))) >>> try: ... _parsealiasdefn('$1 or $bar', args) ... except error.ParseError, inst: ... 
print parseerrordetail(inst) at 6: '$' not for alias arguments >>> args = ['$1', '$10', 'foo'] >>> print prettyformat(_parsealiasdefn('$10 or foobar', args)) (or (func ('symbol', '_aliasarg') ('string', '$10')) ('symbol', 'foobar')) >>> print prettyformat(_parsealiasdefn('"$1" or "foo"', args)) (or ('string', '$1') ('string', 'foo')) """ def tokenizedefn(program, lookup=None): if args: argset = set(args) else: argset = set() for t, value, pos in _tokenizealias(program, lookup=lookup): if t == 'symbol': if value in argset: # emulate tokenization of "_aliasarg('ARGNAME')": # "_aliasarg()" is an unknown symbol only used separate # alias argument placeholders from regular strings. yield ('symbol', '_aliasarg', pos) yield ('(', None, pos) yield ('string', value, pos) yield (')', None, pos) continue elif value.startswith('$'): raise error.ParseError(_("'$' not for alias arguments"), pos) yield (t, value, pos) p = parser.parser(elements) tree, pos = p.parse(tokenizedefn(defn)) if pos != len(defn): raise error.ParseError(_('invalid token'), pos) return parser.simplifyinfixops(tree, ('list', 'or')) class revsetalias(object): # whether own `error` information is already shown or not. # this avoids showing same warning multiple times at each `findaliases`. warned = False def __init__(self, name, value): '''Aliases like: h = heads(default) b($1) = ancestors($1) - ancestors(default) ''' self.name, self.tree, self.args, self.error = _parsealiasdecl(name) if self.error: self.error = _('failed to parse the declaration of revset alias' ' "%s": %s') % (self.name, self.error) return try: self.replacement = _parsealiasdefn(value, self.args) # Check for placeholder injection _checkaliasarg(self.replacement, self.args) except error.ParseError as inst: self.error = _('failed to parse the definition of revset alias' ' "%s": %s') % (self.name, parseerrordetail(inst)) def _getalias(aliases, tree): """If tree looks like an unexpanded alias, return it. Return None otherwise. 
""" if isinstance(tree, tuple) and tree: if tree[0] == 'symbol' and len(tree) == 2: name = tree[1] alias = aliases.get(name) if alias and alias.args is None and alias.tree == tree: return alias if tree[0] == 'func' and len(tree) > 1: if tree[1][0] == 'symbol' and len(tree[1]) == 2: name = tree[1][1] alias = aliases.get(name) if alias and alias.args is not None and alias.tree == tree[:2]: return alias return None def _expandargs(tree, args): """Replace _aliasarg instances with the substitution value of the same name in args, recursively. """ if not tree or not isinstance(tree, tuple): return tree arg = _getaliasarg(tree) if arg is not None: return args[arg] return tuple(_expandargs(t, args) for t in tree) def _expandaliases(aliases, tree, expanding, cache): """Expand aliases in tree, recursively. 'aliases' is a dictionary mapping user defined aliases to revsetalias objects. """ if not isinstance(tree, tuple): # Do not expand raw strings return tree alias = _getalias(aliases, tree) if alias is not None: if alias.error: raise error.Abort(alias.error) if alias in expanding: raise error.ParseError(_('infinite expansion of revset alias "%s" ' 'detected') % alias.name) expanding.append(alias) if alias.name not in cache: cache[alias.name] = _expandaliases(aliases, alias.replacement, expanding, cache) result = cache[alias.name] expanding.pop() if alias.args is not None: l = getlist(tree[2]) if len(l) != len(alias.args): raise error.ParseError( _('invalid number of arguments: %d') % len(l)) l = [_expandaliases(aliases, a, [], cache) for a in l] result = _expandargs(result, dict(zip(alias.args, l))) else: result = tuple(_expandaliases(aliases, t, expanding, cache) for t in tree) return result def findaliases(ui, tree, showwarning=None): _checkaliasarg(tree) aliases = {} for k, v in ui.configitems('revsetalias'): alias = revsetalias(k, v) aliases[alias.name] = alias tree = _expandaliases(aliases, tree, [], {}) if showwarning: # warn about problematic (but not referred) aliases 
for name, alias in sorted(aliases.iteritems()): if alias.error and not alias.warned: showwarning(_('warning: %s\n') % (alias.error)) alias.warned = True return tree def foldconcat(tree): """Fold elements to be concatenated by `##` """ if not isinstance(tree, tuple) or tree[0] in ('string', 'symbol'): return tree if tree[0] == '_concat': pending = [tree] l = [] while pending: e = pending.pop() if e[0] == '_concat': pending.extend(reversed(e[1:])) elif e[0] in ('string', 'symbol'): l.append(e[1]) else: msg = _("\"##\" can't concatenate \"%s\" element") % (e[0]) raise error.ParseError(msg) return ('string', ''.join(l)) else: return tuple(foldconcat(t) for t in tree) def parse(spec, lookup=None): p = parser.parser(elements) tree, pos = p.parse(tokenize(spec, lookup=lookup)) if pos != len(spec): raise error.ParseError(_("invalid token"), pos) return parser.simplifyinfixops(tree, ('list', 'or')) def posttreebuilthook(tree, repo): # hook for extensions to execute code on the optimized tree pass def match(ui, spec, repo=None): if not spec: raise error.ParseError(_("empty query")) lookup = None if repo: lookup = repo.__contains__ tree = parse(spec, lookup) return _makematcher(ui, tree, repo) def matchany(ui, specs, repo=None): """Create a matcher that will include any revisions matching one of the given specs""" if not specs: def mfunc(repo, subset=None): return baseset() return mfunc if not all(specs): raise error.ParseError(_("empty query")) lookup = None if repo: lookup = repo.__contains__ if len(specs) == 1: tree = parse(specs[0], lookup) else: tree = ('or',) + tuple(parse(s, lookup) for s in specs) return _makematcher(ui, tree, repo) def _makematcher(ui, tree, repo): if ui: tree = findaliases(ui, tree, showwarning=ui.warn) tree = foldconcat(tree) weight, tree = optimize(tree, True) posttreebuilthook(tree, repo) def mfunc(repo, subset=None): if subset is None: subset = fullreposet(repo) if util.safehasattr(subset, 'isascending'): result = getset(repo, subset, tree) 
else: result = getset(repo, baseset(subset), tree) return result return mfunc def formatspec(expr, *args): ''' This is a convenience function for using revsets internally, and escapes arguments appropriately. Aliases are intentionally ignored so that intended expression behavior isn't accidentally subverted. Supported arguments: %r = revset expression, parenthesized %d = int(arg), no quoting %s = string(arg), escaped and single-quoted %b = arg.branch(), escaped and single-quoted %n = hex(arg), single-quoted %% = a literal '%' Prefixing the type with 'l' specifies a parenthesized list of that type. >>> formatspec('%r:: and %lr', '10 or 11', ("this()", "that()")) '(10 or 11):: and ((this()) or (that()))' >>> formatspec('%d:: and not %d::', 10, 20) '10:: and not 20::' >>> formatspec('%ld or %ld', [], [1]) "_list('') or 1" >>> formatspec('keyword(%s)', 'foo\\xe9') "keyword('foo\\\\xe9')" >>> b = lambda: 'default' >>> b.branch = b >>> formatspec('branch(%b)', b) "branch('default')" >>> formatspec('root(%ls)', ['a', 'b', 'c', 'd']) "root(_list('a\\x00b\\x00c\\x00d'))" ''' def quote(s): return repr(str(s)) def argtype(c, arg): if c == 'd': return str(int(arg)) elif c == 's': return quote(arg) elif c == 'r': parse(arg) # make sure syntax errors are confined return '(%s)' % arg elif c == 'n': return quote(node.hex(arg)) elif c == 'b': return quote(arg.branch()) def listexp(s, t): l = len(s) if l == 0: return "_list('')" elif l == 1: return argtype(t, s[0]) elif t == 'd': return "_intlist('%s')" % "\0".join(str(int(a)) for a in s) elif t == 's': return "_list('%s')" % "\0".join(s) elif t == 'n': return "_hexlist('%s')" % "\0".join(node.hex(a) for a in s) elif t == 'b': return "_list('%s')" % "\0".join(a.branch() for a in s) m = l // 2 return '(%s or %s)' % (listexp(s[:m], t), listexp(s[m:], t)) ret = '' pos = 0 arg = 0 while pos < len(expr): c = expr[pos] if c == '%': pos += 1 d = expr[pos] if d == '%': ret += d elif d in 'dsnbr': ret += argtype(d, args[arg]) arg += 1 elif d 
== 'l': # a list of some type pos += 1 d = expr[pos] ret += listexp(list(args[arg]), d) arg += 1 else: raise error.Abort('unexpected revspec format character %s' % d) else: ret += c pos += 1 return ret def prettyformat(tree): return parser.prettyformat(tree, ('string', 'symbol')) def depth(tree): if isinstance(tree, tuple): return max(map(depth, tree)) + 1 else: return 0 def funcsused(tree): if not isinstance(tree, tuple) or tree[0] in ('string', 'symbol'): return set() else: funcs = set() for s in tree[1:]: funcs |= funcsused(s) if tree[0] == 'func': funcs.add(tree[1][1]) return funcs class abstractsmartset(object): def __nonzero__(self): """True if the smartset is not empty""" raise NotImplementedError() def __contains__(self, rev): """provide fast membership testing""" raise NotImplementedError() def __iter__(self): """iterate the set in the order it is supposed to be iterated""" raise NotImplementedError() # Attributes containing a function to perform a fast iteration in a given # direction. A smartset can have none, one, or both defined. # # Default value is None instead of a function returning None to avoid # initializing an iterator just for testing if a fast method exists. 
fastasc = None fastdesc = None def isascending(self): """True if the set will iterate in ascending order""" raise NotImplementedError() def isdescending(self): """True if the set will iterate in descending order""" raise NotImplementedError() @util.cachefunc def min(self): """return the minimum element in the set""" if self.fastasc is not None: for r in self.fastasc(): return r raise ValueError('arg is an empty sequence') return min(self) @util.cachefunc def max(self): """return the maximum element in the set""" if self.fastdesc is not None: for r in self.fastdesc(): return r raise ValueError('arg is an empty sequence') return max(self) def first(self): """return the first element in the set (user iteration perspective) Return None if the set is empty""" raise NotImplementedError() def last(self): """return the last element in the set (user iteration perspective) Return None if the set is empty""" raise NotImplementedError() def __len__(self): """return the length of the smartsets This can be expensive on smartset that could be lazy otherwise.""" raise NotImplementedError() def reverse(self): """reverse the expected iteration order""" raise NotImplementedError() def sort(self, reverse=True): """get the set to iterate in an ascending or descending order""" raise NotImplementedError() def __and__(self, other): """Returns a new object with the intersection of the two collections. This is part of the mandatory API for smartset.""" if isinstance(other, fullreposet): return self return self.filter(other.__contains__, cache=False) def __add__(self, other): """Returns a new object with the union of the two collections. This is part of the mandatory API for smartset.""" return addset(self, other) def __sub__(self, other): """Returns a new object with the substraction of the two collections. 
This is part of the mandatory API for smartset.""" c = other.__contains__ return self.filter(lambda r: not c(r), cache=False) def filter(self, condition, cache=True): """Returns this smartset filtered by condition as a new smartset. `condition` is a callable which takes a revision number and returns a boolean. This is part of the mandatory API for smartset.""" # builtin cannot be cached. but do not needs to if cache and util.safehasattr(condition, 'func_code'): condition = util.cachefunc(condition) return filteredset(self, condition) class baseset(abstractsmartset): """Basic data structure that represents a revset and contains the basic operation that it should be able to perform. Every method in this class should be implemented by any smartset class. """ def __init__(self, data=()): if not isinstance(data, list): if isinstance(data, set): self._set = data data = list(data) self._list = data self._ascending = None @util.propertycache def _set(self): return set(self._list) @util.propertycache def _asclist(self): asclist = self._list[:] asclist.sort() return asclist def __iter__(self): if self._ascending is None: return iter(self._list) elif self._ascending: return iter(self._asclist) else: return reversed(self._asclist) def fastasc(self): return iter(self._asclist) def fastdesc(self): return reversed(self._asclist) @util.propertycache def __contains__(self): return self._set.__contains__ def __nonzero__(self): return bool(self._list) def sort(self, reverse=False): self._ascending = not bool(reverse) def reverse(self): if self._ascending is None: self._list.reverse() else: self._ascending = not self._ascending def __len__(self): return len(self._list) def isascending(self): """Returns True if the collection is ascending order, False if not. This is part of the mandatory API for smartset.""" if len(self) <= 1: return True return self._ascending is not None and self._ascending def isdescending(self): """Returns True if the collection is descending order, False if not. 
This is part of the mandatory API for smartset.""" if len(self) <= 1: return True return self._ascending is not None and not self._ascending def first(self): if self: if self._ascending is None: return self._list[0] elif self._ascending: return self._asclist[0] else: return self._asclist[-1] return None def last(self): if self: if self._ascending is None: return self._list[-1] elif self._ascending: return self._asclist[-1] else: return self._asclist[0] return None def __repr__(self): d = {None: '', False: '-', True: '+'}[self._ascending] return '<%s%s %r>' % (type(self).__name__, d, self._list) class filteredset(abstractsmartset): """Duck type for baseset class which iterates lazily over the revisions in the subset and contains a function which tests for membership in the revset """ def __init__(self, subset, condition=lambda x: True): """ condition: a function that decide whether a revision in the subset belongs to the revset or not. """ self._subset = subset self._condition = condition def __contains__(self, x): return x in self._subset and self._condition(x) def __iter__(self): return self._iterfilter(self._subset) def _iterfilter(self, it): cond = self._condition for x in it: if cond(x): yield x @property def fastasc(self): it = self._subset.fastasc if it is None: return None return lambda: self._iterfilter(it()) @property def fastdesc(self): it = self._subset.fastdesc if it is None: return None return lambda: self._iterfilter(it()) def __nonzero__(self): fast = self.fastasc if fast is None: fast = self.fastdesc if fast is not None: it = fast() else: it = self for r in it: return True return False def __len__(self): # Basic implementation to be changed in future patches. 
l = baseset([r for r in self]) return len(l) def sort(self, reverse=False): self._subset.sort(reverse=reverse) def reverse(self): self._subset.reverse() def isascending(self): return self._subset.isascending() def isdescending(self): return self._subset.isdescending() def first(self): for x in self: return x return None def last(self): it = None if self.isascending(): it = self.fastdesc elif self.isdescending(): it = self.fastasc if it is not None: for x in it(): return x return None #empty case else: x = None for x in self: pass return x def __repr__(self): return '<%s %r>' % (type(self).__name__, self._subset) def _iterordered(ascending, iter1, iter2): """produce an ordered iteration from two iterators with the same order The ascending is used to indicated the iteration direction. """ choice = max if ascending: choice = min val1 = None val2 = None try: # Consume both iterators in an ordered way until one is empty while True: if val1 is None: val1 = iter1.next() if val2 is None: val2 = iter2.next() next = choice(val1, val2) yield next if val1 == next: val1 = None if val2 == next: val2 = None except StopIteration: # Flush any remaining values and consume the other one it = iter2 if val1 is not None: yield val1 it = iter1 elif val2 is not None: # might have been equality and both are empty yield val2 for val in it: yield val class addset(abstractsmartset): """Represent the addition of two sets Wrapper structure for lazily adding two structures without losing much performance on the __contains__ method If the ascending attribute is set, that means the two structures are ordered in either an ascending or descending way. 
Therefore, we can add them maintaining the order by iterating over both at the same time >>> xs = baseset([0, 3, 2]) >>> ys = baseset([5, 2, 4]) >>> rs = addset(xs, ys) >>> bool(rs), 0 in rs, 1 in rs, 5 in rs, rs.first(), rs.last() (True, True, False, True, 0, 4) >>> rs = addset(xs, baseset([])) >>> bool(rs), 0 in rs, 1 in rs, rs.first(), rs.last() (True, True, False, 0, 2) >>> rs = addset(baseset([]), baseset([])) >>> bool(rs), 0 in rs, rs.first(), rs.last() (False, False, None, None) iterate unsorted: >>> rs = addset(xs, ys) >>> [x for x in rs] # without _genlist [0, 3, 2, 5, 4] >>> assert not rs._genlist >>> len(rs) 5 >>> [x for x in rs] # with _genlist [0, 3, 2, 5, 4] >>> assert rs._genlist iterate ascending: >>> rs = addset(xs, ys, ascending=True) >>> [x for x in rs], [x for x in rs.fastasc()] # without _asclist ([0, 2, 3, 4, 5], [0, 2, 3, 4, 5]) >>> assert not rs._asclist >>> len(rs) 5 >>> [x for x in rs], [x for x in rs.fastasc()] ([0, 2, 3, 4, 5], [0, 2, 3, 4, 5]) >>> assert rs._asclist iterate descending: >>> rs = addset(xs, ys, ascending=False) >>> [x for x in rs], [x for x in rs.fastdesc()] # without _asclist ([5, 4, 3, 2, 0], [5, 4, 3, 2, 0]) >>> assert not rs._asclist >>> len(rs) 5 >>> [x for x in rs], [x for x in rs.fastdesc()] ([5, 4, 3, 2, 0], [5, 4, 3, 2, 0]) >>> assert rs._asclist iterate ascending without fastasc: >>> rs = addset(xs, generatorset(ys), ascending=True) >>> assert rs.fastasc is None >>> [x for x in rs] [0, 2, 3, 4, 5] iterate descending without fastdesc: >>> rs = addset(generatorset(xs), ys, ascending=False) >>> assert rs.fastdesc is None >>> [x for x in rs] [5, 4, 3, 2, 0] """ def __init__(self, revs1, revs2, ascending=None): self._r1 = revs1 self._r2 = revs2 self._iter = None self._ascending = ascending self._genlist = None self._asclist = None def __len__(self): return len(self._list) def __nonzero__(self): return bool(self._r1) or bool(self._r2) @util.propertycache def _list(self): if not self._genlist: self._genlist = 
baseset(iter(self)) return self._genlist def __iter__(self): """Iterate over both collections without repeating elements If the ascending attribute is not set, iterate over the first one and then over the second one checking for membership on the first one so we dont yield any duplicates. If the ascending attribute is set, iterate over both collections at the same time, yielding only one value at a time in the given order. """ if self._ascending is None: if self._genlist: return iter(self._genlist) def arbitraryordergen(): for r in self._r1: yield r inr1 = self._r1.__contains__ for r in self._r2: if not inr1(r): yield r return arbitraryordergen() # try to use our own fast iterator if it exists self._trysetasclist() if self._ascending: attr = 'fastasc' else: attr = 'fastdesc' it = getattr(self, attr) if it is not None: return it() # maybe half of the component supports fast # get iterator for _r1 iter1 = getattr(self._r1, attr) if iter1 is None: # let's avoid side effect (not sure it matters) iter1 = iter(sorted(self._r1, reverse=not self._ascending)) else: iter1 = iter1() # get iterator for _r2 iter2 = getattr(self._r2, attr) if iter2 is None: # let's avoid side effect (not sure it matters) iter2 = iter(sorted(self._r2, reverse=not self._ascending)) else: iter2 = iter2() return _iterordered(self._ascending, iter1, iter2) def _trysetasclist(self): """populate the _asclist attribute if possible and necessary""" if self._genlist is not None and self._asclist is None: self._asclist = sorted(self._genlist) @property def fastasc(self): self._trysetasclist() if self._asclist is not None: return self._asclist.__iter__ iter1 = self._r1.fastasc iter2 = self._r2.fastasc if None in (iter1, iter2): return None return lambda: _iterordered(True, iter1(), iter2()) @property def fastdesc(self): self._trysetasclist() if self._asclist is not None: return self._asclist.__reversed__ iter1 = self._r1.fastdesc iter2 = self._r2.fastdesc if None in (iter1, iter2): return None return 
lambda: _iterordered(False, iter1(), iter2()) def __contains__(self, x): return x in self._r1 or x in self._r2 def sort(self, reverse=False): """Sort the added set For this we use the cached list with all the generated values and if we know they are ascending or descending we can sort them in a smart way. """ self._ascending = not reverse def isascending(self): return self._ascending is not None and self._ascending def isdescending(self): return self._ascending is not None and not self._ascending def reverse(self): if self._ascending is None: self._list.reverse() else: self._ascending = not self._ascending def first(self): for x in self: return x return None def last(self): self.reverse() val = self.first() self.reverse() return val def __repr__(self): d = {None: '', False: '-', True: '+'}[self._ascending] return '<%s%s %r, %r>' % (type(self).__name__, d, self._r1, self._r2) class generatorset(abstractsmartset): """Wrap a generator for lazy iteration Wrapper structure for generators that provides lazy membership and can be iterated more than once. When asked for membership it generates values until either it finds the requested one or has gone through all the elements in the generator """ def __init__(self, gen, iterasc=None): """ gen: a generator producing the values for the generatorset. """ self._gen = gen self._asclist = None self._cache = {} self._genlist = [] self._finished = False self._ascending = True if iterasc is not None: if iterasc: self.fastasc = self._iterator self.__contains__ = self._asccontains else: self.fastdesc = self._iterator self.__contains__ = self._desccontains def __nonzero__(self): # Do not use 'for r in self' because it will enforce the iteration # order (default ascending), possibly unrolling a whole descending # iterator. if self._genlist: return True for r in self._consumegen(): return True return False def __contains__(self, x): if x in self._cache: return self._cache[x] # Use new values only, as existing values would be cached. 
for l in self._consumegen(): if l == x: return True self._cache[x] = False return False def _asccontains(self, x): """version of contains optimised for ascending generator""" if x in self._cache: return self._cache[x] # Use new values only, as existing values would be cached. for l in self._consumegen(): if l == x: return True if l > x: break self._cache[x] = False return False def _desccontains(self, x): """version of contains optimised for descending generator""" if x in self._cache: return self._cache[x] # Use new values only, as existing values would be cached. for l in self._consumegen(): if l == x: return True if l < x: break self._cache[x] = False return False def __iter__(self): if self._ascending: it = self.fastasc else: it = self.fastdesc if it is not None: return it() # we need to consume the iterator for x in self._consumegen(): pass # recall the same code return iter(self) def _iterator(self): if self._finished: return iter(self._genlist) # We have to use this complex iteration strategy to allow multiple # iterations at the same time. We need to be able to catch revision # removed from _consumegen and added to genlist in another instance. # # Getting rid of it would provide an about 15% speed up on this # iteration. 
genlist = self._genlist nextrev = self._consumegen().next _len = len # cache global lookup def gen(): i = 0 while True: if i < _len(genlist): yield genlist[i] else: yield nextrev() i += 1 return gen() def _consumegen(self): cache = self._cache genlist = self._genlist.append for item in self._gen: cache[item] = True genlist(item) yield item if not self._finished: self._finished = True asc = self._genlist[:] asc.sort() self._asclist = asc self.fastasc = asc.__iter__ self.fastdesc = asc.__reversed__ def __len__(self): for x in self._consumegen(): pass return len(self._genlist) def sort(self, reverse=False): self._ascending = not reverse def reverse(self): self._ascending = not self._ascending def isascending(self): return self._ascending def isdescending(self): return not self._ascending def first(self): if self._ascending: it = self.fastasc else: it = self.fastdesc if it is None: # we need to consume all and try again for x in self._consumegen(): pass return self.first() return next(it(), None) def last(self): if self._ascending: it = self.fastdesc else: it = self.fastasc if it is None: # we need to consume all and try again for x in self._consumegen(): pass return self.first() return next(it(), None) def __repr__(self): d = {False: '-', True: '+'}[self._ascending] return '<%s%s>' % (type(self).__name__, d) class spanset(abstractsmartset): """Duck type for baseset class which represents a range of revisions and can work lazily and without having all the range in memory Note that spanset(x, y) behave almost like xrange(x, y) except for two notable points: - when x < y it will be automatically descending, - revision filtered with this repoview will be skipped. """ def __init__(self, repo, start=0, end=None): """ start: first revision included the set (default to 0) end: first revision excluded (last+1) (default to len(repo) Spanset will be descending if `end` < `start`. 
""" if end is None: end = len(repo) self._ascending = start <= end if not self._ascending: start, end = end + 1, start +1 self._start = start self._end = end self._hiddenrevs = repo.changelog.filteredrevs def sort(self, reverse=False): self._ascending = not reverse def reverse(self): self._ascending = not self._ascending def _iterfilter(self, iterrange): s = self._hiddenrevs for r in iterrange: if r not in s: yield r def __iter__(self): if self._ascending: return self.fastasc() else: return self.fastdesc() def fastasc(self): iterrange = xrange(self._start, self._end) if self._hiddenrevs: return self._iterfilter(iterrange) return iter(iterrange) def fastdesc(self): iterrange = xrange(self._end - 1, self._start - 1, -1) if self._hiddenrevs: return self._iterfilter(iterrange) return iter(iterrange) def __contains__(self, rev): hidden = self._hiddenrevs return ((self._start <= rev < self._end) and not (hidden and rev in hidden)) def __nonzero__(self): for r in self: return True return False def __len__(self): if not self._hiddenrevs: return abs(self._end - self._start) else: count = 0 start = self._start end = self._end for rev in self._hiddenrevs: if (end < rev <= start) or (start <= rev < end): count += 1 return abs(self._end - self._start) - count def isascending(self): return self._ascending def isdescending(self): return not self._ascending def first(self): if self._ascending: it = self.fastasc else: it = self.fastdesc for x in it(): return x return None def last(self): if self._ascending: it = self.fastdesc else: it = self.fastasc for x in it(): return x return None def __repr__(self): d = {False: '-', True: '+'}[self._ascending] return '<%s%s %d:%d>' % (type(self).__name__, d, self._start, self._end - 1) class fullreposet(spanset): """a set containing all revisions in the repo This class exists to host special optimization and magic to handle virtual revisions such as "null". 
""" def __init__(self, repo): super(fullreposet, self).__init__(repo) def __and__(self, other): """As self contains the whole repo, all of the other set should also be in self. Therefore `self & other = other`. This boldly assumes the other contains valid revs only. """ # other not a smartset, make is so if not util.safehasattr(other, 'isascending'): # filter out hidden revision # (this boldly assumes all smartset are pure) # # `other` was used with "&", let's assume this is a set like # object. other = baseset(other - self._hiddenrevs) # XXX As fullreposet is also used as bootstrap, this is wrong. # # With a giveme312() revset returning [3,1,2], this makes # 'hg log -r "giveme312()"' -> 1, 2, 3 (wrong) # We cannot just drop it because other usage still need to sort it: # 'hg log -r "all() and giveme312()"' -> 1, 2, 3 (right) # # There is also some faulty revset implementations that rely on it # (eg: children as of its state in e8075329c5fb) # # When we fix the two points above we can move this into the if clause other.sort(reverse=self.isdescending()) return other def prettyformatset(revs): lines = [] rs = repr(revs) p = 0 while p < len(rs): q = rs.find('<', p + 1) if q < 0: q = len(rs) l = rs.count('<', 0, p) - rs.count('>', 0, p) assert l >= 0 lines.append((l, rs[p:q].rstrip())) p = q return '\n'.join(' ' * l + s for l, s in lines) # tell hggettext to extract docstrings from these functions: i18nfunctions = symbols.values() mercurial-3.7.3/mercurial/error.py0000644000175000017500000001615212676531524016621 0ustar mpmmpm00000000000000# error.py - Mercurial exceptions # # Copyright 2005-2008 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. """Mercurial exceptions. This allows us to catch exceptions at higher levels without forcing imports. 
""" from __future__ import absolute_import # Do not import anything here, please class HintException(Exception): def __init__(self, *args, **kw): Exception.__init__(self, *args) self.hint = kw.get('hint') class RevlogError(HintException): pass class FilteredIndexError(IndexError): pass class LookupError(RevlogError, KeyError): def __init__(self, name, index, message): self.name = name self.index = index # this can't be called 'message' because at least some installs of # Python 2.6+ complain about the 'message' property being deprecated self.lookupmessage = message if isinstance(name, str) and len(name) == 20: from .node import short name = short(name) RevlogError.__init__(self, '%s@%s: %s' % (index, name, message)) def __str__(self): return RevlogError.__str__(self) class FilteredLookupError(LookupError): pass class ManifestLookupError(LookupError): pass class CommandError(Exception): """Exception raised on errors in parsing the command line.""" class InterventionRequired(HintException): """Exception raised when a command requires human intervention.""" class Abort(HintException): """Raised if a command needs to print an error and exit.""" class HookLoadError(Abort): """raised when loading a hook fails, aborting an operation Exists to allow more specialized catching.""" class HookAbort(Abort): """raised when a validation hook fails, aborting an operation Exists to allow more specialized catching.""" class ConfigError(Abort): """Exception raised when parsing config files""" class UpdateAbort(Abort): """Raised when an update is aborted for destination issue""" class ResponseExpected(Abort): """Raised when an EOF is received for a prompt""" def __init__(self): from .i18n import _ Abort.__init__(self, _('response expected')) class OutOfBandError(HintException): """Exception raised when a remote repo reports failure""" class ParseError(HintException): """Raised when parsing config files and {rev,file}sets (msg[, pos])""" class UnknownIdentifier(ParseError): 
"""Exception raised when a {rev,file}set references an unknown identifier""" def __init__(self, function, symbols): from .i18n import _ ParseError.__init__(self, _("unknown identifier: %s") % function) self.function = function self.symbols = symbols class RepoError(HintException): pass class RepoLookupError(RepoError): pass class FilteredRepoLookupError(RepoLookupError): pass class CapabilityError(RepoError): pass class RequirementError(RepoError): """Exception raised if .hg/requires has an unknown entry.""" class UnsupportedMergeRecords(Abort): def __init__(self, recordtypes): from .i18n import _ self.recordtypes = sorted(recordtypes) s = ' '.join(self.recordtypes) Abort.__init__( self, _('unsupported merge state records: %s') % s, hint=_('see https://mercurial-scm.org/wiki/MergeStateRecords for ' 'more information')) class LockError(IOError): def __init__(self, errno, strerror, filename, desc): IOError.__init__(self, errno, strerror, filename) self.desc = desc class LockHeld(LockError): def __init__(self, errno, filename, desc, locker): LockError.__init__(self, errno, 'Lock held', filename, desc) self.locker = locker class LockUnavailable(LockError): pass # LockError is for errors while acquiring the lock -- this is unrelated class LockInheritanceContractViolation(RuntimeError): pass class ResponseError(Exception): """Raised to print an error with part of output and exit.""" class UnknownCommand(Exception): """Exception raised if command is not in the command table.""" class AmbiguousCommand(Exception): """Exception raised if command shortcut matches more than one command.""" # derived from KeyboardInterrupt to simplify some breakout code class SignalInterrupt(KeyboardInterrupt): """Exception raised on SIGTERM and SIGHUP.""" class SignatureError(Exception): pass class PushRaced(RuntimeError): """An exception raised during unbundling that indicate a push race""" # bundle2 related errors class BundleValueError(ValueError): """error raised when bundle2 cannot be 
processed""" class BundleUnknownFeatureError(BundleValueError): def __init__(self, parttype=None, params=(), values=()): self.parttype = parttype self.params = params self.values = values if self.parttype is None: msg = 'Stream Parameter' else: msg = parttype entries = self.params if self.params and self.values: assert len(self.params) == len(self.values) entries = [] for idx, par in enumerate(self.params): val = self.values[idx] if val is None: entries.append(val) else: entries.append("%s=%r" % (par, val)) if entries: msg = '%s - %s' % (msg, ', '.join(entries)) ValueError.__init__(self, msg) class ReadOnlyPartError(RuntimeError): """error raised when code tries to alter a part being generated""" class PushkeyFailed(Abort): """error raised when a pushkey part failed to update a value""" def __init__(self, partid, namespace=None, key=None, new=None, old=None, ret=None): self.partid = partid self.namespace = namespace self.key = key self.new = new self.old = old self.ret = ret # no i18n expected to be processed into a better message Abort.__init__(self, 'failed to update value for "%s/%s"' % (namespace, key)) class CensoredNodeError(RevlogError): """error raised when content verification fails on a censored node Also contains the tombstone data substituted for the uncensored data. """ def __init__(self, filename, node, tombstone): from .node import short RevlogError.__init__(self, '%s:%s' % (filename, short(node))) self.tombstone = tombstone class CensoredBaseError(RevlogError): """error raised when a delta is rejected because its base is censored A delta based on a censored revision must be formed as single patch operation which replaces the entire base with new content. This ensures the delta may be applied by clones which have not censored the base. """ class InvalidBundleSpecification(Exception): """error raised when a bundle specification is invalid. This is used for syntax errors as opposed to support errors. 
""" class UnsupportedBundleSpecification(Exception): """error raised when a bundle specification is not supported.""" mercurial-3.7.3/mercurial/templater.py0000644000175000017500000010317212676531525017465 0ustar mpmmpm00000000000000# templater.py - template expansion for output # # Copyright 2005, 2006 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import os import re import types from .i18n import _ from . import ( config, error, minirst, parser, revset as revsetmod, templatefilters, templatekw, util, ) # template parsing elements = { # token-type: binding-strength, primary, prefix, infix, suffix "(": (20, None, ("group", 1, ")"), ("func", 1, ")"), None), ",": (2, None, None, ("list", 2), None), "|": (5, None, None, ("|", 5), None), "%": (6, None, None, ("%", 6), None), ")": (0, None, None, None, None), "integer": (0, "integer", None, None, None), "symbol": (0, "symbol", None, None, None), "string": (0, "string", None, None, None), "template": (0, "template", None, None, None), "end": (0, None, None, None, None), } def tokenize(program, start, end): pos = start while pos < end: c = program[pos] if c.isspace(): # skip inter-token whitespace pass elif c in "(,)%|": # handle simple operators yield (c, None, pos) elif c in '"\'': # handle quoted templates s = pos + 1 data, pos = _parsetemplate(program, s, end, c) yield ('template', data, s) pos -= 1 elif c == 'r' and program[pos:pos + 2] in ("r'", 'r"'): # handle quoted strings c = program[pos + 1] s = pos = pos + 2 while pos < end: # find closing quote d = program[pos] if d == '\\': # skip over escaped characters pos += 2 continue if d == c: yield ('string', program[s:pos], s) break pos += 1 else: raise error.ParseError(_("unterminated string"), s) elif c.isdigit() or c == '-': s = pos if c == '-': # simply take negate operator as part of integer pos += 1 if pos >= end or not 
program[pos].isdigit(): raise error.ParseError(_("integer literal without digits"), s) pos += 1 while pos < end: d = program[pos] if not d.isdigit(): break pos += 1 yield ('integer', program[s:pos], s) pos -= 1 elif (c == '\\' and program[pos:pos + 2] in (r"\'", r'\"') or c == 'r' and program[pos:pos + 3] in (r"r\'", r'r\"')): # handle escaped quoted strings for compatibility with 2.9.2-3.4, # where some of nested templates were preprocessed as strings and # then compiled. therefore, \"...\" was allowed. (issue4733) # # processing flow of _evalifliteral() at 5ab28a2e9962: # outer template string -> stringify() -> compiletemplate() # ------------------------ ------------ ------------------ # {f("\\\\ {g(\"\\\"\")}"} \\ {g("\"")} [r'\\', {g("\"")}] # ~~~~~~~~ # escaped quoted string if c == 'r': pos += 1 token = 'string' else: token = 'template' quote = program[pos:pos + 2] s = pos = pos + 2 while pos < end: # find closing escaped quote if program.startswith('\\\\\\', pos, end): pos += 4 # skip over double escaped characters continue if program.startswith(quote, pos, end): # interpret as if it were a part of an outer string data = parser.unescapestr(program[s:pos]) if token == 'template': data = _parsetemplate(data, 0, len(data))[0] yield (token, data, s) pos += 1 break pos += 1 else: raise error.ParseError(_("unterminated string"), s) elif c.isalnum() or c in '_': s = pos pos += 1 while pos < end: # find end of symbol d = program[pos] if not (d.isalnum() or d == "_"): break pos += 1 sym = program[s:pos] yield ('symbol', sym, s) pos -= 1 elif c == '}': yield ('end', None, pos + 1) return else: raise error.ParseError(_("syntax error"), pos) pos += 1 raise error.ParseError(_("unterminated template expansion"), start) def _parsetemplate(tmpl, start, stop, quote=''): r""" >>> _parsetemplate('foo{bar}"baz', 0, 12) ([('string', 'foo'), ('symbol', 'bar'), ('string', '"baz')], 12) >>> _parsetemplate('foo{bar}"baz', 0, 12, quote='"') ([('string', 'foo'), ('symbol', 'bar')], 
9) >>> _parsetemplate('foo"{bar}', 0, 9, quote='"') ([('string', 'foo')], 4) >>> _parsetemplate(r'foo\"bar"baz', 0, 12, quote='"') ([('string', 'foo"'), ('string', 'bar')], 9) >>> _parsetemplate(r'foo\\"bar', 0, 10, quote='"') ([('string', 'foo\\')], 6) """ parsed = [] sepchars = '{' + quote pos = start p = parser.parser(elements) while pos < stop: n = min((tmpl.find(c, pos, stop) for c in sepchars), key=lambda n: (n < 0, n)) if n < 0: parsed.append(('string', parser.unescapestr(tmpl[pos:stop]))) pos = stop break c = tmpl[n] bs = (n - pos) - len(tmpl[pos:n].rstrip('\\')) if bs % 2 == 1: # escaped (e.g. '\{', '\\\{', but not '\\{') parsed.append(('string', parser.unescapestr(tmpl[pos:n - 1]) + c)) pos = n + 1 continue if n > pos: parsed.append(('string', parser.unescapestr(tmpl[pos:n]))) if c == quote: return parsed, n + 1 parseres, pos = p.parse(tokenize(tmpl, n + 1, stop)) parsed.append(parseres) if quote: raise error.ParseError(_("unterminated string"), start) return parsed, pos def compiletemplate(tmpl, context): parsed, pos = _parsetemplate(tmpl, 0, len(tmpl)) return [compileexp(e, context, methods) for e in parsed] def compileexp(exp, context, curmethods): t = exp[0] if t in curmethods: return curmethods[t](exp, context) raise error.ParseError(_("unknown method '%s'") % t) # template evaluation def getsymbol(exp): if exp[0] == 'symbol': return exp[1] raise error.ParseError(_("expected a symbol, got '%s'") % exp[0]) def getlist(x): if not x: return [] if x[0] == 'list': return getlist(x[1]) + [x[2]] return [x] def gettemplate(exp, context): if exp[0] == 'template': return [compileexp(e, context, methods) for e in exp[1]] if exp[0] == 'symbol': # unlike runsymbol(), here 'symbol' is always taken as template name # even if it exists in mapping. this allows us to override mapping # by web templates, e.g. 'changelogtag' is redefined in map file. 
        return context._load(exp[1])
    raise error.ParseError(_("expected template specifier"))

def evalfuncarg(context, mapping, arg):
    """Evaluate a compiled (func, data) argument pair to a concrete value."""
    func, data = arg
    # func() may return string, generator of strings or arbitrary object such
    # as date tuple, but filter does not want generator.
    thing = func(context, mapping, data)
    if isinstance(thing, types.GeneratorType):
        thing = stringify(thing)
    return thing

def runinteger(context, mapping, data):
    """Evaluate an 'integer' node: the stored literal text, as int."""
    return int(data)

def runstring(context, mapping, data):
    """Evaluate a 'string' node: the stored (already unescaped) text."""
    return data

def _recursivesymbolblocker(key):
    """Return a callable that aborts with a recursion error for 'key'."""
    def showrecursion(**args):
        raise error.Abort(_("recursive reference '%s' in template") % key)
    return showrecursion

def _runrecursivesymbol(context, mapping, key):
    # poison value installed by engine._load while compiling 'key'
    raise error.Abort(_("recursive reference '%s' in template") % key)

def runsymbol(context, mapping, key):
    """Resolve a symbol: mapping first, then defaults, then template map."""
    v = mapping.get(key)
    if v is None:
        v = context._defaults.get(key)
    if v is None:
        # put poison to cut recursion. we can't move this to parsing phase
        # because "x = {x}" is allowed if "x" is a keyword. (issue4758)
        safemapping = mapping.copy()
        safemapping[key] = _recursivesymbolblocker(key)
        try:
            v = context.process(key, safemapping)
        except TemplateNotFound:
            # unknown symbols expand to the empty string
            v = ''
    if callable(v):
        return v(**mapping)
    return v

def buildtemplate(exp, context):
    """Compile a 'template' node into a (runtemplate, ctmpl) pair."""
    ctmpl = [compileexp(e, context, methods) for e in exp[1]]
    if len(ctmpl) == 1:
        return ctmpl[0]  # fast path for string with no template fragment
    return (runtemplate, ctmpl)

def runtemplate(context, mapping, template):
    """Evaluate each compiled (func, data) pair of a template in turn."""
    for func, data in template:
        yield func(context, mapping, data)

def buildfilter(exp, context):
    """Compile a '|' node: filters take precedence over functions."""
    arg = compileexp(exp[1], context, methods)
    n = getsymbol(exp[2])
    if n in context._filters:
        filt = context._filters[n]
        return (runfilter, (arg, filt))
    if n in funcs:
        f = funcs[n]
        return (f, [arg])
    raise error.ParseError(_("unknown function '%s'") % n)

def runfilter(context, mapping, data):
    """Apply a filter function to an evaluated argument."""
    arg, filt = data
    thing = evalfuncarg(context, mapping, arg)
    try:
        return filt(thing)
    except (ValueError, AttributeError, TypeError):
        # report the keyword the filter was applied to, for a nicer error
        if isinstance(arg[1],
                      tuple):
            dt = arg[1][1]
        else:
            dt = arg[1]
        # NOTE(review): filt.func_name is a Python 2 attribute — confirm
        # against the codebase's supported Python versions.
        raise error.Abort(_("template filter '%s' is not compatible with "
                            "keyword '%s'") % (filt.func_name, dt))

def buildmap(exp, context):
    """Compile a '%' node: map a keyword's items over a template."""
    func, data = compileexp(exp[1], context, methods)
    ctmpl = gettemplate(exp[2], context)
    return (runmap, (func, data, ctmpl))

def runmap(context, mapping, data):
    """Evaluate '{key % template}': expand template once per item of key."""
    func, data, ctmpl = data
    d = func(context, mapping, data)
    if util.safehasattr(d, 'itermaps'):
        d = d.itermaps()
    lm = mapping.copy()
    for i in d:
        if isinstance(i, dict):
            # each dict item extends a copy of the outer mapping
            lm.update(i)
            lm['originalnode'] = mapping.get('node')
            yield runtemplate(context, lm, ctmpl)
        else:
            # 'i' is not an iterable of dicts, this happen when 'key'
            # has been fully expanded already and format is useless.
            # If so, return the expanded value.
            yield i

def buildfunc(exp, context):
    """Compile a 'func' node: functions first, then single-arg filters."""
    n = getsymbol(exp[1])
    args = [compileexp(x, context, exprmethods) for x in getlist(exp[2])]
    if n in funcs:
        f = funcs[n]
        return (f, args)
    if n in context._filters:
        if len(args) != 1:
            raise error.ParseError(_("filter %s expects one argument") % n)
        f = context._filters[n]
        return (runfilter, (args[0], f))
    raise error.ParseError(_("unknown function '%s'") % n)

def date(context, mapping, args):
    """:date(date[, fmt]): Format a date. See :hg:`help dates` for formatting
    strings.
    The default is a Unix date format, including the timezone:
    "Mon Sep 04 15:13:13 2006 0700"."""
    if not (1 <= len(args) <= 2):
        # i18n: "date" is a keyword
        raise error.ParseError(_("date expects one or two arguments"))

    date = args[0][0](context, mapping, args[0][1])
    fmt = None
    if len(args) == 2:
        fmt = stringify(args[1][0](context, mapping, args[1][1]))
    try:
        if fmt is None:
            return util.datestr(date)
        else:
            return util.datestr(date, fmt)
    except (TypeError, ValueError):
        # i18n: "date" is a keyword
        raise error.ParseError(_("date expects a date information"))

def diff(context, mapping, args):
    """:diff([includepattern [, excludepattern]]): Show a diff, optionally
    specifying files to include or exclude."""
    if len(args) > 2:
        # i18n: "diff" is a keyword
        raise error.ParseError(_("diff expects zero, one, or two arguments"))

    def getpatterns(i):
        # evaluate the i-th optional pattern argument; empty list if absent
        if i < len(args):
            s = stringify(args[i][0](context, mapping, args[i][1])).strip()
            if s:
                return [s]
        return []

    ctx = mapping['ctx']
    chunks = ctx.diff(match=ctx.match([], getpatterns(0), getpatterns(1)))

    return ''.join(chunks)

def fill(context, mapping, args):
    """:fill(text[, width[, initialident[, hangindent]]]): Fill many
    paragraphs with optional indentation.
See the "fill" filter.""" if not (1 <= len(args) <= 4): # i18n: "fill" is a keyword raise error.ParseError(_("fill expects one to four arguments")) text = stringify(args[0][0](context, mapping, args[0][1])) width = 76 initindent = '' hangindent = '' if 2 <= len(args) <= 4: try: width = int(stringify(args[1][0](context, mapping, args[1][1]))) except ValueError: # i18n: "fill" is a keyword raise error.ParseError(_("fill expects an integer width")) try: initindent = stringify(args[2][0](context, mapping, args[2][1])) hangindent = stringify(args[3][0](context, mapping, args[3][1])) except IndexError: pass return templatefilters.fill(text, width, initindent, hangindent) def pad(context, mapping, args): """:pad(text, width[, fillchar=' '[, right=False]]): Pad text with a fill character.""" if not (2 <= len(args) <= 4): # i18n: "pad" is a keyword raise error.ParseError(_("pad() expects two to four arguments")) width = int(args[1][1]) text = stringify(args[0][0](context, mapping, args[0][1])) right = False fillchar = ' ' if len(args) > 2: fillchar = stringify(args[2][0](context, mapping, args[2][1])) if len(args) > 3: right = util.parsebool(args[3][1]) if right: return text.rjust(width, fillchar) else: return text.ljust(width, fillchar) def indent(context, mapping, args): """:indent(text, indentchars[, firstline]): Indents all non-empty lines with the characters given in the indentchars string. 
    An optional third parameter will override the indent for the first line
    only if present."""
    if not (2 <= len(args) <= 3):
        # i18n: "indent" is a keyword
        raise error.ParseError(_("indent() expects two or three arguments"))

    text = stringify(args[0][0](context, mapping, args[0][1]))
    indent = stringify(args[1][0](context, mapping, args[1][1]))

    if len(args) == 3:
        firstline = stringify(args[2][0](context, mapping, args[2][1]))
    else:
        firstline = indent

    # the indent function doesn't indent the first line, so we do it here
    return templatefilters.indent(firstline + text, indent)

def get(context, mapping, args):
    """:get(dict, key): Get an attribute/key from an object. Some keywords
    are complex types. This function allows you to obtain the value of an
    attribute on these types."""
    if len(args) != 2:
        # i18n: "get" is a keyword
        raise error.ParseError(_("get() expects two arguments"))

    dictarg = args[0][0](context, mapping, args[0][1])
    if not util.safehasattr(dictarg, 'get'):
        # i18n: "get" is a keyword
        raise error.ParseError(_("get() expects a dict as first argument"))

    key = args[1][0](context, mapping, args[1][1])
    return dictarg.get(key)

def if_(context, mapping, args):
    """:if(expr, then[, else]): Conditionally execute based on the result of
    an expression."""
    if not (2 <= len(args) <= 3):
        # i18n: "if" is a keyword
        raise error.ParseError(_("if expects two or three arguments"))

    # truthiness of the stringified condition decides the branch
    test = stringify(args[0][0](context, mapping, args[0][1]))
    if test:
        yield args[1][0](context, mapping, args[1][1])
    elif len(args) == 3:
        yield args[2][0](context, mapping, args[2][1])

def ifcontains(context, mapping, args):
    """:ifcontains(search, thing, then[, else]): Conditionally execute based
    on whether the item "search" is in "thing"."""
    if not (3 <= len(args) <= 4):
        # i18n: "ifcontains" is a keyword
        raise error.ParseError(_("ifcontains expects three or four arguments"))

    item = stringify(args[0][0](context, mapping, args[0][1]))
    items = args[1][0](context, mapping, args[1][1])

    if item in items:
        yield
 args[2][0](context, mapping, args[2][1])
    elif len(args) == 4:
        yield args[3][0](context, mapping, args[3][1])

def ifeq(context, mapping, args):
    """:ifeq(expr1, expr2, then[, else]): Conditionally execute based on
    whether 2 items are equivalent."""
    if not (3 <= len(args) <= 4):
        # i18n: "ifeq" is a keyword
        raise error.ParseError(_("ifeq expects three or four arguments"))

    # both sides are compared as strings
    test = stringify(args[0][0](context, mapping, args[0][1]))
    match = stringify(args[1][0](context, mapping, args[1][1]))
    if test == match:
        yield args[2][0](context, mapping, args[2][1])
    elif len(args) == 4:
        yield args[3][0](context, mapping, args[3][1])

def join(context, mapping, args):
    """:join(list, sep): Join items in a list with a delimiter."""
    if not (1 <= len(args) <= 2):
        # i18n: "join" is a keyword
        raise error.ParseError(_("join expects one or two arguments"))

    joinset = args[0][0](context, mapping, args[0][1])
    if util.safehasattr(joinset, 'itermaps'):
        # mappable values are rendered through their joinfmt formatter
        jf = joinset.joinfmt
        joinset = [jf(x) for x in joinset.itermaps()]

    joiner = " "
    if len(args) > 1:
        joiner = stringify(args[1][0](context, mapping, args[1][1]))

    # interleave the joiner between items without a trailing separator
    first = True
    for x in joinset:
        if first:
            first = False
        else:
            yield joiner
        yield x

def label(context, mapping, args):
    """:label(label, expr): Apply a label to generated content.
    Content with a label applied can result in additional post-processing,
    such as automatic colorization."""
    if len(args) != 2:
        # i18n: "label" is a keyword
        raise error.ParseError(_("label expects two arguments"))

    # ignore args[0] (the label string) since this is supposed to be a no-op
    yield args[1][0](context, mapping, args[1][1])

def latesttag(context, mapping, args):
    """:latesttag([pattern]): The global tags matching the given pattern on
    the most recent globally tagged ancestor of this changeset."""
    if len(args) > 1:
        # i18n: "latesttag" is a keyword
        raise error.ParseError(_("latesttag expects at most one argument"))

    pattern = None
    if len(args) == 1:
        pattern = stringify(args[0][0](context, mapping, args[0][1]))

    return templatekw.showlatesttags(pattern, **mapping)

def localdate(context, mapping, args):
    """:localdate(date[, tz]): Converts a date to the specified timezone.
    The default is local date."""
    if not (1 <= len(args) <= 2):
        # i18n: "localdate" is a keyword
        raise error.ParseError(_("localdate expects one or two arguments"))
    date = evalfuncarg(context, mapping, args[0])
    try:
        date = util.parsedate(date)
    except AttributeError:  # not str nor date tuple
        # i18n: "localdate" is a keyword
        raise error.ParseError(_("localdate expects a date information"))
    if len(args) >= 2:
        tzoffset = None
        tz = evalfuncarg(context, mapping, args[1])
        if isinstance(tz, str):
            # try a symbolic/named timezone first
            tzoffset = util.parsetimezone(tz)
        if tzoffset is None:
            # fall back to a numeric offset in seconds
            try:
                tzoffset = int(tz)
            except (TypeError, ValueError):
                # i18n: "localdate" is a keyword
                raise error.ParseError(_("localdate expects a timezone"))
    else:
        # no tz argument: use the local timezone offset
        tzoffset = util.makedate()[1]
    return (date[0], tzoffset)

def revset(context, mapping, args):
    """:revset(query[, formatargs...]): Execute a revision set query.
See :hg:`help revset`.""" if not len(args) > 0: # i18n: "revset" is a keyword raise error.ParseError(_("revset expects one or more arguments")) raw = stringify(args[0][0](context, mapping, args[0][1])) ctx = mapping['ctx'] repo = ctx.repo() def query(expr): m = revsetmod.match(repo.ui, expr) return m(repo) if len(args) > 1: formatargs = list([a[0](context, mapping, a[1]) for a in args[1:]]) revs = query(revsetmod.formatspec(raw, *formatargs)) revs = list([str(r) for r in revs]) else: revsetcache = mapping['cache'].setdefault("revsetcache", {}) if raw in revsetcache: revs = revsetcache[raw] else: revs = query(raw) revs = list([str(r) for r in revs]) revsetcache[raw] = revs return templatekw.showrevslist("revision", revs, **mapping) def rstdoc(context, mapping, args): """:rstdoc(text, style): Format ReStructuredText.""" if len(args) != 2: # i18n: "rstdoc" is a keyword raise error.ParseError(_("rstdoc expects two arguments")) text = stringify(args[0][0](context, mapping, args[0][1])) style = stringify(args[1][0](context, mapping, args[1][1])) return minirst.format(text, style=style, keep=['verbose']) def shortest(context, mapping, args): """:shortest(node, minlength=4): Obtain the shortest representation of a node.""" if not (1 <= len(args) <= 2): # i18n: "shortest" is a keyword raise error.ParseError(_("shortest() expects one or two arguments")) node = stringify(args[0][0](context, mapping, args[0][1])) minlength = 4 if len(args) > 1: minlength = int(args[1][1]) cl = mapping['ctx']._repo.changelog def isvalid(test): try: try: cl.index.partialmatch(test) except AttributeError: # Pure mercurial doesn't support partialmatch on the index. # Fallback to the slow way. 
                if cl._partialmatch(test) is None:
                    return False

            try:
                i = int(test)
                # if we are a pure int, then starting with zero will not be
                # confused as a rev; or, obviously, if the int is larger than
                # the value of the tip rev
                if test[0] == '0' or i > len(cl):
                    return True
                return False
            except ValueError:
                return True
        except error.RevlogError:
            return False

    # search outward from a starting prefix length for the shortest
    # still-unambiguous prefix of the node
    shortest = node
    startlength = max(6, minlength)
    length = startlength
    while True:
        test = node[:length]
        if isvalid(test):
            shortest = test
            if length == minlength or length > startlength:
                return shortest
            length -= 1
        else:
            length += 1
            if len(shortest) <= length:
                return shortest

def strip(context, mapping, args):
    """:strip(text[, chars]): Strip characters from a string. By default,
    strips all leading and trailing whitespace."""
    if not (1 <= len(args) <= 2):
        # i18n: "strip" is a keyword
        raise error.ParseError(_("strip expects one or two arguments"))

    text = stringify(args[0][0](context, mapping, args[0][1]))
    if len(args) == 2:
        chars = stringify(args[1][0](context, mapping, args[1][1]))
        return text.strip(chars)
    return text.strip()

def sub(context, mapping, args):
    """:sub(pattern, replacement, expression): Perform text substitution
    using regular expressions."""
    if len(args) != 3:
        # i18n: "sub" is a keyword
        raise error.ParseError(_("sub expects three arguments"))

    pat = stringify(args[0][0](context, mapping, args[0][1]))
    rpl = stringify(args[1][0](context, mapping, args[1][1]))
    src = stringify(args[2][0](context, mapping, args[2][1]))
    try:
        patre = re.compile(pat)
    except re.error:
        # i18n: "sub" is a keyword
        raise error.ParseError(_("sub got an invalid pattern: %s") % pat)
    try:
        yield patre.sub(rpl, src)
    except re.error:
        # i18n: "sub" is a keyword
        raise error.ParseError(_("sub got an invalid replacement: %s") % rpl)

def startswith(context, mapping, args):
    """:startswith(pattern, text): Returns the value from the "text" argument
    if it begins with the content from the "pattern" argument."""
    if len(args) != 2:
        # i18n: "startswith" is a keyword
        raise error.ParseError(_("startswith expects two arguments"))

    patn = stringify(args[0][0](context, mapping, args[0][1]))
    text = stringify(args[1][0](context, mapping, args[1][1]))
    if text.startswith(patn):
        return text
    return ''

def word(context, mapping, args):
    """:word(number, text[, separator]): Return the nth word from a string."""
    if not (2 <= len(args) <= 3):
        # i18n: "word" is a keyword
        raise error.ParseError(_("word expects two or three arguments, got %d")
                               % len(args))

    try:
        num = int(stringify(args[0][0](context, mapping, args[0][1])))
    except ValueError:
        # i18n: "word" is a keyword
        raise error.ParseError(_("word expects an integer index"))
    text = stringify(args[1][0](context, mapping, args[1][1]))
    if len(args) == 3:
        splitter = stringify(args[2][0](context, mapping, args[2][1]))
    else:
        # None makes str.split() split on runs of whitespace
        splitter = None

    tokens = text.split(splitter)
    # out-of-range indices (either sign) yield an empty string, not an error
    if num >= len(tokens) or num < -len(tokens):
        return ''
    else:
        return tokens[num]

# methods to interpret function arguments or inner expressions (e.g. {_(x)})
exprmethods = {
    "integer": lambda e, c: (runinteger, e[1]),
    "string": lambda e, c: (runstring, e[1]),
    "symbol": lambda e, c: (runsymbol, e[1]),
    "template": buildtemplate,
    "group": lambda e, c: compileexp(e[1], c, exprmethods),
#    ".": buildmember,
    "|": buildfilter,
    "%": buildmap,
    "func": buildfunc,
    }

# methods to interpret top-level template (e.g.
# {x}, {x|_}, {x % "y"})
methods = exprmethods.copy()
methods["integer"] = exprmethods["symbol"]  # '{1}' as variable

# table of template functions, keyed by their template-language name
funcs = {
    "date": date,
    "diff": diff,
    "fill": fill,
    "get": get,
    "if": if_,
    "ifcontains": ifcontains,
    "ifeq": ifeq,
    "indent": indent,
    "join": join,
    "label": label,
    "latesttag": latesttag,
    "localdate": localdate,
    "pad": pad,
    "revset": revset,
    "rstdoc": rstdoc,
    "shortest": shortest,
    "startswith": startswith,
    "strip": strip,
    "sub": sub,
    "word": word,
    }

# template engine

stringify = templatefilters.stringify

def _flatten(thing):
    '''yield a single stream from a possibly nested set of iterators'''
    if isinstance(thing, str):
        yield thing
    elif not util.safehasattr(thing, '__iter__'):
        if thing is not None:
            yield str(thing)
    else:
        for i in thing:
            if isinstance(i, str):
                yield i
            elif not util.safehasattr(i, '__iter__'):
                if i is not None:
                    yield str(i)
            elif i is not None:
                # recurse only one level at a time
                for j in _flatten(i):
                    yield j

def unquotestring(s):
    '''unwrap quotes: requires matching first and last quote characters'''
    if len(s) < 2 or s[0] != s[-1]:
        raise SyntaxError(_('unmatched quotes'))
    return s[1:-1]

class engine(object):
    '''template expansion engine.

    template expansion works like this. a map file contains key=value
    pairs. if value is quoted, it is treated as string. otherwise, it
    is treated as name of template file.

    templater is asked to expand a key in map. it looks up key, and
    looks for strings like this: {foo}. it expands {foo} by looking up
    foo in map, and substituting it. expansion is recursive: it stops
    when there is no more {foo} to replace.

    expansion also allows formatting and filtering.

    format uses key to expand each item in list. syntax is
    {key%format}.

    filter uses function to transform value.
    syntax is {key|filter1|filter2|...}.'''

    def __init__(self, loader, filters=None, defaults=None):
        # loader: callable(name) -> raw template text
        self._loader = loader
        if filters is None:
            filters = {}
        self._filters = filters
        if defaults is None:
            defaults = {}
        self._defaults = defaults
        # name -> compiled template cache
        self._cache = {}

    def _load(self, t):
        '''load, parse, and cache a template'''
        if t not in self._cache:
            # put poison to cut recursion while compiling 't'
            self._cache[t] = [(_runrecursivesymbol, t)]
            try:
                self._cache[t] = compiletemplate(self._loader(t), self)
            except: # re-raises
                del self._cache[t]
                raise
        return self._cache[t]

    def process(self, t, mapping):
        '''Perform expansion. t is name of map element to expand.
        mapping contains added elements for use during expansion. Is a
        generator.'''
        return _flatten(runtemplate(self, mapping, self._load(t)))

engines = {'default': engine}

def stylelist():
    """Return a comma-separated list of available map-cmdline styles."""
    paths = templatepaths()
    if not paths:
        return _('no templates found, try `hg debuginstall` for more info')
    dirlist = os.listdir(paths[0])
    stylelist = []
    for file in dirlist:
        # style files are named 'map-cmdline.<style>'
        split = file.split(".")
        if split[0] == "map-cmdline":
            stylelist.append(split[1])
    return ", ".join(sorted(stylelist))

class TemplateNotFound(error.Abort):
    pass

class templater(object):

    def __init__(self, mapfile, filters=None, defaults=None, cache=None,
                 minchunk=1024, maxchunk=65536):
        '''set up template engine.
        mapfile is name of file to read map definitions from.
        filters is dict of functions. each transforms a value into another.
        defaults is dict of default map definitions.'''
        if filters is None:
            filters = {}
        if defaults is None:
            defaults = {}
        if cache is None:
            cache = {}
        self.mapfile = mapfile or 'template'
        self.cache = cache.copy()
        self.map = {}
        if mapfile:
            self.base = os.path.dirname(mapfile)
        else:
            self.base = ''
        self.filters = templatefilters.filters.copy()
        self.filters.update(filters)
        self.defaults = defaults
        self.minchunk, self.maxchunk = minchunk, maxchunk
        # engine cache, keyed by engine type name
        self.ecache = {}

        if not mapfile:
            return
        if not os.path.exists(mapfile):
            raise error.Abort(_("style '%s' not found") % mapfile,
                              hint=_("available styles: %s") % stylelist())

        conf = config.config(includepaths=templatepaths())
        conf.read(mapfile)

        for key, val in conf[''].items():
            if not val:
                raise SyntaxError(_('%s: missing value') %
                                  conf.source('', key))
            if val[0] in "'\"":
                # quoted values are inline template strings
                try:
                    self.cache[key] = unquotestring(val)
                except SyntaxError as inst:
                    raise SyntaxError('%s: %s' %
                                      (conf.source('', key), inst.args[0]))
            else:
                # unquoted values name a template file, optionally prefixed
                # with an engine type as 'engine:path'
                val = 'default', val
                if ':' in val[1]:
                    val = val[1].split(':', 1)
                self.map[key] = val[0], os.path.join(self.base, val[1])

    def __contains__(self, key):
        return key in self.cache or key in self.map

    def load(self, t):
        '''Get the template for the given template name.
        Use a local cache.'''
        if t not in self.cache:
            try:
                self.cache[t] = util.readfile(self.map[t][1])
            except KeyError as inst:
                raise TemplateNotFound(_('"%s" not in template map') %
                                       inst.args[0])
            except IOError as inst:
                raise IOError(inst.args[0], _('template file %s: %s') %
                              (self.map[t][1], inst.args[1]))
        return self.cache[t]

    def __call__(self, t, **mapping):
        # pick the engine type configured for this template (or 'default')
        ttype = t in self.map and self.map[t][0] or 'default'
        if ttype not in self.ecache:
            self.ecache[ttype] = engines[ttype](self.load,
                                                self.filters, self.defaults)
        proc = self.ecache[ttype]

        stream = proc.process(t, mapping)
        if self.minchunk:
            # re-chunk the output stream for better I/O behavior
            stream = util.increasingchunks(stream, min=self.minchunk,
                                           max=self.maxchunk)
        return stream

def templatepaths():
    '''return locations used for template files.'''
    pathsrel = ['templates']
    paths = [os.path.normpath(os.path.join(util.datapath, f))
             for f in pathsrel]
    return [p for p in paths if os.path.isdir(p)]

def templatepath(name):
    '''return location of template file.
    returns None if not found.'''
    for p in templatepaths():
        f = os.path.join(p, name)
        if os.path.exists(f):
            return f
    return None

def stylemap(styles, paths=None):
    """Return path to mapfile for a given style.

    Searches mapfile in the following locations:
    1. templatepath/style/map
    2. templatepath/map-style
    3.
templatepath/map """ if paths is None: paths = templatepaths() elif isinstance(paths, str): paths = [paths] if isinstance(styles, str): styles = [styles] for style in styles: # only plain name is allowed to honor template paths if (not style or style in (os.curdir, os.pardir) or os.sep in style or os.altsep and os.altsep in style): continue locations = [os.path.join(style, 'map'), 'map-' + style] locations.append('map') for path in paths: for location in locations: mapfile = os.path.join(path, location) if os.path.isfile(mapfile): return style, mapfile raise RuntimeError("No hgweb templates found in %r" % paths) # tell hggettext to extract docstrings from these functions: i18nfunctions = funcs.values() mercurial-3.7.3/mercurial/util.h0000644000175000017500000001014112676531525016235 0ustar mpmmpm00000000000000/* util.h - utility functions for interfacing with the various python APIs. This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. */ #ifndef _HG_UTIL_H_ #define _HG_UTIL_H_ #if PY_MAJOR_VERSION >= 3 #define IS_PY3K #define PyInt_FromLong PyLong_FromLong #define PyInt_AsLong PyLong_AsLong /* Mapping of some of the python < 2.x PyString* functions to py3k's PyUnicode. The commented names below represent those that are present in the PyBytes definitions for python < 2.6 (below in this file) that don't have a direct implementation. 
 */

/* Python 3: alias the removed PyString_* API onto PyUnicode_*. */
#define PyStringObject PyUnicodeObject
#define PyString_Type PyUnicode_Type
#define PyString_Check PyUnicode_Check
#define PyString_CheckExact PyUnicode_CheckExact
#define PyString_CHECK_INTERNED PyUnicode_CHECK_INTERNED
/* NOTE(review): PyString_AS_STRING returned a char*, but
   PyUnicode_AsLatin1String returns a new bytes object — confirm callers
   do not rely on char* semantics. */
#define PyString_AS_STRING PyUnicode_AsLatin1String
#define PyString_GET_SIZE PyUnicode_GET_SIZE
#define PyString_FromStringAndSize PyUnicode_FromStringAndSize
#define PyString_FromString PyUnicode_FromString
#define PyString_FromFormatV PyUnicode_FromFormatV
#define PyString_FromFormat PyUnicode_FromFormat
/* #define PyString_Size PyUnicode_GET_SIZE */
/* #define PyString_AsString */
/* #define PyString_Repr */
#define PyString_Concat PyUnicode_Concat
#define PyString_ConcatAndDel PyUnicode_AppendAndDel
#define _PyString_Resize PyUnicode_Resize
/* #define _PyString_Eq */
#define PyString_Format PyUnicode_Format
/* #define _PyString_FormatLong */
/* #define PyString_DecodeEscape */
#define _PyString_Join PyUnicode_Join
#define PyString_Decode PyUnicode_Decode
#define PyString_Encode PyUnicode_Encode
#define PyString_AsEncodedObject PyUnicode_AsEncodedObject
#define PyString_AsEncodedString PyUnicode_AsEncodedString
#define PyString_AsDecodedObject PyUnicode_AsDecodedObject
#define PyString_AsDecodedString PyUnicode_AsDecodedUnicode
/* #define PyString_AsStringAndSize */
#define _PyString_InsertThousandsGrouping _PyUnicode_InsertThousandsGrouping
#endif /* PY_MAJOR_VERSION */

#ifdef _WIN32
#ifdef _MSC_VER
/* msvc 6.0 has problems */
#define inline __inline
typedef signed char int8_t;
typedef short int16_t;
typedef long int32_t;
typedef __int64 int64_t;
typedef unsigned char uint8_t;
typedef unsigned short uint16_t;
typedef unsigned long uint32_t;
typedef unsigned __int64 uint64_t;
#else
/* NOTE(review): the header names of the #include lines below were lost in
   extraction (angle-bracket contents stripped) — restore them from the
   upstream util.h (likely <stdint.h>, <inttypes.h>, <sys/types.h>). */
#include
#endif
#else
/* not windows */
#include
#if defined __BEOS__ && !defined __HAIKU__
#include
#else
#include
#endif
#include
#endif

#if defined __hpux || defined __SUNPRO_C || defined _AIX
#define inline
#endif

#ifdef __linux
#define inline __inline
#endif
typedef struct { PyObject_HEAD char state; int mode; int size; int mtime; } dirstateTupleObject; extern PyTypeObject dirstateTupleType; #define dirstate_tuple_check(op) (Py_TYPE(op) == &dirstateTupleType) static inline uint32_t getbe32(const char *c) { const unsigned char *d = (const unsigned char *)c; return ((d[0] << 24) | (d[1] << 16) | (d[2] << 8) | (d[3])); } static inline int16_t getbeint16(const char *c) { const unsigned char *d = (const unsigned char *)c; return ((d[0] << 8) | (d[1])); } static inline uint16_t getbeuint16(const char *c) { const unsigned char *d = (const unsigned char *)c; return ((d[0] << 8) | (d[1])); } static inline void putbe32(uint32_t x, char *c) { c[0] = (x >> 24) & 0xff; c[1] = (x >> 16) & 0xff; c[2] = (x >> 8) & 0xff; c[3] = (x) & 0xff; } static inline double getbefloat64(const char *c) { const unsigned char *d = (const unsigned char *)c; double ret; int i; uint64_t t = 0; for (i = 0; i < 8; i++) { t = (t<<8) + d[i]; } memcpy(&ret, &t, sizeof(t)); return ret; } /* This should be kept in sync with normcasespecs in encoding.py. */ enum normcase_spec { NORMCASE_LOWER = -1, NORMCASE_UPPER = 1, NORMCASE_OTHER = 0 }; #define MIN(a, b) (((a)<(b))?(a):(b)) /* VC9 doesn't include bool and lacks stdbool.h based on my searching */ #if defined(_MSC_VER) || __STDC_VERSION__ < 199901L #define true 1 #define false 0 typedef unsigned char bool; #else #include #endif #endif /* _HG_UTIL_H_ */ mercurial-3.7.3/mercurial/bundle2.py0000644000175000017500000015737712676531524017042 0ustar mpmmpm00000000000000# bundle2.py - generic container format to transmit arbitrary data. # # Copyright 2013 Facebook, Inc. # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. """Handling of the new bundle2 format The goal of bundle2 is to act as an atomically packet to transmit a set of payloads in an application agnostic way. 
It consist in a sequence of "parts" that will be handed to and processed by the application layer. General format architecture =========================== The format is architectured as follow - magic string - stream level parameters - payload parts (any number) - end of stream marker. the Binary format ============================ All numbers are unsigned and big-endian. stream level parameters ------------------------ Binary format is as follow :params size: int32 The total number of Bytes used by the parameters :params value: arbitrary number of Bytes A blob of `params size` containing the serialized version of all stream level parameters. The blob contains a space separated list of parameters. Parameters with value are stored in the form `=`. Both name and value are urlquoted. Empty name are obviously forbidden. Name MUST start with a letter. If this first letter is lower case, the parameter is advisory and can be safely ignored. However when the first letter is capital, the parameter is mandatory and the bundling process MUST stop if he is not able to proceed it. Stream parameters use a simple textual format for two main reasons: - Stream level parameters should remain simple and we want to discourage any crazy usage. - Textual data allow easy human inspection of a bundle2 header in case of troubles. Any Applicative level options MUST go into a bundle2 part instead. Payload part ------------------------ Binary format is as follow :header size: int32 The total number of Bytes used by the part header. When the header is empty (size = 0) this is interpreted as the end of stream marker. :header: The header defines how to interpret the part. It contains two piece of data: the part type, and the part parameters. The part type is used to route an application level handler, that can interpret payload. Part parameters are passed to the application level handler. 
They are meant to convey information that will help the application level object to interpret the part payload. The binary format of the header is has follow :typesize: (one byte) :parttype: alphanumerical part name (restricted to [a-zA-Z0-9_:-]*) :partid: A 32bits integer (unique in the bundle) that can be used to refer to this part. :parameters: Part's parameter may have arbitrary content, the binary structure is:: :mandatory-count: 1 byte, number of mandatory parameters :advisory-count: 1 byte, number of advisory parameters :param-sizes: N couple of bytes, where N is the total number of parameters. Each couple contains (, `. `chunksize` is an int32, `chunkdata` are plain bytes (as much as `chunksize` says)` The payload part is concluded by a zero size chunk. The current implementation always produces either zero or one chunk. This is an implementation limitation that will ultimately be lifted. `chunksize` can be negative to trigger special case processing. No such processing is in place yet. Bundle processing ============================ Each part is processed in order using a "part handler". Handler are registered for a certain part type. The matching of a part to its handler is case insensitive. The case of the part type is used to know if a part is mandatory or advisory. If the Part type contains any uppercase char it is considered mandatory. When no handler is known for a Mandatory part, the process is aborted and an exception is raised. If the part is advisory and no handler is known, the part is ignored. When the process is aborted, the full bundle is still read from the stream to keep the channel usable. But none of the part read from an abort are processed. In the future, dropping the stream may become an option for channel we do not care to preserve. """ from __future__ import absolute_import import errno import re import string import struct import sys import urllib from .i18n import _ from . 
import ( changegroup, error, obsolete, pushkey, tags, url, util, ) _pack = struct.pack _unpack = struct.unpack _fstreamparamsize = '>i' _fpartheadersize = '>i' _fparttypesize = '>B' _fpartid = '>I' _fpayloadsize = '>i' _fpartparamcount = '>BB' preferedchunksize = 4096 _parttypeforbidden = re.compile('[^a-zA-Z0-9_:-]') def outdebug(ui, message): """debug regarding output stream (bundling)""" if ui.configbool('devel', 'bundle2.debug', False): ui.debug('bundle2-output: %s\n' % message) def indebug(ui, message): """debug on input stream (unbundling)""" if ui.configbool('devel', 'bundle2.debug', False): ui.debug('bundle2-input: %s\n' % message) def validateparttype(parttype): """raise ValueError if a parttype contains invalid character""" if _parttypeforbidden.search(parttype): raise ValueError(parttype) def _makefpartparamsizes(nbparams): """return a struct format to read part parameter sizes The number parameters is variable so we need to build that format dynamically. """ return '>'+('BB'*nbparams) parthandlermapping = {} def parthandler(parttype, params=()): """decorator that register a function as a bundle2 part handler eg:: @parthandler('myparttype', ('mandatory', 'param', 'handled')) def myparttypehandler(...): '''process a part of type "my part".''' ... """ validateparttype(parttype) def _decorator(func): lparttype = parttype.lower() # enforce lower case matching. assert lparttype not in parthandlermapping parthandlermapping[lparttype] = func func.params = frozenset(params) return func return _decorator class unbundlerecords(object): """keep record of what happens during and unbundle New records are added using `records.add('cat', obj)`. Where 'cat' is a category of record and obj is an arbitrary object. `records['cat']` will return all entries of this category 'cat'. Iterating on the object itself will yield `('category', obj)` tuples for all entries. All iterations happens in chronological order. 
""" def __init__(self): self._categories = {} self._sequences = [] self._replies = {} def add(self, category, entry, inreplyto=None): """add a new record of a given category. The entry can then be retrieved in the list returned by self['category'].""" self._categories.setdefault(category, []).append(entry) self._sequences.append((category, entry)) if inreplyto is not None: self.getreplies(inreplyto).add(category, entry) def getreplies(self, partid): """get the records that are replies to a specific part""" return self._replies.setdefault(partid, unbundlerecords()) def __getitem__(self, cat): return tuple(self._categories.get(cat, ())) def __iter__(self): return iter(self._sequences) def __len__(self): return len(self._sequences) def __nonzero__(self): return bool(self._sequences) class bundleoperation(object): """an object that represents a single bundling process Its purpose is to carry unbundle-related objects and states. A new object should be created at the beginning of each bundle processing. The object is to be returned by the processing function. The object has very little content now it will ultimately contain: * an access to the repo the bundle is applied to, * a ui object, * a way to retrieve a transaction to add changes to the repo, * a way to record the result of processing each part, * a way to construct a bundle response when applicable. 
""" def __init__(self, repo, transactiongetter, captureoutput=True): self.repo = repo self.ui = repo.ui self.records = unbundlerecords() self.gettransaction = transactiongetter self.reply = None self.captureoutput = captureoutput class TransactionUnavailable(RuntimeError): pass def _notransaction(): """default method to get a transaction while processing a bundle Raise an exception to highlight the fact that no transaction was expected to be created""" raise TransactionUnavailable() def applybundle(repo, unbundler, tr, source=None, url=None, op=None): # transform me into unbundler.apply() as soon as the freeze is lifted tr.hookargs['bundle2'] = '1' if source is not None and 'source' not in tr.hookargs: tr.hookargs['source'] = source if url is not None and 'url' not in tr.hookargs: tr.hookargs['url'] = url return processbundle(repo, unbundler, lambda: tr, op=op) def processbundle(repo, unbundler, transactiongetter=None, op=None): """This function process a bundle, apply effect to/from a repo It iterates over each part then searches for and uses the proper handling code to process the part. Parts are processed in order. This is very early version of this function that will be strongly reworked before final usage. Unknown Mandatory part will abort the process. It is temporarily possible to provide a prebuilt bundleoperation to the function. This is used to ensure output is properly propagated in case of an error during the unbundling. This output capturing part will likely be reworked and this ability will probably go away in the process. """ if op is None: if transactiongetter is None: transactiongetter = _notransaction op = bundleoperation(repo, transactiongetter) # todo: # - replace this is a init function soon. 
# - exception catching unbundler.params if repo.ui.debugflag: msg = ['bundle2-input-bundle:'] if unbundler.params: msg.append(' %i params') if op.gettransaction is None: msg.append(' no-transaction') else: msg.append(' with-transaction') msg.append('\n') repo.ui.debug(''.join(msg)) iterparts = enumerate(unbundler.iterparts()) part = None nbpart = 0 try: for nbpart, part in iterparts: _processpart(op, part) except BaseException as exc: for nbpart, part in iterparts: # consume the bundle content part.seek(0, 2) # Small hack to let caller code distinguish exceptions from bundle2 # processing from processing the old format. This is mostly # needed to handle different return codes to unbundle according to the # type of bundle. We should probably clean up or drop this return code # craziness in a future version. exc.duringunbundle2 = True salvaged = [] replycaps = None if op.reply is not None: salvaged = op.reply.salvageoutput() replycaps = op.reply.capabilities exc._replycaps = replycaps exc._bundle2salvagedoutput = salvaged raise finally: repo.ui.debug('bundle2-input-bundle: %i parts total\n' % nbpart) return op def _processpart(op, part): """process a single part from a bundle The part is guaranteed to have been fully consumed when the function exits (even if an exception is raised).""" status = 'unknown' # used by debug output try: try: handler = parthandlermapping.get(part.type) if handler is None: status = 'unsupported-type' raise error.BundleUnknownFeatureError(parttype=part.type) indebug(op.ui, 'found a handler for part %r' % part.type) unknownparams = part.mandatorykeys - handler.params if unknownparams: unknownparams = list(unknownparams) unknownparams.sort() status = 'unsupported-params (%s)' % unknownparams raise error.BundleUnknownFeatureError(parttype=part.type, params=unknownparams) status = 'supported' except error.BundleUnknownFeatureError as exc: if part.mandatory: # mandatory parts raise indebug(op.ui, 'ignoring unsupported advisory part %s' % exc) 
return # skip to part processing finally: if op.ui.debugflag: msg = ['bundle2-input-part: "%s"' % part.type] if not part.mandatory: msg.append(' (advisory)') nbmp = len(part.mandatorykeys) nbap = len(part.params) - nbmp if nbmp or nbap: msg.append(' (params:') if nbmp: msg.append(' %i mandatory' % nbmp) if nbap: msg.append(' %i advisory' % nbmp) msg.append(')') msg.append(' %s\n' % status) op.ui.debug(''.join(msg)) # handler is called outside the above try block so that we don't # risk catching KeyErrors from anything other than the # parthandlermapping lookup (any KeyError raised by handler() # itself represents a defect of a different variety). output = None if op.captureoutput and op.reply is not None: op.ui.pushbuffer(error=True, subproc=True) output = '' try: handler(op, part) finally: if output is not None: output = op.ui.popbuffer() if output: outpart = op.reply.newpart('output', data=output, mandatory=False) outpart.addparam('in-reply-to', str(part.id), mandatory=False) finally: # consume the part content to not corrupt the stream. part.seek(0, 2) def decodecaps(blob): """decode a bundle2 caps bytes blob into a dictionary The blob is a list of capabilities (one per line) Capabilities may have values using a line of the form:: capability=value1,value2,value3 The values are always a list.""" caps = {} for line in blob.splitlines(): if not line: continue if '=' not in line: key, vals = line, () else: key, vals = line.split('=', 1) vals = vals.split(',') key = urllib.unquote(key) vals = [urllib.unquote(v) for v in vals] caps[key] = vals return caps def encodecaps(caps): """encode a bundle2 caps dictionary into a bytes blob""" chunks = [] for ca in sorted(caps): vals = caps[ca] ca = urllib.quote(ca) vals = [urllib.quote(v) for v in vals] if vals: ca = "%s=%s" % (ca, ','.join(vals)) chunks.append(ca) return '\n'.join(chunks) class bundle20(object): """represent an outgoing bundle2 container Use the `addparam` method to add stream level parameter. 
and `newpart` to populate it. Then call `getchunks` to retrieve all the binary chunks of data that compose the bundle2 container.""" _magicstring = 'HG20' def __init__(self, ui, capabilities=()): self.ui = ui self._params = [] self._parts = [] self.capabilities = dict(capabilities) self._compressor = util.compressors[None]() def setcompression(self, alg): """setup core part compression to """ if alg is None: return assert not any(n.lower() == 'Compression' for n, v in self._params) self.addparam('Compression', alg) self._compressor = util.compressors[alg]() @property def nbparts(self): """total number of parts added to the bundler""" return len(self._parts) # methods used to defines the bundle2 content def addparam(self, name, value=None): """add a stream level parameter""" if not name: raise ValueError('empty parameter name') if name[0] not in string.letters: raise ValueError('non letter first character: %r' % name) self._params.append((name, value)) def addpart(self, part): """add a new part to the bundle2 container Parts contains the actual applicative payload.""" assert part.id is None part.id = len(self._parts) # very cheap counter self._parts.append(part) def newpart(self, typeid, *args, **kwargs): """create a new part and add it to the containers As the part is directly added to the containers. For now, this means that any failure to properly initialize the part after calling ``newpart`` should result in a failure of the whole bundling process. 
You can still fall back to manually create and add if you need better control.""" part = bundlepart(typeid, *args, **kwargs) self.addpart(part) return part # methods used to generate the bundle2 stream def getchunks(self): if self.ui.debugflag: msg = ['bundle2-output-bundle: "%s",' % self._magicstring] if self._params: msg.append(' (%i params)' % len(self._params)) msg.append(' %i parts total\n' % len(self._parts)) self.ui.debug(''.join(msg)) outdebug(self.ui, 'start emission of %s stream' % self._magicstring) yield self._magicstring param = self._paramchunk() outdebug(self.ui, 'bundle parameter: %s' % param) yield _pack(_fstreamparamsize, len(param)) if param: yield param # starting compression for chunk in self._getcorechunk(): yield self._compressor.compress(chunk) yield self._compressor.flush() def _paramchunk(self): """return a encoded version of all stream parameters""" blocks = [] for par, value in self._params: par = urllib.quote(par) if value is not None: value = urllib.quote(value) par = '%s=%s' % (par, value) blocks.append(par) return ' '.join(blocks) def _getcorechunk(self): """yield chunk for the core part of the bundle (all but headers and parameters)""" outdebug(self.ui, 'start of parts') for part in self._parts: outdebug(self.ui, 'bundle part: "%s"' % part.type) for chunk in part.getchunks(ui=self.ui): yield chunk outdebug(self.ui, 'end of bundle') yield _pack(_fpartheadersize, 0) def salvageoutput(self): """return a list with a copy of all output parts in the bundle This is meant to be used during error handling to make sure we preserve server output""" salvaged = [] for part in self._parts: if part.type.startswith('output'): salvaged.append(part.copy()) return salvaged class unpackermixin(object): """A mixin to extract bytes and struct data from a stream""" def __init__(self, fp): self._fp = fp self._seekable = (util.safehasattr(fp, 'seek') and util.safehasattr(fp, 'tell')) def _unpack(self, format): """unpack this struct format from the stream""" 
data = self._readexact(struct.calcsize(format)) return _unpack(format, data) def _readexact(self, size): """read exactly bytes from the stream""" return changegroup.readexactly(self._fp, size) def seek(self, offset, whence=0): """move the underlying file pointer""" if self._seekable: return self._fp.seek(offset, whence) else: raise NotImplementedError(_('File pointer is not seekable')) def tell(self): """return the file offset, or None if file is not seekable""" if self._seekable: try: return self._fp.tell() except IOError as e: if e.errno == errno.ESPIPE: self._seekable = False else: raise return None def close(self): """close underlying file""" if util.safehasattr(self._fp, 'close'): return self._fp.close() def getunbundler(ui, fp, magicstring=None): """return a valid unbundler object for a given magicstring""" if magicstring is None: magicstring = changegroup.readexactly(fp, 4) magic, version = magicstring[0:2], magicstring[2:4] if magic != 'HG': raise error.Abort(_('not a Mercurial bundle')) unbundlerclass = formatmap.get(version) if unbundlerclass is None: raise error.Abort(_('unknown bundle version %s') % version) unbundler = unbundlerclass(ui, fp) indebug(ui, 'start processing of %s stream' % magicstring) return unbundler class unbundle20(unpackermixin): """interpret a bundle2 stream This class is fed with a binary stream and yields parts through its `iterparts` methods.""" _magicstring = 'HG20' def __init__(self, ui, fp): """If header is specified, we do not read it out of the stream.""" self.ui = ui self._decompressor = util.decompressors[None] self._compressed = None super(unbundle20, self).__init__(fp) @util.propertycache def params(self): """dictionary of stream level parameters""" indebug(self.ui, 'reading bundle2 stream parameters') params = {} paramssize = self._unpack(_fstreamparamsize)[0] if paramssize < 0: raise error.BundleValueError('negative bundle param size: %i' % paramssize) if paramssize: params = self._readexact(paramssize) params = 
self._processallparams(params) return params def _processallparams(self, paramsblock): """""" params = {} for p in paramsblock.split(' '): p = p.split('=', 1) p = [urllib.unquote(i) for i in p] if len(p) < 2: p.append(None) self._processparam(*p) params[p[0]] = p[1] return params def _processparam(self, name, value): """process a parameter, applying its effect if needed Parameter starting with a lower case letter are advisory and will be ignored when unknown. Those starting with an upper case letter are mandatory and will this function will raise a KeyError when unknown. Note: no option are currently supported. Any input will be either ignored or failing. """ if not name: raise ValueError('empty parameter name') if name[0] not in string.letters: raise ValueError('non letter first character: %r' % name) try: handler = b2streamparamsmap[name.lower()] except KeyError: if name[0].islower(): indebug(self.ui, "ignoring unknown parameter %r" % name) else: raise error.BundleUnknownFeatureError(params=(name,)) else: handler(self, name, value) def _forwardchunks(self): """utility to transfer a bundle2 as binary This is made necessary by the fact the 'getbundle' command over 'ssh' have no way to know then the reply end, relying on the bundle to be interpreted to know its end. This is terrible and we are sorry, but we needed to move forward to get general delta enabled. 
""" yield self._magicstring assert 'params' not in vars(self) paramssize = self._unpack(_fstreamparamsize)[0] if paramssize < 0: raise error.BundleValueError('negative bundle param size: %i' % paramssize) yield _pack(_fstreamparamsize, paramssize) if paramssize: params = self._readexact(paramssize) self._processallparams(params) yield params assert self._decompressor is util.decompressors[None] # From there, payload might need to be decompressed self._fp = self._decompressor(self._fp) emptycount = 0 while emptycount < 2: # so we can brainlessly loop assert _fpartheadersize == _fpayloadsize size = self._unpack(_fpartheadersize)[0] yield _pack(_fpartheadersize, size) if size: emptycount = 0 else: emptycount += 1 continue if size == flaginterrupt: continue elif size < 0: raise error.BundleValueError('negative chunk size: %i') yield self._readexact(size) def iterparts(self): """yield all parts contained in the stream""" # make sure param have been loaded self.params # From there, payload need to be decompressed self._fp = self._decompressor(self._fp) indebug(self.ui, 'start extraction of bundle2 parts') headerblock = self._readpartheader() while headerblock is not None: part = unbundlepart(self.ui, headerblock, self._fp) yield part part.seek(0, 2) headerblock = self._readpartheader() indebug(self.ui, 'end of bundle2 stream') def _readpartheader(self): """reads a part header size and return the bytes blob returns None if empty""" headersize = self._unpack(_fpartheadersize)[0] if headersize < 0: raise error.BundleValueError('negative part header size: %i' % headersize) indebug(self.ui, 'part header size: %i' % headersize) if headersize: return self._readexact(headersize) return None def compressed(self): self.params # load params return self._compressed formatmap = {'20': unbundle20} b2streamparamsmap = {} def b2streamparamhandler(name): """register a handler for a stream level parameter""" def decorator(func): assert name not in formatmap b2streamparamsmap[name] = func 
return func return decorator @b2streamparamhandler('compression') def processcompression(unbundler, param, value): """read compression parameter and install payload decompression""" if value not in util.decompressors: raise error.BundleUnknownFeatureError(params=(param,), values=(value,)) unbundler._decompressor = util.decompressors[value] if value is not None: unbundler._compressed = True class bundlepart(object): """A bundle2 part contains application level payload The part `type` is used to route the part to the application level handler. The part payload is contained in ``part.data``. It could be raw bytes or a generator of byte chunks. You can add parameters to the part using the ``addparam`` method. Parameters can be either mandatory (default) or advisory. Remote side should be able to safely ignore the advisory ones. Both data and parameters cannot be modified after the generation has begun. """ def __init__(self, parttype, mandatoryparams=(), advisoryparams=(), data='', mandatory=True): validateparttype(parttype) self.id = None self.type = parttype self._data = data self._mandatoryparams = list(mandatoryparams) self._advisoryparams = list(advisoryparams) # checking for duplicated entries self._seenparams = set() for pname, __ in self._mandatoryparams + self._advisoryparams: if pname in self._seenparams: raise RuntimeError('duplicated params: %s' % pname) self._seenparams.add(pname) # status of the part's generation: # - None: not started, # - False: currently generated, # - True: generation done. self._generated = None self.mandatory = mandatory def copy(self): """return a copy of the part The new part have the very same content but no partid assigned yet. 
Parts with generated data cannot be copied.""" assert not util.safehasattr(self.data, 'next') return self.__class__(self.type, self._mandatoryparams, self._advisoryparams, self._data, self.mandatory) # methods used to defines the part content @property def data(self): return self._data @data.setter def data(self, data): if self._generated is not None: raise error.ReadOnlyPartError('part is being generated') self._data = data @property def mandatoryparams(self): # make it an immutable tuple to force people through ``addparam`` return tuple(self._mandatoryparams) @property def advisoryparams(self): # make it an immutable tuple to force people through ``addparam`` return tuple(self._advisoryparams) def addparam(self, name, value='', mandatory=True): if self._generated is not None: raise error.ReadOnlyPartError('part is being generated') if name in self._seenparams: raise ValueError('duplicated params: %s' % name) self._seenparams.add(name) params = self._advisoryparams if mandatory: params = self._mandatoryparams params.append((name, value)) # methods used to generates the bundle2 stream def getchunks(self, ui): if self._generated is not None: raise RuntimeError('part can only be consumed once') self._generated = False if ui.debugflag: msg = ['bundle2-output-part: "%s"' % self.type] if not self.mandatory: msg.append(' (advisory)') nbmp = len(self.mandatoryparams) nbap = len(self.advisoryparams) if nbmp or nbap: msg.append(' (params:') if nbmp: msg.append(' %i mandatory' % nbmp) if nbap: msg.append(' %i advisory' % nbmp) msg.append(')') if not self.data: msg.append(' empty payload') elif util.safehasattr(self.data, 'next'): msg.append(' streamed payload') else: msg.append(' %i bytes payload' % len(self.data)) msg.append('\n') ui.debug(''.join(msg)) #### header if self.mandatory: parttype = self.type.upper() else: parttype = self.type.lower() outdebug(ui, 'part %s: "%s"' % (self.id, parttype)) ## parttype header = [_pack(_fparttypesize, len(parttype)), parttype, 
_pack(_fpartid, self.id), ] ## parameters # count manpar = self.mandatoryparams advpar = self.advisoryparams header.append(_pack(_fpartparamcount, len(manpar), len(advpar))) # size parsizes = [] for key, value in manpar: parsizes.append(len(key)) parsizes.append(len(value)) for key, value in advpar: parsizes.append(len(key)) parsizes.append(len(value)) paramsizes = _pack(_makefpartparamsizes(len(parsizes) / 2), *parsizes) header.append(paramsizes) # key, value for key, value in manpar: header.append(key) header.append(value) for key, value in advpar: header.append(key) header.append(value) ## finalize header headerchunk = ''.join(header) outdebug(ui, 'header chunk size: %i' % len(headerchunk)) yield _pack(_fpartheadersize, len(headerchunk)) yield headerchunk ## payload try: for chunk in self._payloadchunks(): outdebug(ui, 'payload chunk size: %i' % len(chunk)) yield _pack(_fpayloadsize, len(chunk)) yield chunk except GeneratorExit: # GeneratorExit means that nobody is listening for our # results anyway, so just bail quickly rather than trying # to produce an error part. ui.debug('bundle2-generatorexit\n') raise except BaseException as exc: # backup exception data for later ui.debug('bundle2-input-stream-interrupt: encoding exception %s' % exc) exc_info = sys.exc_info() msg = 'unexpected error: %s' % exc interpart = bundlepart('error:abort', [('message', msg)], mandatory=False) interpart.id = 0 yield _pack(_fpayloadsize, -1) for chunk in interpart.getchunks(ui=ui): yield chunk outdebug(ui, 'closing payload chunk') # abort current part payload yield _pack(_fpayloadsize, 0) raise exc_info[0], exc_info[1], exc_info[2] # end of payload outdebug(ui, 'closing payload chunk') yield _pack(_fpayloadsize, 0) self._generated = True def _payloadchunks(self): """yield chunks of a the part payload Exists to handle the different methods to provide data to a part.""" # we only support fixed size data now. # This will be improved in the future. 
if util.safehasattr(self.data, 'next'): buff = util.chunkbuffer(self.data) chunk = buff.read(preferedchunksize) while chunk: yield chunk chunk = buff.read(preferedchunksize) elif len(self.data): yield self.data flaginterrupt = -1 class interrupthandler(unpackermixin): """read one part and process it with restricted capability This allows to transmit exception raised on the producer size during part iteration while the consumer is reading a part. Part processed in this manner only have access to a ui object,""" def __init__(self, ui, fp): super(interrupthandler, self).__init__(fp) self.ui = ui def _readpartheader(self): """reads a part header size and return the bytes blob returns None if empty""" headersize = self._unpack(_fpartheadersize)[0] if headersize < 0: raise error.BundleValueError('negative part header size: %i' % headersize) indebug(self.ui, 'part header size: %i\n' % headersize) if headersize: return self._readexact(headersize) return None def __call__(self): self.ui.debug('bundle2-input-stream-interrupt:' ' opening out of band context\n') indebug(self.ui, 'bundle2 stream interruption, looking for a part.') headerblock = self._readpartheader() if headerblock is None: indebug(self.ui, 'no part found during interruption.') return part = unbundlepart(self.ui, headerblock, self._fp) op = interruptoperation(self.ui) _processpart(op, part) self.ui.debug('bundle2-input-stream-interrupt:' ' closing out of band context\n') class interruptoperation(object): """A limited operation to be use by part handler during interruption It only have access to an ui object. 
""" def __init__(self, ui): self.ui = ui self.reply = None self.captureoutput = False @property def repo(self): raise RuntimeError('no repo access from stream interruption') def gettransaction(self): raise TransactionUnavailable('no repo access from stream interruption') class unbundlepart(unpackermixin): """a bundle part read from a bundle""" def __init__(self, ui, header, fp): super(unbundlepart, self).__init__(fp) self.ui = ui # unbundle state attr self._headerdata = header self._headeroffset = 0 self._initialized = False self.consumed = False # part data self.id = None self.type = None self.mandatoryparams = None self.advisoryparams = None self.params = None self.mandatorykeys = () self._payloadstream = None self._readheader() self._mandatory = None self._chunkindex = [] #(payload, file) position tuples for chunk starts self._pos = 0 def _fromheader(self, size): """return the next byte from the header""" offset = self._headeroffset data = self._headerdata[offset:(offset + size)] self._headeroffset = offset + size return data def _unpackheader(self, format): """read given format from header This automatically compute the size of the format to read.""" data = self._fromheader(struct.calcsize(format)) return _unpack(format, data) def _initparams(self, mandatoryparams, advisoryparams): """internal function to setup all logic related parameters""" # make it read only to prevent people touching it by mistake. 
self.mandatoryparams = tuple(mandatoryparams) self.advisoryparams = tuple(advisoryparams) # user friendly UI self.params = dict(self.mandatoryparams) self.params.update(dict(self.advisoryparams)) self.mandatorykeys = frozenset(p[0] for p in mandatoryparams) def _payloadchunks(self, chunknum=0): '''seek to specified chunk and start yielding data''' if len(self._chunkindex) == 0: assert chunknum == 0, 'Must start with chunk 0' self._chunkindex.append((0, super(unbundlepart, self).tell())) else: assert chunknum < len(self._chunkindex), \ 'Unknown chunk %d' % chunknum super(unbundlepart, self).seek(self._chunkindex[chunknum][1]) pos = self._chunkindex[chunknum][0] payloadsize = self._unpack(_fpayloadsize)[0] indebug(self.ui, 'payload chunk size: %i' % payloadsize) while payloadsize: if payloadsize == flaginterrupt: # interruption detection, the handler will now read a # single part and process it. interrupthandler(self.ui, self._fp)() elif payloadsize < 0: msg = 'negative payload chunk size: %i' % payloadsize raise error.BundleValueError(msg) else: result = self._readexact(payloadsize) chunknum += 1 pos += payloadsize if chunknum == len(self._chunkindex): self._chunkindex.append((pos, super(unbundlepart, self).tell())) yield result payloadsize = self._unpack(_fpayloadsize)[0] indebug(self.ui, 'payload chunk size: %i' % payloadsize) def _findchunk(self, pos): '''for a given payload position, return a chunk number and offset''' for chunk, (ppos, fpos) in enumerate(self._chunkindex): if ppos == pos: return chunk, 0 elif ppos > pos: return chunk - 1, pos - self._chunkindex[chunk - 1][0] raise ValueError('Unknown chunk') def _readheader(self): """read the header and setup the object""" typesize = self._unpackheader(_fparttypesize)[0] self.type = self._fromheader(typesize) indebug(self.ui, 'part type: "%s"' % self.type) self.id = self._unpackheader(_fpartid)[0] indebug(self.ui, 'part id: "%s"' % self.id) # extract mandatory bit from type self.mandatory = (self.type != 
self.type.lower()) self.type = self.type.lower() ## reading parameters # param count mancount, advcount = self._unpackheader(_fpartparamcount) indebug(self.ui, 'part parameters: %i' % (mancount + advcount)) # param size fparamsizes = _makefpartparamsizes(mancount + advcount) paramsizes = self._unpackheader(fparamsizes) # make it a list of couple again paramsizes = zip(paramsizes[::2], paramsizes[1::2]) # split mandatory from advisory mansizes = paramsizes[:mancount] advsizes = paramsizes[mancount:] # retrieve param value manparams = [] for key, value in mansizes: manparams.append((self._fromheader(key), self._fromheader(value))) advparams = [] for key, value in advsizes: advparams.append((self._fromheader(key), self._fromheader(value))) self._initparams(manparams, advparams) ## part payload self._payloadstream = util.chunkbuffer(self._payloadchunks()) # we read the data, tell it self._initialized = True def read(self, size=None): """read payload data""" if not self._initialized: self._readheader() if size is None: data = self._payloadstream.read() else: data = self._payloadstream.read(size) self._pos += len(data) if size is None or len(data) < size: if not self.consumed and self._pos: self.ui.debug('bundle2-input-part: total payload size %i\n' % self._pos) self.consumed = True return data def tell(self): return self._pos def seek(self, offset, whence=0): if whence == 0: newpos = offset elif whence == 1: newpos = self._pos + offset elif whence == 2: if not self.consumed: self.read() newpos = self._chunkindex[-1][0] - offset else: raise ValueError('Unknown whence value: %r' % (whence,)) if newpos > self._chunkindex[-1][0] and not self.consumed: self.read() if not 0 <= newpos <= self._chunkindex[-1][0]: raise ValueError('Offset out of range') if self._pos != newpos: chunk, internaloffset = self._findchunk(newpos) self._payloadstream = util.chunkbuffer(self._payloadchunks(chunk)) adjust = self.read(internaloffset) if len(adjust) != internaloffset: raise 
error.Abort(_('Seek failed\n')) self._pos = newpos # These are only the static capabilities. # Check the 'getrepocaps' function for the rest. capabilities = {'HG20': (), 'error': ('abort', 'unsupportedcontent', 'pushraced', 'pushkey'), 'listkeys': (), 'pushkey': (), 'digests': tuple(sorted(util.DIGESTS.keys())), 'remote-changegroup': ('http', 'https'), 'hgtagsfnodes': (), } def getrepocaps(repo, allowpushback=False): """return the bundle2 capabilities for a given repo Exists to allow extensions (like evolution) to mutate the capabilities. """ caps = capabilities.copy() caps['changegroup'] = tuple(sorted( changegroup.supportedincomingversions(repo))) if obsolete.isenabled(repo, obsolete.exchangeopt): supportedformat = tuple('V%i' % v for v in obsolete.formats) caps['obsmarkers'] = supportedformat if allowpushback: caps['pushback'] = () return caps def bundle2caps(remote): """return the bundle capabilities of a peer as dict""" raw = remote.capable('bundle2') if not raw and raw != '': return {} capsblob = urllib.unquote(remote.capable('bundle2')) return decodecaps(capsblob) def obsmarkersversion(caps): """extract the list of supported obsmarkers versions from a bundle2caps dict """ obscaps = caps.get('obsmarkers', ()) return [int(c[1:]) for c in obscaps if c.startswith('V')] @parthandler('changegroup', ('version', 'nbchanges', 'treemanifest')) def handlechangegroup(op, inpart): """apply a changegroup part on the repo This is a very early implementation that will massive rework before being inflicted to any end-user. """ # Make sure we trigger a transaction creation # # The addchangegroup function will get a transaction object by itself, but # we need to make sure we trigger the creation of a transaction object used # for the whole processing scope. 
op.gettransaction() unpackerversion = inpart.params.get('version', '01') # We should raise an appropriate exception here cg = changegroup.getunbundler(unpackerversion, inpart, None) # the source and url passed here are overwritten by the one contained in # the transaction.hookargs argument. So 'bundle2' is a placeholder nbchangesets = None if 'nbchanges' in inpart.params: nbchangesets = int(inpart.params.get('nbchanges')) if ('treemanifest' in inpart.params and 'treemanifest' not in op.repo.requirements): if len(op.repo.changelog) != 0: raise error.Abort(_( "bundle contains tree manifests, but local repo is " "non-empty and does not use tree manifests")) op.repo.requirements.add('treemanifest') op.repo._applyopenerreqs() op.repo._writerequirements() ret = cg.apply(op.repo, 'bundle2', 'bundle2', expectedtotal=nbchangesets) op.records.add('changegroup', {'return': ret}) if op.reply is not None: # This is definitely not the final form of this # return. But one need to start somewhere. part = op.reply.newpart('reply:changegroup', mandatory=False) part.addparam('in-reply-to', str(inpart.id), mandatory=False) part.addparam('return', '%i' % ret, mandatory=False) assert not inpart.read() _remotechangegroupparams = tuple(['url', 'size', 'digests'] + ['digest:%s' % k for k in util.DIGESTS.keys()]) @parthandler('remote-changegroup', _remotechangegroupparams) def handleremotechangegroup(op, inpart): """apply a bundle10 on the repo, given an url and validation information All the information about the remote bundle to import are given as parameters. The parameters include: - url: the url to the bundle10. - size: the bundle10 file size. It is used to validate what was retrieved by the client matches the server knowledge about the bundle. - digests: a space separated list of the digest types provided as parameters. - digest:: the hexadecimal representation of the digest with that name. 
Like the size, it is used to validate what was retrieved by the client matches what the server knows about the bundle. When multiple digest types are given, all of them are checked. """ try: raw_url = inpart.params['url'] except KeyError: raise error.Abort(_('remote-changegroup: missing "%s" param') % 'url') parsed_url = util.url(raw_url) if parsed_url.scheme not in capabilities['remote-changegroup']: raise error.Abort(_('remote-changegroup does not support %s urls') % parsed_url.scheme) try: size = int(inpart.params['size']) except ValueError: raise error.Abort(_('remote-changegroup: invalid value for param "%s"') % 'size') except KeyError: raise error.Abort(_('remote-changegroup: missing "%s" param') % 'size') digests = {} for typ in inpart.params.get('digests', '').split(): param = 'digest:%s' % typ try: value = inpart.params[param] except KeyError: raise error.Abort(_('remote-changegroup: missing "%s" param') % param) digests[typ] = value real_part = util.digestchecker(url.open(op.ui, raw_url), size, digests) # Make sure we trigger a transaction creation # # The addchangegroup function will get a transaction object by itself, but # we need to make sure we trigger the creation of a transaction object used # for the whole processing scope. op.gettransaction() from . import exchange cg = exchange.readbundle(op.repo.ui, real_part, raw_url) if not isinstance(cg, changegroup.cg1unpacker): raise error.Abort(_('%s: not a bundle version 1.0') % util.hidepassword(raw_url)) ret = cg.apply(op.repo, 'bundle2', 'bundle2') op.records.add('changegroup', {'return': ret}) if op.reply is not None: # This is definitely not the final form of this # return. But one need to start somewhere. 
part = op.reply.newpart('reply:changegroup') part.addparam('in-reply-to', str(inpart.id), mandatory=False) part.addparam('return', '%i' % ret, mandatory=False) try: real_part.validate() except error.Abort as e: raise error.Abort(_('bundle at %s is corrupted:\n%s') % (util.hidepassword(raw_url), str(e))) assert not inpart.read() @parthandler('reply:changegroup', ('return', 'in-reply-to')) def handlereplychangegroup(op, inpart): ret = int(inpart.params['return']) replyto = int(inpart.params['in-reply-to']) op.records.add('changegroup', {'return': ret}, replyto) @parthandler('check:heads') def handlecheckheads(op, inpart): """check that head of the repo did not change This is used to detect a push race when using unbundle. This replaces the "heads" argument of unbundle.""" h = inpart.read(20) heads = [] while len(h) == 20: heads.append(h) h = inpart.read(20) assert not h # Trigger a transaction so that we are guaranteed to have the lock now. if op.ui.configbool('experimental', 'bundle2lazylocking'): op.gettransaction() if heads != op.repo.heads(): raise error.PushRaced('repository changed while pushing - ' 'please try again') @parthandler('output') def handleoutput(op, inpart): """forward output captured on the server to the client""" for line in inpart.read().splitlines(): op.ui.status(('remote: %s\n' % line)) @parthandler('replycaps') def handlereplycaps(op, inpart): """Notify that a reply bundle should be created The payload contains the capabilities information for the reply""" caps = decodecaps(inpart.read()) if op.reply is None: op.reply = bundle20(op.ui, caps) class AbortFromPart(error.Abort): """Sub-class of Abort that denotes an error from a bundle2 part.""" @parthandler('error:abort', ('message', 'hint')) def handleerrorabort(op, inpart): """Used to transmit abort error over the wire""" raise AbortFromPart(inpart.params['message'], hint=inpart.params.get('hint')) @parthandler('error:pushkey', ('namespace', 'key', 'new', 'old', 'ret', 'in-reply-to')) def 
handleerrorpushkey(op, inpart): """Used to transmit failure of a mandatory pushkey over the wire""" kwargs = {} for name in ('namespace', 'key', 'new', 'old', 'ret'): value = inpart.params.get(name) if value is not None: kwargs[name] = value raise error.PushkeyFailed(inpart.params['in-reply-to'], **kwargs) @parthandler('error:unsupportedcontent', ('parttype', 'params')) def handleerrorunsupportedcontent(op, inpart): """Used to transmit unknown content error over the wire""" kwargs = {} parttype = inpart.params.get('parttype') if parttype is not None: kwargs['parttype'] = parttype params = inpart.params.get('params') if params is not None: kwargs['params'] = params.split('\0') raise error.BundleUnknownFeatureError(**kwargs) @parthandler('error:pushraced', ('message',)) def handleerrorpushraced(op, inpart): """Used to transmit push race error over the wire""" raise error.ResponseError(_('push failed:'), inpart.params['message']) @parthandler('listkeys', ('namespace',)) def handlelistkeys(op, inpart): """retrieve pushkey namespace content stored in a bundle2""" namespace = inpart.params['namespace'] r = pushkey.decodekeys(inpart.read()) op.records.add('listkeys', (namespace, r)) @parthandler('pushkey', ('namespace', 'key', 'old', 'new')) def handlepushkey(op, inpart): """process a pushkey request""" dec = pushkey.decode namespace = dec(inpart.params['namespace']) key = dec(inpart.params['key']) old = dec(inpart.params['old']) new = dec(inpart.params['new']) # Grab the transaction to ensure that we have the lock before performing the # pushkey. 
if op.ui.configbool('experimental', 'bundle2lazylocking'): op.gettransaction() ret = op.repo.pushkey(namespace, key, old, new) record = {'namespace': namespace, 'key': key, 'old': old, 'new': new} op.records.add('pushkey', record) if op.reply is not None: rpart = op.reply.newpart('reply:pushkey') rpart.addparam('in-reply-to', str(inpart.id), mandatory=False) rpart.addparam('return', '%i' % ret, mandatory=False) if inpart.mandatory and not ret: kwargs = {} for key in ('namespace', 'key', 'new', 'old', 'ret'): if key in inpart.params: kwargs[key] = inpart.params[key] raise error.PushkeyFailed(partid=str(inpart.id), **kwargs) @parthandler('reply:pushkey', ('return', 'in-reply-to')) def handlepushkeyreply(op, inpart): """retrieve the result of a pushkey request""" ret = int(inpart.params['return']) partid = int(inpart.params['in-reply-to']) op.records.add('pushkey', {'return': ret}, partid) @parthandler('obsmarkers') def handleobsmarker(op, inpart): """add a stream of obsmarkers to the repo""" tr = op.gettransaction() markerdata = inpart.read() if op.ui.config('experimental', 'obsmarkers-exchange-debug', False): op.ui.write(('obsmarker-exchange: %i bytes received\n') % len(markerdata)) # The mergemarkers call will crash if marker creation is not enabled. # we want to avoid this if the part is advisory. 
if not inpart.mandatory and op.repo.obsstore.readonly: op.repo.ui.debug('ignoring obsolescence markers, feature not enabled') return new = op.repo.obsstore.mergemarkers(tr, markerdata) if new: op.repo.ui.status(_('%i new obsolescence markers\n') % new) op.records.add('obsmarkers', {'new': new}) if op.reply is not None: rpart = op.reply.newpart('reply:obsmarkers') rpart.addparam('in-reply-to', str(inpart.id), mandatory=False) rpart.addparam('new', '%i' % new, mandatory=False) @parthandler('reply:obsmarkers', ('new', 'in-reply-to')) def handleobsmarkerreply(op, inpart): """retrieve the result of a pushkey request""" ret = int(inpart.params['new']) partid = int(inpart.params['in-reply-to']) op.records.add('obsmarkers', {'new': ret}, partid) @parthandler('hgtagsfnodes') def handlehgtagsfnodes(op, inpart): """Applies .hgtags fnodes cache entries to the local repo. Payload is pairs of 20 byte changeset nodes and filenodes. """ # Grab the transaction so we ensure that we have the lock at this point. if op.ui.configbool('experimental', 'bundle2lazylocking'): op.gettransaction() cache = tags.hgtagsfnodescache(op.repo.unfiltered()) count = 0 while True: node = inpart.read(20) fnode = inpart.read(20) if len(node) < 20 or len(fnode) < 20: op.ui.debug('ignoring incomplete received .hgtags fnodes data\n') break cache.setfnode(node, fnode) count += 1 cache.write() op.ui.debug('applied %i hgtags fnodes cache entries\n' % count) mercurial-3.7.3/mercurial/dagparser.py0000644000175000017500000003462512676531524017445 0ustar mpmmpm00000000000000# dagparser.py - parser and generator for concise description of DAGs # # Copyright 2010 Peter Arrenbrecht # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import re import string from .i18n import _ from . 
import error def parsedag(desc): '''parses a DAG from a concise textual description; generates events "+n" is a linear run of n nodes based on the current default parent "." is a single node based on the current default parent "$" resets the default parent to -1 (implied at the start); otherwise the default parent is always the last node created ">> len(list(parsedag(""" ... ... +3 # 3 nodes in linear run ... :forkhere # a label for the last of the 3 nodes from above ... +5 # 5 more nodes on one branch ... :mergethis # label again ... >> list(parsedag("")) [] A simple linear run: >>> list(parsedag("+3")) [('n', (0, [-1])), ('n', (1, [0])), ('n', (2, [1]))] Some non-standard ways to define such runs: >>> list(parsedag("+1+2")) [('n', (0, [-1])), ('n', (1, [0])), ('n', (2, [1]))] >>> list(parsedag("+1*1*")) [('n', (0, [-1])), ('n', (1, [0])), ('n', (2, [1]))] >>> list(parsedag("*")) [('n', (0, [-1]))] >>> list(parsedag("...")) [('n', (0, [-1])), ('n', (1, [0])), ('n', (2, [1]))] A fork and a join, using numeric back references: >>> list(parsedag("+2*2*/2")) [('n', (0, [-1])), ('n', (1, [0])), ('n', (2, [0])), ('n', (3, [2, 1]))] >>> list(parsedag("+2<2+1/2")) [('n', (0, [-1])), ('n', (1, [0])), ('n', (2, [0])), ('n', (3, [2, 1]))] Placing a label: >>> list(parsedag("+1 :mylabel +1")) [('n', (0, [-1])), ('l', (0, 'mylabel')), ('n', (1, [0]))] An empty label (silly, really): >>> list(parsedag("+1:+1")) [('n', (0, [-1])), ('l', (0, '')), ('n', (1, [0]))] Fork and join, but with labels instead of numeric back references: >>> list(parsedag("+1:f +1:p2 *f */p2")) [('n', (0, [-1])), ('l', (0, 'f')), ('n', (1, [0])), ('l', (1, 'p2')), ('n', (2, [0])), ('n', (3, [2, 1]))] >>> list(parsedag("+1:f +1:p2 >> list(parsedag("+1 $ +1")) [('n', (0, [-1])), ('n', (1, [-1]))] Annotations, which are meant to introduce sticky state for subsequent nodes: >>> list(parsedag("+1 @ann +1")) [('n', (0, [-1])), ('a', 'ann'), ('n', (1, [0]))] >>> list(parsedag('+1 @"my annotation" +1')) [('n', 
(0, [-1])), ('a', 'my annotation'), ('n', (1, [0]))] Commands, which are meant to operate on the most recently created node: >>> list(parsedag("+1 !cmd +1")) [('n', (0, [-1])), ('c', 'cmd'), ('n', (1, [0]))] >>> list(parsedag('+1 !"my command" +1')) [('n', (0, [-1])), ('c', 'my command'), ('n', (1, [0]))] >>> list(parsedag('+1 !!my command line\\n +1')) [('n', (0, [-1])), ('C', 'my command line'), ('n', (1, [0]))] Comments, which extend to the end of the line: >>> list(parsedag('+1 # comment\\n+1')) [('n', (0, [-1])), ('n', (1, [0]))] Error: >>> try: list(parsedag('+1 bad')) ... except Exception, e: print e invalid character in dag description: bad... ''' if not desc: return wordchars = string.ascii_letters + string.digits labels = {} p1 = -1 r = 0 def resolve(ref): if not ref: return p1 elif ref[0] in string.digits: return r - int(ref) else: return labels[ref] chiter = (c for c in desc) def nextch(): return next(chiter, '\0') def nextrun(c, allow): s = '' while c in allow: s += c c = nextch() return c, s def nextdelimited(c, limit, escape): s = '' while c != limit: if c == escape: c = nextch() s += c c = nextch() return nextch(), s def nextstring(c): if c == '"': return nextdelimited(nextch(), '"', '\\') else: return nextrun(c, wordchars) c = nextch() while c != '\0': while c in string.whitespace: c = nextch() if c == '.': yield 'n', (r, [p1]) p1 = r r += 1 c = nextch() elif c == '+': c, digs = nextrun(nextch(), string.digits) n = int(digs) for i in xrange(0, n): yield 'n', (r, [p1]) p1 = r r += 1 elif c in '*/': if c == '*': c = nextch() c, pref = nextstring(c) prefs = [pref] while c == '/': c, pref = nextstring(nextch()) prefs.append(pref) ps = [resolve(ref) for ref in prefs] yield 'n', (r, ps) p1 = r r += 1 elif c == '<': c, ref = nextstring(nextch()) p1 = resolve(ref) elif c == ':': c, name = nextstring(nextch()) labels[name] = p1 yield 'l', (p1, name) elif c == '@': c, text = nextstring(nextch()) yield 'a', text elif c == '!': c = nextch() if c == '!': cmd = 
'' c = nextch() while c not in '\n\r\0': cmd += c c = nextch() yield 'C', cmd else: c, cmd = nextstring(c) yield 'c', cmd elif c == '#': while c not in '\n\r\0': c = nextch() elif c == '$': p1 = -1 c = nextch() elif c == '\0': return # in case it was preceded by whitespace else: s = '' i = 0 while c != '\0' and i < 10: s += c i += 1 c = nextch() raise error.Abort(_('invalid character in dag description: ' '%s...') % s) def dagtextlines(events, addspaces=True, wraplabels=False, wrapannotations=False, wrapcommands=False, wrapnonlinear=False, usedots=False, maxlinewidth=70): '''generates single lines for dagtext()''' def wrapstring(text): if re.match("^[0-9a-z]*$", text): return text return '"' + text.replace('\\', '\\\\').replace('"', '\"') + '"' def gen(): labels = {} run = 0 wantr = 0 needroot = False for kind, data in events: if kind == 'n': r, ps = data # sanity check if r != wantr: raise error.Abort(_("expected id %i, got %i") % (wantr, r)) if not ps: ps = [-1] else: for p in ps: if p >= r: raise error.Abort(_("parent id %i is larger than " "current id %i") % (p, r)) wantr += 1 # new root? p1 = r - 1 if len(ps) == 1 and ps[0] == -1: if needroot: if run: yield '+' + str(run) run = 0 if wrapnonlinear: yield '\n' yield '$' p1 = -1 else: needroot = True if len(ps) == 1 and ps[0] == p1: if usedots: yield "." else: run += 1 else: if run: yield '+' + str(run) run = 0 if wrapnonlinear: yield '\n' prefs = [] for p in ps: if p == p1: prefs.append('') elif p in labels: prefs.append(labels[p]) else: prefs.append(str(r - p)) yield '*' + '/'.join(prefs) else: if run: yield '+' + str(run) run = 0 if kind == 'l': rid, name = data labels[rid] = name yield ':' + name if wraplabels: yield '\n' elif kind == 'c': yield '!' + wrapstring(data) if wrapcommands: yield '\n' elif kind == 'C': yield '!!' 
+ data yield '\n' elif kind == 'a': if wrapannotations: yield '\n' yield '@' + wrapstring(data) elif kind == '#': yield '#' + data yield '\n' else: raise error.Abort(_("invalid event type in dag: %s") % str((type, data))) if run: yield '+' + str(run) line = '' for part in gen(): if part == '\n': if line: yield line line = '' else: if len(line) + len(part) >= maxlinewidth: yield line line = '' elif addspaces and line and part != '.': line += ' ' line += part if line: yield line def dagtext(dag, addspaces=True, wraplabels=False, wrapannotations=False, wrapcommands=False, wrapnonlinear=False, usedots=False, maxlinewidth=70): '''generates lines of a textual representation for a dag event stream events should generate what parsedag() does, so: ('n', (id, [parentids])) for node creation ('l', (id, labelname)) for labels on nodes ('a', text) for annotations ('c', text) for commands ('C', text) for line commands ('!!') ('#', text) for comment lines Parent nodes must come before child nodes. Examples -------- Linear run: >>> dagtext([('n', (0, [-1])), ('n', (1, [0]))]) '+2' Two roots: >>> dagtext([('n', (0, [-1])), ('n', (1, [-1]))]) '+1 $ +1' Fork and join: >>> dagtext([('n', (0, [-1])), ('n', (1, [0])), ('n', (2, [0])), ... ('n', (3, [2, 1]))]) '+2 *2 */2' Fork and join with labels: >>> dagtext([('n', (0, [-1])), ('l', (0, 'f')), ('n', (1, [0])), ... ('l', (1, 'p2')), ('n', (2, [0])), ('n', (3, [2, 1]))]) '+1 :f +1 :p2 *f */p2' Annotations: >>> dagtext([('n', (0, [-1])), ('a', 'ann'), ('n', (1, [0]))]) '+1 @ann +1' >>> dagtext([('n', (0, [-1])), ... ('a', 'my annotation'), ... ('n', (1, [0]))]) '+1 @"my annotation" +1' Commands: >>> dagtext([('n', (0, [-1])), ('c', 'cmd'), ('n', (1, [0]))]) '+1 !cmd +1' >>> dagtext([('n', (0, [-1])), ('c', 'my command'), ('n', (1, [0]))]) '+1 !"my command" +1' >>> dagtext([('n', (0, [-1])), ... ('C', 'my command line'), ... 
('n', (1, [0]))]) '+1 !!my command line\\n+1' Comments: >>> dagtext([('n', (0, [-1])), ('#', ' comment'), ('n', (1, [0]))]) '+1 # comment\\n+1' >>> dagtext([]) '' Combining parsedag and dagtext: >>> dagtext(parsedag('+1 :f +1 :p2 *f */p2')) '+1 :f +1 :p2 *f */p2' ''' return "\n".join(dagtextlines(dag, addspaces, wraplabels, wrapannotations, wrapcommands, wrapnonlinear, usedots, maxlinewidth)) mercurial-3.7.3/mercurial/subrepo.py0000644000175000017500000021475712676531525017163 0ustar mpmmpm00000000000000# subrepo.py - sub-repository handling for Mercurial # # Copyright 2009-2010 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import copy import errno import os import posixpath import re import stat import subprocess import sys import tarfile import xml.dom.minidom from .i18n import _ from . import ( cmdutil, config, error, exchange, match as matchmod, node, pathutil, phases, scmutil, util, ) hg = None propertycache = util.propertycache nullstate = ('', '', 'empty') def _expandedabspath(path): ''' get a path or url and if it is a path expand it and return an absolute path ''' expandedpath = util.urllocalpath(util.expandpath(path)) u = util.url(expandedpath) if not u.scheme: path = util.normpath(os.path.abspath(u.path)) return path def _getstorehashcachename(remotepath): '''get a unique filename for the store hash cache of a remote repository''' return util.sha1(_expandedabspath(remotepath)).hexdigest()[0:12] class SubrepoAbort(error.Abort): """Exception class used to avoid handling a subrepo error more than once""" def __init__(self, *args, **kw): error.Abort.__init__(self, *args, **kw) self.subrepo = kw.get('subrepo') self.cause = kw.get('cause') def annotatesubrepoerror(func): def decoratedmethod(self, *args, **kargs): try: res = func(self, *args, **kargs) except SubrepoAbort as ex: # This exception has already been handled 
raise ex except error.Abort as ex: subrepo = subrelpath(self) errormsg = str(ex) + ' ' + _('(in subrepo %s)') % subrepo # avoid handling this exception by raising a SubrepoAbort exception raise SubrepoAbort(errormsg, hint=ex.hint, subrepo=subrepo, cause=sys.exc_info()) return res return decoratedmethod def state(ctx, ui): """return a state dict, mapping subrepo paths configured in .hgsub to tuple: (source from .hgsub, revision from .hgsubstate, kind (key in types dict)) """ p = config.config() repo = ctx.repo() def read(f, sections=None, remap=None): if f in ctx: try: data = ctx[f].data() except IOError as err: if err.errno != errno.ENOENT: raise # handle missing subrepo spec files as removed ui.warn(_("warning: subrepo spec file \'%s\' not found\n") % repo.pathto(f)) return p.parse(f, data, sections, remap, read) else: raise error.Abort(_("subrepo spec file \'%s\' not found") % repo.pathto(f)) if '.hgsub' in ctx: read('.hgsub') for path, src in ui.configitems('subpaths'): p.set('subpaths', path, src, ui.configsource('subpaths', path)) rev = {} if '.hgsubstate' in ctx: try: for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()): l = l.lstrip() if not l: continue try: revision, path = l.split(" ", 1) except ValueError: raise error.Abort(_("invalid subrepository revision " "specifier in \'%s\' line %d") % (repo.pathto('.hgsubstate'), (i + 1))) rev[path] = revision except IOError as err: if err.errno != errno.ENOENT: raise def remap(src): for pattern, repl in p.items('subpaths'): # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub # does a string decode. repl = repl.encode('string-escape') # However, we still want to allow back references to go # through unharmed, so we turn r'\\1' into r'\1'. Again, # extra escapes are needed because re.sub string decodes. 
repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl) try: src = re.sub(pattern, repl, src, 1) except re.error as e: raise error.Abort(_("bad subrepository pattern in %s: %s") % (p.source('subpaths', pattern), e)) return src state = {} for path, src in p[''].items(): kind = 'hg' if src.startswith('['): if ']' not in src: raise error.Abort(_('missing ] in subrepo source')) kind, src = src.split(']', 1) kind = kind[1:] src = src.lstrip() # strip any extra whitespace after ']' if not util.url(src).isabs(): parent = _abssource(repo, abort=False) if parent: parent = util.url(parent) parent.path = posixpath.join(parent.path or '', src) parent.path = posixpath.normpath(parent.path) joined = str(parent) # Remap the full joined path and use it if it changes, # else remap the original source. remapped = remap(joined) if remapped == joined: src = remap(src) else: src = remapped src = remap(src) state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind) return state def writestate(repo, state): """rewrite .hgsubstate in (outer) repo with these subrepo states""" lines = ['%s %s\n' % (state[s][1], s) for s in sorted(state) if state[s][1] != nullstate[1]] repo.wwrite('.hgsubstate', ''.join(lines), '') def submerge(repo, wctx, mctx, actx, overwrite): """delegated from merge.applyupdates: merging of .hgsubstate file in working context, merging context and ancestor context""" if mctx == actx: # backwards? 
actx = wctx.p1() s1 = wctx.substate s2 = mctx.substate sa = actx.substate sm = {} repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx)) def debug(s, msg, r=""): if r: r = "%s:%s:%s" % r repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r)) for s, l in sorted(s1.iteritems()): a = sa.get(s, nullstate) ld = l # local state with possible dirty flag for compares if wctx.sub(s).dirty(): ld = (l[0], l[1] + "+") if wctx == actx: # overwrite a = ld if s in s2: r = s2[s] if ld == r or r == a: # no change or local is newer sm[s] = l continue elif ld == a: # other side changed debug(s, "other changed, get", r) wctx.sub(s).get(r, overwrite) sm[s] = r elif ld[0] != r[0]: # sources differ if repo.ui.promptchoice( _(' subrepository sources for %s differ\n' 'use (l)ocal source (%s) or (r)emote source (%s)?' '$$ &Local $$ &Remote') % (s, l[0], r[0]), 0): debug(s, "prompt changed, get", r) wctx.sub(s).get(r, overwrite) sm[s] = r elif ld[1] == a[1]: # local side is unchanged debug(s, "other side changed, get", r) wctx.sub(s).get(r, overwrite) sm[s] = r else: debug(s, "both sides changed") srepo = wctx.sub(s) option = repo.ui.promptchoice( _(' subrepository %s diverged (local revision: %s, ' 'remote revision: %s)\n' '(M)erge, keep (l)ocal or keep (r)emote?' '$$ &Merge $$ &Local $$ &Remote') % (s, srepo.shortid(l[1]), srepo.shortid(r[1])), 0) if option == 0: wctx.sub(s).merge(r) sm[s] = l debug(s, "merge with", r) elif option == 1: sm[s] = l debug(s, "keep local subrepo revision", l) else: wctx.sub(s).get(r, overwrite) sm[s] = r debug(s, "get remote subrepo revision", r) elif ld == a: # remote removed, local unchanged debug(s, "remote removed, remove") wctx.sub(s).remove() elif a == nullstate: # not present in remote or ancestor debug(s, "local added, keep") sm[s] = l continue else: if repo.ui.promptchoice( _(' local changed subrepository %s which remote removed\n' 'use (c)hanged version or (d)elete?' 
'$$ &Changed $$ &Delete') % s, 0): debug(s, "prompt remove") wctx.sub(s).remove() for s, r in sorted(s2.items()): if s in s1: continue elif s not in sa: debug(s, "remote added, get", r) mctx.sub(s).get(r) sm[s] = r elif r != sa[s]: if repo.ui.promptchoice( _(' remote changed subrepository %s which local removed\n' 'use (c)hanged version or (d)elete?' '$$ &Changed $$ &Delete') % s, 0) == 0: debug(s, "prompt recreate", r) mctx.sub(s).get(r) sm[s] = r # record merged .hgsubstate writestate(repo, sm) return sm def _updateprompt(ui, sub, dirty, local, remote): if dirty: msg = (_(' subrepository sources for %s differ\n' 'use (l)ocal source (%s) or (r)emote source (%s)?' '$$ &Local $$ &Remote') % (subrelpath(sub), local, remote)) else: msg = (_(' subrepository sources for %s differ (in checked out ' 'version)\n' 'use (l)ocal source (%s) or (r)emote source (%s)?' '$$ &Local $$ &Remote') % (subrelpath(sub), local, remote)) return ui.promptchoice(msg, 0) def reporelpath(repo): """return path to this (sub)repo as seen from outermost repo""" parent = repo while util.safehasattr(parent, '_subparent'): parent = parent._subparent return repo.root[len(pathutil.normasprefix(parent.root)):] def subrelpath(sub): """return path to this subrepo as seen from outermost repo""" return sub._relpath def _abssource(repo, push=False, abort=True): """return pull/push path of repo - either based on parent repo .hgsub info or on the top repo config. 
Abort or return None if no source found.""" if util.safehasattr(repo, '_subparent'): source = util.url(repo._subsource) if source.isabs(): return str(source) source.path = posixpath.normpath(source.path) parent = _abssource(repo._subparent, push, abort=False) if parent: parent = util.url(util.pconvert(parent)) parent.path = posixpath.join(parent.path or '', source.path) parent.path = posixpath.normpath(parent.path) return str(parent) else: # recursion reached top repo if util.safehasattr(repo, '_subtoppath'): return repo._subtoppath if push and repo.ui.config('paths', 'default-push'): return repo.ui.config('paths', 'default-push') if repo.ui.config('paths', 'default'): return repo.ui.config('paths', 'default') if repo.shared(): # chop off the .hg component to get the default path form return os.path.dirname(repo.sharedpath) if abort: raise error.Abort(_("default path for subrepository not found")) def _sanitize(ui, vfs, ignore): for dirname, dirs, names in vfs.walk(): for i, d in enumerate(dirs): if d.lower() == ignore: del dirs[i] break if vfs.basename(dirname).lower() != '.hg': continue for f in names: if f.lower() == 'hgrc': ui.warn(_("warning: removing potentially hostile 'hgrc' " "in '%s'\n") % vfs.join(dirname)) vfs.unlink(vfs.reljoin(dirname, f)) def subrepo(ctx, path, allowwdir=False): """return instance of the right subrepo class for subrepo in path""" # subrepo inherently violates our import layering rules # because it wants to make repo objects from deep inside the stack # so we manually delay the circular imports to not break # scripts that don't use our demand-loading global hg from . 
def nullsubrepo(ctx, path, pctx):
    """return an empty subrepo in pctx for the extant subrepo in ctx"""
    # Late (circular) import: subrepo needs to build repo objects from
    # deep inside the stack, so pull in hg lazily to keep scripts that
    # bypass demand-loading working.
    global hg
    from . import hg as h
    hg = h

    pathutil.pathauditor(ctx.repo().root)(path)
    state = ctx.substate[path]
    kind = state[2]
    if kind not in types:
        raise error.Abort(_('unknown subrepo type %s') % kind)
    # hg subrepos use the null node; other kinds use an empty revision
    subrev = "0" * 40 if kind == 'hg' else ''
    return types[kind](pctx, path, (state[0], subrev))
the context referring this subrepository in the parent repository. ``path`` is the path to this subrepository as seen from innermost repository. """ self.ui = ctx.repo().ui self._ctx = ctx self._path = path def storeclean(self, path): """ returns true if the repository has not changed since it was last cloned from or pushed to a given repository. """ return False def dirty(self, ignoreupdate=False): """returns true if the dirstate of the subrepo is dirty or does not match current stored state. If ignoreupdate is true, only check whether the subrepo has uncommitted changes in its dirstate. """ raise NotImplementedError def dirtyreason(self, ignoreupdate=False): """return reason string if it is ``dirty()`` Returned string should have enough information for the message of exception. This returns None, otherwise. """ if self.dirty(ignoreupdate=ignoreupdate): return _("uncommitted changes in subrepository '%s'" ) % subrelpath(self) def bailifchanged(self, ignoreupdate=False): """raise Abort if subrepository is ``dirty()`` """ dirtyreason = self.dirtyreason(ignoreupdate=ignoreupdate) if dirtyreason: raise error.Abort(dirtyreason) def basestate(self): """current working directory base state, disregarding .hgsubstate state and working directory modifications""" raise NotImplementedError def checknested(self, path): """check if path is a subrepository within this repository""" return False def commit(self, text, user, date): """commit the current changes to the subrepo with the given log message. Use given user and date if possible. Return the new state of the subrepo. """ raise NotImplementedError def phase(self, state): """returns phase of specified state in the subrepository. 
""" return phases.public def remove(self): """remove the subrepo (should verify the dirstate is not dirty first) """ raise NotImplementedError def get(self, state, overwrite=False): """run whatever commands are needed to put the subrepo into this state """ raise NotImplementedError def merge(self, state): """merge currently-saved state with the new state.""" raise NotImplementedError def push(self, opts): """perform whatever action is analogous to 'hg push' This may be a no-op on some systems. """ raise NotImplementedError def add(self, ui, match, prefix, explicitonly, **opts): return [] def addremove(self, matcher, prefix, opts, dry_run, similarity): self.ui.warn("%s: %s" % (prefix, _("addremove is not supported"))) return 1 def cat(self, match, prefix, **opts): return 1 def status(self, rev2, **opts): return scmutil.status([], [], [], [], [], [], []) def diff(self, ui, diffopts, node2, match, prefix, **opts): pass def outgoing(self, ui, dest, opts): return 1 def incoming(self, ui, source, opts): return 1 def files(self): """return filename iterator""" raise NotImplementedError def filedata(self, name): """return file data""" raise NotImplementedError def fileflags(self, name): """return file flags""" return '' def getfileset(self, expr): """Resolve the fileset expression for this repo""" return set() def printfiles(self, ui, m, fm, fmt, subrepos): """handle the files command for this subrepo""" return 1 def archive(self, archiver, prefix, match=None): if match is not None: files = [f for f in self.files() if match(f)] else: files = self.files() total = len(files) relpath = subrelpath(self) self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files'), total=total) for i, name in enumerate(files): flags = self.fileflags(name) mode = 'x' in flags and 0o755 or 0o644 symlink = 'l' in flags archiver.addfile(prefix + self._path + '/' + name, mode, symlink, self.filedata(name)) self.ui.progress(_('archiving (%s)') % relpath, i + 1, unit=_('files'), total=total) 
self.ui.progress(_('archiving (%s)') % relpath, None) return total def walk(self, match): ''' walk recursively through the directory tree, finding all files matched by the match function ''' pass def forget(self, match, prefix): return ([], []) def removefiles(self, matcher, prefix, after, force, subrepos): """remove the matched files from the subrepository and the filesystem, possibly by force and/or after the file has been removed from the filesystem. Return 0 on success, 1 on any warning. """ return 1 def revert(self, substate, *pats, **opts): self.ui.warn('%s: reverting %s subrepos is unsupported\n' \ % (substate[0], substate[2])) return [] def shortid(self, revid): return revid def verify(self): '''verify the integrity of the repository. Return 0 on success or warning, 1 on any error. ''' return 0 @propertycache def wvfs(self): """return vfs to access the working directory of this subrepository """ return scmutil.vfs(self._ctx.repo().wvfs.join(self._path)) @propertycache def _relpath(self): """return path to this subrepository as seen from outermost repository """ return self.wvfs.reljoin(reporelpath(self._ctx.repo()), self._path) class hgsubrepo(abstractsubrepo): def __init__(self, ctx, path, state): super(hgsubrepo, self).__init__(ctx, path) self._state = state r = ctx.repo() root = r.wjoin(path) create = not r.wvfs.exists('%s/.hg' % path) self._repo = hg.repository(r.baseui, root, create=create) # Propagate the parent's --hidden option if r is r.unfiltered(): self._repo = self._repo.unfiltered() self.ui = self._repo.ui for s, k in [('ui', 'commitsubrepos')]: v = r.ui.config(s, k) if v: self.ui.setconfig(s, k, v, 'subrepo') # internal config: ui._usedassubrepo self.ui.setconfig('ui', '_usedassubrepo', 'True', 'subrepo') self._initrepo(r, state[0], create) def storeclean(self, path): with self._repo.lock(): return self._storeclean(path) def _storeclean(self, path): clean = True itercache = self._calcstorehash(path) for filehash in 
self._readstorehashcache(path): if filehash != next(itercache, None): clean = False break if clean: # if not empty: # the cached and current pull states have a different size clean = next(itercache, None) is None return clean def _calcstorehash(self, remotepath): '''calculate a unique "store hash" This method is used to to detect when there are changes that may require a push to a given remote path.''' # sort the files that will be hashed in increasing (likely) file size filelist = ('bookmarks', 'store/phaseroots', 'store/00changelog.i') yield '# %s\n' % _expandedabspath(remotepath) vfs = self._repo.vfs for relname in filelist: filehash = util.sha1(vfs.tryread(relname)).hexdigest() yield '%s = %s\n' % (relname, filehash) @propertycache def _cachestorehashvfs(self): return scmutil.vfs(self._repo.join('cache/storehash')) def _readstorehashcache(self, remotepath): '''read the store hash cache for a given remote repository''' cachefile = _getstorehashcachename(remotepath) return self._cachestorehashvfs.tryreadlines(cachefile, 'r') def _cachestorehash(self, remotepath): '''cache the current store hash Each remote repo requires its own store hash cache, because a subrepo store may be "clean" versus a given remote repo, but not versus another ''' cachefile = _getstorehashcachename(remotepath) with self._repo.lock(): storehash = list(self._calcstorehash(remotepath)) vfs = self._cachestorehashvfs vfs.writelines(cachefile, storehash, mode='w', notindexed=True) def _getctx(self): '''fetch the context for this subrepo revision, possibly a workingctx ''' if self._ctx.rev() is None: return self._repo[None] # workingctx if parent is workingctx else: rev = self._state[1] return self._repo[rev] @annotatesubrepoerror def _initrepo(self, parentrepo, source, create): self._repo._subparent = parentrepo self._repo._subsource = source if create: lines = ['[paths]\n'] def addpathconfig(key, value): if value: lines.append('%s = %s\n' % (key, value)) self.ui.setconfig('paths', key, value, 
'subrepo') defpath = _abssource(self._repo, abort=False) defpushpath = _abssource(self._repo, True, abort=False) addpathconfig('default', defpath) if defpath != defpushpath: addpathconfig('default-push', defpushpath) fp = self._repo.vfs("hgrc", "w", text=True) try: fp.write(''.join(lines)) finally: fp.close() @annotatesubrepoerror def add(self, ui, match, prefix, explicitonly, **opts): return cmdutil.add(ui, self._repo, match, self.wvfs.reljoin(prefix, self._path), explicitonly, **opts) @annotatesubrepoerror def addremove(self, m, prefix, opts, dry_run, similarity): # In the same way as sub directories are processed, once in a subrepo, # always entry any of its subrepos. Don't corrupt the options that will # be used to process sibling subrepos however. opts = copy.copy(opts) opts['subrepos'] = True return scmutil.addremove(self._repo, m, self.wvfs.reljoin(prefix, self._path), opts, dry_run, similarity) @annotatesubrepoerror def cat(self, match, prefix, **opts): rev = self._state[1] ctx = self._repo[rev] return cmdutil.cat(self.ui, self._repo, ctx, match, prefix, **opts) @annotatesubrepoerror def status(self, rev2, **opts): try: rev1 = self._state[1] ctx1 = self._repo[rev1] ctx2 = self._repo[rev2] return self._repo.status(ctx1, ctx2, **opts) except error.RepoLookupError as inst: self.ui.warn(_('warning: error "%s" in subrepository "%s"\n') % (inst, subrelpath(self))) return scmutil.status([], [], [], [], [], [], []) @annotatesubrepoerror def diff(self, ui, diffopts, node2, match, prefix, **opts): try: node1 = node.bin(self._state[1]) # We currently expect node2 to come from substate and be # in hex format if node2 is not None: node2 = node.bin(node2) cmdutil.diffordiffstat(ui, self._repo, diffopts, node1, node2, match, prefix=posixpath.join(prefix, self._path), listsubrepos=True, **opts) except error.RepoLookupError as inst: self.ui.warn(_('warning: error "%s" in subrepository "%s"\n') % (inst, subrelpath(self))) @annotatesubrepoerror def archive(self, archiver, 
prefix, match=None): self._get(self._state + ('hg',)) total = abstractsubrepo.archive(self, archiver, prefix, match) rev = self._state[1] ctx = self._repo[rev] for subpath in ctx.substate: s = subrepo(ctx, subpath, True) submatch = matchmod.narrowmatcher(subpath, match) total += s.archive(archiver, prefix + self._path + '/', submatch) return total @annotatesubrepoerror def dirty(self, ignoreupdate=False): r = self._state[1] if r == '' and not ignoreupdate: # no state recorded return True w = self._repo[None] if r != w.p1().hex() and not ignoreupdate: # different version checked out return True return w.dirty() # working directory changed def basestate(self): return self._repo['.'].hex() def checknested(self, path): return self._repo._checknested(self._repo.wjoin(path)) @annotatesubrepoerror def commit(self, text, user, date): # don't bother committing in the subrepo if it's only been # updated if not self.dirty(True): return self._repo['.'].hex() self.ui.debug("committing subrepo %s\n" % subrelpath(self)) n = self._repo.commit(text, user, date) if not n: return self._repo['.'].hex() # different version checked out return node.hex(n) @annotatesubrepoerror def phase(self, state): return self._repo[state].phase() @annotatesubrepoerror def remove(self): # we can't fully delete the repository as it may contain # local-only history self.ui.note(_('removing subrepo %s\n') % subrelpath(self)) hg.clean(self._repo, node.nullid, False) def _get(self, state): source, revision, kind = state if revision in self._repo.unfiltered(): return True self._repo._subsource = source srcurl = _abssource(self._repo) other = hg.peer(self._repo, {}, srcurl) if len(self._repo) == 0: self.ui.status(_('cloning subrepo %s from %s\n') % (subrelpath(self), srcurl)) parentrepo = self._repo._subparent # use self._repo.vfs instead of self.wvfs to remove .hg only self._repo.vfs.rmtree() other, cloned = hg.clone(self._repo._subparent.baseui, {}, other, self._repo.root, update=False) self._repo = 
cloned.local() self._initrepo(parentrepo, source, create=True) self._cachestorehash(srcurl) else: self.ui.status(_('pulling subrepo %s from %s\n') % (subrelpath(self), srcurl)) cleansub = self.storeclean(srcurl) exchange.pull(self._repo, other) if cleansub: # keep the repo clean after pull self._cachestorehash(srcurl) return False @annotatesubrepoerror def get(self, state, overwrite=False): inrepo = self._get(state) source, revision, kind = state repo = self._repo repo.ui.debug("getting subrepo %s\n" % self._path) if inrepo: urepo = repo.unfiltered() ctx = urepo[revision] if ctx.hidden(): urepo.ui.warn( _('revision %s in subrepo %s is hidden\n') \ % (revision[0:12], self._path)) repo = urepo hg.updaterepo(repo, revision, overwrite) @annotatesubrepoerror def merge(self, state): self._get(state) cur = self._repo['.'] dst = self._repo[state[1]] anc = dst.ancestor(cur) def mergefunc(): if anc == cur and dst.branch() == cur.branch(): self.ui.debug("updating subrepo %s\n" % subrelpath(self)) hg.update(self._repo, state[1]) elif anc == dst: self.ui.debug("skipping subrepo %s\n" % subrelpath(self)) else: self.ui.debug("merging subrepo %s\n" % subrelpath(self)) hg.merge(self._repo, state[1], remind=False) wctx = self._repo[None] if self.dirty(): if anc != dst: if _updateprompt(self.ui, self, wctx.dirty(), cur, dst): mergefunc() else: mergefunc() else: mergefunc() @annotatesubrepoerror def push(self, opts): force = opts.get('force') newbranch = opts.get('new_branch') ssh = opts.get('ssh') # push subrepos depth-first for coherent ordering c = self._repo[''] subs = c.substate # only repos that are committed for s in sorted(subs): if c.sub(s).push(opts) == 0: return False dsturl = _abssource(self._repo, True) if not force: if self.storeclean(dsturl): self.ui.status( _('no changes made to subrepo %s since last push to %s\n') % (subrelpath(self), dsturl)) return None self.ui.status(_('pushing subrepo %s to %s\n') % (subrelpath(self), dsturl)) other = hg.peer(self._repo, {'ssh': 
ssh}, dsturl) res = exchange.push(self._repo, other, force, newbranch=newbranch) # the repo is now clean self._cachestorehash(dsturl) return res.cgresult @annotatesubrepoerror def outgoing(self, ui, dest, opts): if 'rev' in opts or 'branch' in opts: opts = copy.copy(opts) opts.pop('rev', None) opts.pop('branch', None) return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts) @annotatesubrepoerror def incoming(self, ui, source, opts): if 'rev' in opts or 'branch' in opts: opts = copy.copy(opts) opts.pop('rev', None) opts.pop('branch', None) return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts) @annotatesubrepoerror def files(self): rev = self._state[1] ctx = self._repo[rev] return ctx.manifest().keys() def filedata(self, name): rev = self._state[1] return self._repo[rev][name].data() def fileflags(self, name): rev = self._state[1] ctx = self._repo[rev] return ctx.flags(name) @annotatesubrepoerror def printfiles(self, ui, m, fm, fmt, subrepos): # If the parent context is a workingctx, use the workingctx here for # consistency. 
if self._ctx.rev() is None: ctx = self._repo[None] else: rev = self._state[1] ctx = self._repo[rev] return cmdutil.files(ui, ctx, m, fm, fmt, subrepos) @annotatesubrepoerror def getfileset(self, expr): if self._ctx.rev() is None: ctx = self._repo[None] else: rev = self._state[1] ctx = self._repo[rev] files = ctx.getfileset(expr) for subpath in ctx.substate: sub = ctx.sub(subpath) try: files.extend(subpath + '/' + f for f in sub.getfileset(expr)) except error.LookupError: self.ui.status(_("skipping missing subrepository: %s\n") % self.wvfs.reljoin(reporelpath(self), subpath)) return files def walk(self, match): ctx = self._repo[None] return ctx.walk(match) @annotatesubrepoerror def forget(self, match, prefix): return cmdutil.forget(self.ui, self._repo, match, self.wvfs.reljoin(prefix, self._path), True) @annotatesubrepoerror def removefiles(self, matcher, prefix, after, force, subrepos): return cmdutil.remove(self.ui, self._repo, matcher, self.wvfs.reljoin(prefix, self._path), after, force, subrepos) @annotatesubrepoerror def revert(self, substate, *pats, **opts): # reverting a subrepo is a 2 step process: # 1. if the no_backup is not set, revert all modified # files inside the subrepo # 2. 
update the subrepo to the revision specified in # the corresponding substate dictionary self.ui.status(_('reverting subrepo %s\n') % substate[0]) if not opts.get('no_backup'): # Revert all files on the subrepo, creating backups # Note that this will not recursively revert subrepos # We could do it if there was a set:subrepos() predicate opts = opts.copy() opts['date'] = None opts['rev'] = substate[1] self.filerevert(*pats, **opts) # Update the repo to the revision specified in the given substate if not opts.get('dry_run'): self.get(substate, overwrite=True) def filerevert(self, *pats, **opts): ctx = self._repo[opts['rev']] parents = self._repo.dirstate.parents() if opts.get('all'): pats = ['set:modified()'] else: pats = [] cmdutil.revert(self.ui, self._repo, ctx, parents, *pats, **opts) def shortid(self, revid): return revid[:12] def verify(self): try: rev = self._state[1] ctx = self._repo.unfiltered()[rev] if ctx.hidden(): # Since hidden revisions aren't pushed/pulled, it seems worth an # explicit warning. ui = self._repo.ui ui.warn(_("subrepo '%s' is hidden in revision %s\n") % (self._relpath, node.short(self._ctx.node()))) return 0 except error.RepoLookupError: # A missing subrepo revision may be a case of needing to pull it, so # don't treat this as an error. 
self._repo.ui.warn(_("subrepo '%s' not found in revision %s\n") % (self._relpath, node.short(self._ctx.node()))) return 0 @propertycache def wvfs(self): """return own wvfs for efficiency and consistency """ return self._repo.wvfs @propertycache def _relpath(self): """return path to this subrepository as seen from outermost repository """ # Keep consistent dir separators by avoiding vfs.join(self._path) return reporelpath(self._repo) class svnsubrepo(abstractsubrepo): def __init__(self, ctx, path, state): super(svnsubrepo, self).__init__(ctx, path) self._state = state self._exe = util.findexe('svn') if not self._exe: raise error.Abort(_("'svn' executable not found for subrepo '%s'") % self._path) def _svncommand(self, commands, filename='', failok=False): cmd = [self._exe] extrakw = {} if not self.ui.interactive(): # Making stdin be a pipe should prevent svn from behaving # interactively even if we can't pass --non-interactive. extrakw['stdin'] = subprocess.PIPE # Starting in svn 1.5 --non-interactive is a global flag # instead of being per-command, but we need to support 1.4 so # we have to be intelligent about what commands take # --non-interactive. if commands[0] in ('update', 'checkout', 'commit'): cmd.append('--non-interactive') cmd.extend(commands) if filename is not None: path = self.wvfs.reljoin(self._ctx.repo().origroot, self._path, filename) cmd.append(path) env = dict(os.environ) # Avoid localized output, preserve current locale for everything else. 
lc_all = env.get('LC_ALL') if lc_all: env['LANG'] = lc_all del env['LC_ALL'] env['LC_MESSAGES'] = 'C' p = subprocess.Popen(cmd, bufsize=-1, close_fds=util.closefds, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, env=env, **extrakw) stdout, stderr = p.communicate() stderr = stderr.strip() if not failok: if p.returncode: raise error.Abort(stderr or 'exited with code %d' % p.returncode) if stderr: self.ui.warn(stderr + '\n') return stdout, stderr @propertycache def _svnversion(self): output, err = self._svncommand(['--version', '--quiet'], filename=None) m = re.search(r'^(\d+)\.(\d+)', output) if not m: raise error.Abort(_('cannot retrieve svn tool version')) return (int(m.group(1)), int(m.group(2))) def _wcrevs(self): # Get the working directory revision as well as the last # commit revision so we can compare the subrepo state with # both. We used to store the working directory one. output, err = self._svncommand(['info', '--xml']) doc = xml.dom.minidom.parseString(output) entries = doc.getElementsByTagName('entry') lastrev, rev = '0', '0' if entries: rev = str(entries[0].getAttribute('revision')) or '0' commits = entries[0].getElementsByTagName('commit') if commits: lastrev = str(commits[0].getAttribute('revision')) or '0' return (lastrev, rev) def _wcrev(self): return self._wcrevs()[0] def _wcchanged(self): """Return (changes, extchanges, missing) where changes is True if the working directory was changed, extchanges is True if any of these changes concern an external entry and missing is True if any change is a missing entry. 
""" output, err = self._svncommand(['status', '--xml']) externals, changes, missing = [], [], [] doc = xml.dom.minidom.parseString(output) for e in doc.getElementsByTagName('entry'): s = e.getElementsByTagName('wc-status') if not s: continue item = s[0].getAttribute('item') props = s[0].getAttribute('props') path = e.getAttribute('path') if item == 'external': externals.append(path) elif item == 'missing': missing.append(path) if (item not in ('', 'normal', 'unversioned', 'external') or props not in ('', 'none', 'normal')): changes.append(path) for path in changes: for ext in externals: if path == ext or path.startswith(ext + os.sep): return True, True, bool(missing) return bool(changes), False, bool(missing) def dirty(self, ignoreupdate=False): if not self._wcchanged()[0]: if self._state[1] in self._wcrevs() or ignoreupdate: return False return True def basestate(self): lastrev, rev = self._wcrevs() if lastrev != rev: # Last committed rev is not the same than rev. We would # like to take lastrev but we do not know if the subrepo # URL exists at lastrev. Test it and fallback to rev it # is not there. try: self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)]) return lastrev except error.Abort: pass return rev @annotatesubrepoerror def commit(self, text, user, date): # user and date are out of our hands since svn is centralized changed, extchanged, missing = self._wcchanged() if not changed: return self.basestate() if extchanged: # Do not try to commit externals raise error.Abort(_('cannot commit svn externals')) if missing: # svn can commit with missing entries but aborting like hg # seems a better approach. raise error.Abort(_('cannot commit missing svn entries')) commitinfo, err = self._svncommand(['commit', '-m', text]) self.ui.status(commitinfo) newrev = re.search('Committed revision ([0-9]+).', commitinfo) if not newrev: if not commitinfo.strip(): # Sometimes, our definition of "changed" differs from # svn one. 
For instance, svn ignores missing files # when committing. If there are only missing files, no # commit is made, no output and no error code. raise error.Abort(_('failed to commit svn changes')) raise error.Abort(commitinfo.splitlines()[-1]) newrev = newrev.groups()[0] self.ui.status(self._svncommand(['update', '-r', newrev])[0]) return newrev @annotatesubrepoerror def remove(self): if self.dirty(): self.ui.warn(_('not removing repo %s because ' 'it has changes.\n') % self._path) return self.ui.note(_('removing subrepo %s\n') % self._path) self.wvfs.rmtree(forcibly=True) try: pwvfs = self._ctx.repo().wvfs pwvfs.removedirs(pwvfs.dirname(self._path)) except OSError: pass @annotatesubrepoerror def get(self, state, overwrite=False): if overwrite: self._svncommand(['revert', '--recursive']) args = ['checkout'] if self._svnversion >= (1, 5): args.append('--force') # The revision must be specified at the end of the URL to properly # update to a directory which has since been deleted and recreated. args.append('%s@%s' % (state[0], state[1])) status, err = self._svncommand(args, failok=True) _sanitize(self.ui, self.wvfs, '.svn') if not re.search('Checked out revision [0-9]+.', status): if ('is already a working copy for a different URL' in err and (self._wcchanged()[:2] == (False, False))): # obstructed but clean working copy, so just blow it away. 
self.remove() self.get(state, overwrite=False) return raise error.Abort((status or err).splitlines()[-1]) self.ui.status(status) @annotatesubrepoerror def merge(self, state): old = self._state[1] new = state[1] wcrev = self._wcrev() if new != wcrev: dirty = old == wcrev or self._wcchanged()[0] if _updateprompt(self.ui, self, dirty, wcrev, new): self.get(state, False) def push(self, opts): # push is a no-op for SVN return True @annotatesubrepoerror def files(self): output = self._svncommand(['list', '--recursive', '--xml'])[0] doc = xml.dom.minidom.parseString(output) paths = [] for e in doc.getElementsByTagName('entry'): kind = str(e.getAttribute('kind')) if kind != 'file': continue name = ''.join(c.data for c in e.getElementsByTagName('name')[0].childNodes if c.nodeType == c.TEXT_NODE) paths.append(name.encode('utf-8')) return paths def filedata(self, name): return self._svncommand(['cat'], name)[0] class gitsubrepo(abstractsubrepo): def __init__(self, ctx, path, state): super(gitsubrepo, self).__init__(ctx, path) self._state = state self._abspath = ctx.repo().wjoin(path) self._subparent = ctx.repo() self._ensuregit() def _ensuregit(self): try: self._gitexecutable = 'git' out, err = self._gitnodir(['--version']) except OSError as e: genericerror = _("error executing git for subrepo '%s': %s") notfoundhint = _("check git is installed and in your PATH") if e.errno != errno.ENOENT: raise error.Abort(genericerror % (self._path, e.strerror)) elif os.name == 'nt': try: self._gitexecutable = 'git.cmd' out, err = self._gitnodir(['--version']) except OSError as e2: if e2.errno == errno.ENOENT: raise error.Abort(_("couldn't find 'git' or 'git.cmd'" " for subrepo '%s'") % self._path, hint=notfoundhint) else: raise error.Abort(genericerror % (self._path, e2.strerror)) else: raise error.Abort(_("couldn't find git for subrepo '%s'") % self._path, hint=notfoundhint) versionstatus = self._checkversion(out) if versionstatus == 'unknown': self.ui.warn(_('cannot retrieve git 
version\n')) elif versionstatus == 'abort': raise error.Abort(_('git subrepo requires at least 1.6.0 or later')) elif versionstatus == 'warning': self.ui.warn(_('git subrepo requires at least 1.6.0 or later\n')) @staticmethod def _gitversion(out): m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out) if m: return (int(m.group(1)), int(m.group(2)), int(m.group(3))) m = re.search(r'^git version (\d+)\.(\d+)', out) if m: return (int(m.group(1)), int(m.group(2)), 0) return -1 @staticmethod def _checkversion(out): '''ensure git version is new enough >>> _checkversion = gitsubrepo._checkversion >>> _checkversion('git version 1.6.0') 'ok' >>> _checkversion('git version 1.8.5') 'ok' >>> _checkversion('git version 1.4.0') 'abort' >>> _checkversion('git version 1.5.0') 'warning' >>> _checkversion('git version 1.9-rc0') 'ok' >>> _checkversion('git version 1.9.0.265.g81cdec2') 'ok' >>> _checkversion('git version 1.9.0.GIT') 'ok' >>> _checkversion('git version 12345') 'unknown' >>> _checkversion('no') 'unknown' ''' version = gitsubrepo._gitversion(out) # git 1.4.0 can't work at all, but 1.5.X can in at least some cases, # despite the docstring comment. For now, error on 1.4.0, warn on # 1.5.0 but attempt to continue. if version == -1: return 'unknown' if version < (1, 5, 0): return 'abort' elif version < (1, 6, 0): return 'warning' return 'ok' def _gitcommand(self, commands, env=None, stream=False): return self._gitdir(commands, env=env, stream=stream)[0] def _gitdir(self, commands, env=None, stream=False): return self._gitnodir(commands, env=env, stream=stream, cwd=self._abspath) def _gitnodir(self, commands, env=None, stream=False, cwd=None): """Calls the git command The methods tries to call the git command. versions prior to 1.6.0 are not supported and very probably fail. 
""" self.ui.debug('%s: git %s\n' % (self._relpath, ' '.join(commands))) if env is None: env = os.environ.copy() # fix for Git CVE-2015-7545 if 'GIT_ALLOW_PROTOCOL' not in env: env['GIT_ALLOW_PROTOCOL'] = 'file:git:http:https:ssh' # unless ui.quiet is set, print git's stderr, # which is mostly progress and useful info errpipe = None if self.ui.quiet: errpipe = open(os.devnull, 'w') p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1, cwd=cwd, env=env, close_fds=util.closefds, stdout=subprocess.PIPE, stderr=errpipe) if stream: return p.stdout, None retdata = p.stdout.read().strip() # wait for the child to exit to avoid race condition. p.wait() if p.returncode != 0 and p.returncode != 1: # there are certain error codes that are ok command = commands[0] if command in ('cat-file', 'symbolic-ref'): return retdata, p.returncode # for all others, abort raise error.Abort('git %s error %d in %s' % (command, p.returncode, self._relpath)) return retdata, p.returncode def _gitmissing(self): return not self.wvfs.exists('.git') def _gitstate(self): return self._gitcommand(['rev-parse', 'HEAD']) def _gitcurrentbranch(self): current, err = self._gitdir(['symbolic-ref', 'HEAD', '--quiet']) if err: current = None return current def _gitremote(self, remote): out = self._gitcommand(['remote', 'show', '-n', remote]) line = out.split('\n')[1] i = line.index('URL: ') + len('URL: ') return line[i:] def _githavelocally(self, revision): out, code = self._gitdir(['cat-file', '-e', revision]) return code == 0 def _gitisancestor(self, r1, r2): base = self._gitcommand(['merge-base', r1, r2]) return base == r1 def _gitisbare(self): return self._gitcommand(['config', '--bool', 'core.bare']) == 'true' def _gitupdatestat(self): """This must be run before git diff-index. 
diff-index only looks at changes to file stat; this command looks at file contents and updates the stat.""" self._gitcommand(['update-index', '-q', '--refresh']) def _gitbranchmap(self): '''returns 2 things: a map from git branch to revision a map from revision to branches''' branch2rev = {} rev2branch = {} out = self._gitcommand(['for-each-ref', '--format', '%(objectname) %(refname)']) for line in out.split('\n'): revision, ref = line.split(' ') if (not ref.startswith('refs/heads/') and not ref.startswith('refs/remotes/')): continue if ref.startswith('refs/remotes/') and ref.endswith('/HEAD'): continue # ignore remote/HEAD redirects branch2rev[ref] = revision rev2branch.setdefault(revision, []).append(ref) return branch2rev, rev2branch def _gittracking(self, branches): 'return map of remote branch to local tracking branch' # assumes no more than one local tracking branch for each remote tracking = {} for b in branches: if b.startswith('refs/remotes/'): continue bname = b.split('/', 2)[2] remote = self._gitcommand(['config', 'branch.%s.remote' % bname]) if remote: ref = self._gitcommand(['config', 'branch.%s.merge' % bname]) tracking['refs/remotes/%s/%s' % (remote, ref.split('/', 2)[2])] = b return tracking def _abssource(self, source): if '://' not in source: # recognize the scp syntax as an absolute source colon = source.find(':') if colon != -1 and '/' not in source[:colon]: return source self._subsource = source return _abssource(self) def _fetch(self, source, revision): if self._gitmissing(): source = self._abssource(source) self.ui.status(_('cloning subrepo %s from %s\n') % (self._relpath, source)) self._gitnodir(['clone', source, self._abspath]) if self._githavelocally(revision): return self.ui.status(_('pulling subrepo %s from %s\n') % (self._relpath, self._gitremote('origin'))) # try only origin: the originally cloned repo self._gitcommand(['fetch']) if not self._githavelocally(revision): raise error.Abort(_("revision %s does not exist in subrepo %s\n") % 
(revision, self._relpath)) @annotatesubrepoerror def dirty(self, ignoreupdate=False): if self._gitmissing(): return self._state[1] != '' if self._gitisbare(): return True if not ignoreupdate and self._state[1] != self._gitstate(): # different version checked out return True # check for staged changes or modified files; ignore untracked files self._gitupdatestat() out, code = self._gitdir(['diff-index', '--quiet', 'HEAD']) return code == 1 def basestate(self): return self._gitstate() @annotatesubrepoerror def get(self, state, overwrite=False): source, revision, kind = state if not revision: self.remove() return self._fetch(source, revision) # if the repo was set to be bare, unbare it if self._gitisbare(): self._gitcommand(['config', 'core.bare', 'false']) if self._gitstate() == revision: self._gitcommand(['reset', '--hard', 'HEAD']) return elif self._gitstate() == revision: if overwrite: # first reset the index to unmark new files for commit, because # reset --hard will otherwise throw away files added for commit, # not just unmark them. self._gitcommand(['reset', 'HEAD']) self._gitcommand(['reset', '--hard', 'HEAD']) return branch2rev, rev2branch = self._gitbranchmap() def checkout(args): cmd = ['checkout'] if overwrite: # first reset the index to unmark new files for commit, because # the -f option will otherwise throw away files added for # commit, not just unmark them. 
self._gitcommand(['reset', 'HEAD']) cmd.append('-f') self._gitcommand(cmd + args) _sanitize(self.ui, self.wvfs, '.git') def rawcheckout(): # no branch to checkout, check it out with no branch self.ui.warn(_('checking out detached HEAD in subrepo %s\n') % self._relpath) self.ui.warn(_('check out a git branch if you intend ' 'to make changes\n')) checkout(['-q', revision]) if revision not in rev2branch: rawcheckout() return branches = rev2branch[revision] firstlocalbranch = None for b in branches: if b == 'refs/heads/master': # master trumps all other branches checkout(['refs/heads/master']) return if not firstlocalbranch and not b.startswith('refs/remotes/'): firstlocalbranch = b if firstlocalbranch: checkout([firstlocalbranch]) return tracking = self._gittracking(branch2rev.keys()) # choose a remote branch already tracked if possible remote = branches[0] if remote not in tracking: for b in branches: if b in tracking: remote = b break if remote not in tracking: # create a new local tracking branch local = remote.split('/', 3)[3] checkout(['-b', local, remote]) elif self._gitisancestor(branch2rev[tracking[remote]], remote): # When updating to a tracked remote branch, # if the local tracking branch is downstream of it, # a normal `git pull` would have performed a "fast-forward merge" # which is equivalent to updating the local branch to the remote. # Since we are only looking at branching at update, we need to # detect this situation and perform this action lazily. 
if tracking[remote] != self._gitcurrentbranch(): checkout([tracking[remote]]) self._gitcommand(['merge', '--ff', remote]) _sanitize(self.ui, self.wvfs, '.git') else: # a real merge would be required, just checkout the revision rawcheckout() @annotatesubrepoerror def commit(self, text, user, date): if self._gitmissing(): raise error.Abort(_("subrepo %s is missing") % self._relpath) cmd = ['commit', '-a', '-m', text] env = os.environ.copy() if user: cmd += ['--author', user] if date: # git's date parser silently ignores when seconds < 1e9 # convert to ISO8601 env['GIT_AUTHOR_DATE'] = util.datestr(date, '%Y-%m-%dT%H:%M:%S %1%2') self._gitcommand(cmd, env=env) # make sure commit works otherwise HEAD might not exist under certain # circumstances return self._gitstate() @annotatesubrepoerror def merge(self, state): source, revision, kind = state self._fetch(source, revision) base = self._gitcommand(['merge-base', revision, self._state[1]]) self._gitupdatestat() out, code = self._gitdir(['diff-index', '--quiet', 'HEAD']) def mergefunc(): if base == revision: self.get(state) # fast forward merge elif base != self._state[1]: self._gitcommand(['merge', '--no-commit', revision]) _sanitize(self.ui, self.wvfs, '.git') if self.dirty(): if self._gitstate() != revision: dirty = self._gitstate() == self._state[1] or code != 0 if _updateprompt(self.ui, self, dirty, self._state[1][:7], revision[:7]): mergefunc() else: mergefunc() @annotatesubrepoerror def push(self, opts): force = opts.get('force') if not self._state[1]: return True if self._gitmissing(): raise error.Abort(_("subrepo %s is missing") % self._relpath) # if a branch in origin contains the revision, nothing to do branch2rev, rev2branch = self._gitbranchmap() if self._state[1] in rev2branch: for b in rev2branch[self._state[1]]: if b.startswith('refs/remotes/origin/'): return True for b, revision in branch2rev.iteritems(): if b.startswith('refs/remotes/origin/'): if self._gitisancestor(self._state[1], revision): return 
True # otherwise, try to push the currently checked out branch cmd = ['push'] if force: cmd.append('--force') current = self._gitcurrentbranch() if current: # determine if the current branch is even useful if not self._gitisancestor(self._state[1], current): self.ui.warn(_('unrelated git branch checked out ' 'in subrepo %s\n') % self._relpath) return False self.ui.status(_('pushing branch %s of subrepo %s\n') % (current.split('/', 2)[2], self._relpath)) ret = self._gitdir(cmd + ['origin', current]) return ret[1] == 0 else: self.ui.warn(_('no branch checked out in subrepo %s\n' 'cannot push revision %s\n') % (self._relpath, self._state[1])) return False @annotatesubrepoerror def add(self, ui, match, prefix, explicitonly, **opts): if self._gitmissing(): return [] (modified, added, removed, deleted, unknown, ignored, clean) = self.status(None, unknown=True, clean=True) tracked = set() # dirstates 'amn' warn, 'r' is added again for l in (modified, added, deleted, clean): tracked.update(l) # Unknown files not of interest will be rejected by the matcher files = unknown files.extend(match.files()) rejected = [] files = [f for f in sorted(set(files)) if match(f)] for f in files: exact = match.exact(f) command = ["add"] if exact: command.append("-f") #should be added, even if ignored if ui.verbose or not exact: ui.status(_('adding %s\n') % match.rel(f)) if f in tracked: # hg prints 'adding' even if already tracked if exact: rejected.append(f) continue if not opts.get('dry_run'): self._gitcommand(command + [f]) for f in rejected: ui.warn(_("%s already tracked!\n") % match.abs(f)) return rejected @annotatesubrepoerror def remove(self): if self._gitmissing(): return if self.dirty(): self.ui.warn(_('not removing repo %s because ' 'it has changes.\n') % self._relpath) return # we can't fully delete the repository as it may contain # local-only history self.ui.note(_('removing subrepo %s\n') % self._relpath) self._gitcommand(['config', 'core.bare', 'true']) for f, kind in 
self.wvfs.readdir(): if f == '.git': continue if kind == stat.S_IFDIR: self.wvfs.rmtree(f) else: self.wvfs.unlink(f) def archive(self, archiver, prefix, match=None): total = 0 source, revision = self._state if not revision: return total self._fetch(source, revision) # Parse git's native archive command. # This should be much faster than manually traversing the trees # and objects with many subprocess calls. tarstream = self._gitcommand(['archive', revision], stream=True) tar = tarfile.open(fileobj=tarstream, mode='r|') relpath = subrelpath(self) self.ui.progress(_('archiving (%s)') % relpath, 0, unit=_('files')) for i, info in enumerate(tar): if info.isdir(): continue if match and not match(info.name): continue if info.issym(): data = info.linkname else: data = tar.extractfile(info).read() archiver.addfile(prefix + self._path + '/' + info.name, info.mode, info.issym(), data) total += 1 self.ui.progress(_('archiving (%s)') % relpath, i + 1, unit=_('files')) self.ui.progress(_('archiving (%s)') % relpath, None) return total @annotatesubrepoerror def cat(self, match, prefix, **opts): rev = self._state[1] if match.anypats(): return 1 #No support for include/exclude yet if not match.files(): return 1 for f in match.files(): output = self._gitcommand(["show", "%s:%s" % (rev, f)]) fp = cmdutil.makefileobj(self._subparent, opts.get('output'), self._ctx.node(), pathname=self.wvfs.reljoin(prefix, f)) fp.write(output) fp.close() return 0 @annotatesubrepoerror def status(self, rev2, **opts): rev1 = self._state[1] if self._gitmissing() or not rev1: # if the repo is missing, return no results return scmutil.status([], [], [], [], [], [], []) modified, added, removed = [], [], [] self._gitupdatestat() if rev2: command = ['diff-tree', '--no-renames', '-r', rev1, rev2] else: command = ['diff-index', '--no-renames', rev1] out = self._gitcommand(command) for line in out.split('\n'): tab = line.find('\t') if tab == -1: continue status, f = line[tab - 1], line[tab + 1:] if status == 
'M': modified.append(f) elif status == 'A': added.append(f) elif status == 'D': removed.append(f) deleted, unknown, ignored, clean = [], [], [], [] command = ['status', '--porcelain', '-z'] if opts.get('unknown'): command += ['--untracked-files=all'] if opts.get('ignored'): command += ['--ignored'] out = self._gitcommand(command) changedfiles = set() changedfiles.update(modified) changedfiles.update(added) changedfiles.update(removed) for line in out.split('\0'): if not line: continue st = line[0:2] #moves and copies show 2 files on one line if line.find('\0') >= 0: filename1, filename2 = line[3:].split('\0') else: filename1 = line[3:] filename2 = None changedfiles.add(filename1) if filename2: changedfiles.add(filename2) if st == '??': unknown.append(filename1) elif st == '!!': ignored.append(filename1) if opts.get('clean'): out = self._gitcommand(['ls-files']) for f in out.split('\n'): if not f in changedfiles: clean.append(f) return scmutil.status(modified, added, removed, deleted, unknown, ignored, clean) @annotatesubrepoerror def diff(self, ui, diffopts, node2, match, prefix, **opts): node1 = self._state[1] cmd = ['diff', '--no-renames'] if opts['stat']: cmd.append('--stat') else: # for Git, this also implies '-p' cmd.append('-U%d' % diffopts.context) gitprefix = self.wvfs.reljoin(prefix, self._path) if diffopts.noprefix: cmd.extend(['--src-prefix=%s/' % gitprefix, '--dst-prefix=%s/' % gitprefix]) else: cmd.extend(['--src-prefix=a/%s/' % gitprefix, '--dst-prefix=b/%s/' % gitprefix]) if diffopts.ignorews: cmd.append('--ignore-all-space') if diffopts.ignorewsamount: cmd.append('--ignore-space-change') if self._gitversion(self._gitcommand(['--version'])) >= (1, 8, 4) \ and diffopts.ignoreblanklines: cmd.append('--ignore-blank-lines') cmd.append(node1) if node2: cmd.append(node2) output = "" if match.always(): output += self._gitcommand(cmd) + '\n' else: st = self.status(node2)[:3] files = [f for sublist in st for f in sublist] for f in files: if match(f): output 
+= self._gitcommand(cmd + ['--', f]) + '\n' if output.strip(): ui.write(output) @annotatesubrepoerror def revert(self, substate, *pats, **opts): self.ui.status(_('reverting subrepo %s\n') % substate[0]) if not opts.get('no_backup'): status = self.status(None) names = status.modified for name in names: bakname = scmutil.origpath(self.ui, self._subparent, name) self.ui.note(_('saving current version of %s as %s\n') % (name, bakname)) self.wvfs.rename(name, bakname) if not opts.get('dry_run'): self.get(substate, overwrite=True) return [] def shortid(self, revid): return revid[:7] types = { 'hg': hgsubrepo, 'svn': svnsubrepo, 'git': gitsubrepo, } mercurial-3.7.3/mercurial/dirstate.py0000644000175000017500000013045012676531525017306 0ustar mpmmpm00000000000000# dirstate.py - working directory tracking for mercurial # # Copyright 2005-2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import collections import errno import os import stat from .i18n import _ from .node import nullid from . 
import ( encoding, error, match as matchmod, osutil, parsers, pathutil, scmutil, util, ) propertycache = util.propertycache filecache = scmutil.filecache _rangemask = 0x7fffffff dirstatetuple = parsers.dirstatetuple class repocache(filecache): """filecache for files in .hg/""" def join(self, obj, fname): return obj._opener.join(fname) class rootcache(filecache): """filecache for files in the repository root""" def join(self, obj, fname): return obj._join(fname) def _getfsnow(vfs): '''Get "now" timestamp on filesystem''' tmpfd, tmpname = vfs.mkstemp() try: return os.fstat(tmpfd).st_mtime finally: os.close(tmpfd) vfs.unlink(tmpname) def nonnormalentries(dmap): '''Compute the nonnormal dirstate entries from the dmap''' try: return parsers.nonnormalentries(dmap) except AttributeError: return set(fname for fname, e in dmap.iteritems() if e[0] != 'n' or e[3] == -1) def _trypending(root, vfs, filename): '''Open file to be read according to HG_PENDING environment variable This opens '.pending' of specified 'filename' only when HG_PENDING is equal to 'root'. This returns '(fp, is_pending_opened)' tuple. ''' if root == os.environ.get('HG_PENDING'): try: return (vfs('%s.pending' % filename), True) except IOError as inst: if inst.errno != errno.ENOENT: raise return (vfs(filename), False) class dirstate(object): def __init__(self, opener, ui, root, validate): '''Create a new dirstate object. opener is an open()-like callable that can be used to open the dirstate file; root is the root of the directory tracked by the dirstate. 
''' self._opener = opener self._validate = validate self._root = root # ntpath.join(root, '') of Python 2.7.9 does not add sep if root is # UNC path pointing to root share (issue4557) self._rootdir = pathutil.normasprefix(root) # internal config: ui.forcecwd forcecwd = ui.config('ui', 'forcecwd') if forcecwd: self._cwd = forcecwd self._dirty = False self._dirtypl = False self._lastnormaltime = 0 self._ui = ui self._filecache = {} self._parentwriters = 0 self._filename = 'dirstate' self._pendingfilename = '%s.pending' % self._filename # for consistent view between _pl() and _read() invocations self._pendingmode = None def beginparentchange(self): '''Marks the beginning of a set of changes that involve changing the dirstate parents. If there is an exception during this time, the dirstate will not be written when the wlock is released. This prevents writing an incoherent dirstate where the parent doesn't match the contents. ''' self._parentwriters += 1 def endparentchange(self): '''Marks the end of a set of changes that involve changing the dirstate parents. Once all parent changes have been marked done, the wlock will be free to write the dirstate on release. ''' if self._parentwriters > 0: self._parentwriters -= 1 def pendingparentchange(self): '''Returns true if the dirstate is in the middle of a set of changes that modify the dirstate parent. 
''' return self._parentwriters > 0 @propertycache def _map(self): '''Return the dirstate contents as a map from filename to (state, mode, size, time).''' self._read() return self._map @propertycache def _copymap(self): self._read() return self._copymap @propertycache def _nonnormalset(self): return nonnormalentries(self._map) @propertycache def _filefoldmap(self): try: makefilefoldmap = parsers.make_file_foldmap except AttributeError: pass else: return makefilefoldmap(self._map, util.normcasespec, util.normcasefallback) f = {} normcase = util.normcase for name, s in self._map.iteritems(): if s[0] != 'r': f[normcase(name)] = name f['.'] = '.' # prevents useless util.fspath() invocation return f @propertycache def _dirfoldmap(self): f = {} normcase = util.normcase for name in self._dirs: f[normcase(name)] = name return f @repocache('branch') def _branch(self): try: return self._opener.read("branch").strip() or "default" except IOError as inst: if inst.errno != errno.ENOENT: raise return "default" @propertycache def _pl(self): try: fp = self._opendirstatefile() st = fp.read(40) fp.close() l = len(st) if l == 40: return st[:20], st[20:40] elif l > 0 and l < 40: raise error.Abort(_('working directory state appears damaged!')) except IOError as err: if err.errno != errno.ENOENT: raise return [nullid, nullid] @propertycache def _dirs(self): return util.dirs(self._map, 'r') def dirs(self): return self._dirs @rootcache('.hgignore') def _ignore(self): files = self._ignorefiles() if not files: return util.never pats = ['include:%s' % f for f in files] return matchmod.match(self._root, '', [], pats, warn=self._ui.warn) @propertycache def _slash(self): return self._ui.configbool('ui', 'slash') and os.sep != '/' @propertycache def _checklink(self): return util.checklink(self._root) @propertycache def _checkexec(self): return util.checkexec(self._root) @propertycache def _checkcase(self): return not util.checkcase(self._join('.hg')) def _join(self, f): # much faster than 
os.path.join() # it's safe because f is always a relative path return self._rootdir + f def flagfunc(self, buildfallback): if self._checklink and self._checkexec: def f(x): try: st = os.lstat(self._join(x)) if util.statislink(st): return 'l' if util.statisexec(st): return 'x' except OSError: pass return '' return f fallback = buildfallback() if self._checklink: def f(x): if os.path.islink(self._join(x)): return 'l' if 'x' in fallback(x): return 'x' return '' return f if self._checkexec: def f(x): if 'l' in fallback(x): return 'l' if util.isexec(self._join(x)): return 'x' return '' return f else: return fallback @propertycache def _cwd(self): return os.getcwd() def getcwd(self): '''Return the path from which a canonical path is calculated. This path should be used to resolve file patterns or to convert canonical paths back to file paths for display. It shouldn't be used to get real file paths. Use vfs functions instead. ''' cwd = self._cwd if cwd == self._root: return '' # self._root ends with a path separator if self._root is '/' or 'C:\' rootsep = self._root if not util.endswithsep(rootsep): rootsep += os.sep if cwd.startswith(rootsep): return cwd[len(rootsep):] else: # we're outside the repo. return an absolute path. return cwd def pathto(self, f, cwd=None): if cwd is None: cwd = self.getcwd() path = util.pathto(self._root, cwd, f) if self._slash: return util.pconvert(path) return path def __getitem__(self, key): '''Return the current state of key (a filename) in the dirstate. States are: n normal m needs merging r marked for removal a marked for addition ? 
not tracked ''' return self._map.get(key, ("?",))[0] def __contains__(self, key): return key in self._map def __iter__(self): for x in sorted(self._map): yield x def iteritems(self): return self._map.iteritems() def parents(self): return [self._validate(p) for p in self._pl] def p1(self): return self._validate(self._pl[0]) def p2(self): return self._validate(self._pl[1]) def branch(self): return encoding.tolocal(self._branch) def setparents(self, p1, p2=nullid): """Set dirstate parents to p1 and p2. When moving from two parents to one, 'm' merged entries a adjusted to normal and previous copy records discarded and returned by the call. See localrepo.setparents() """ if self._parentwriters == 0: raise ValueError("cannot set dirstate parent without " "calling dirstate.beginparentchange") self._dirty = self._dirtypl = True oldp2 = self._pl[1] self._pl = p1, p2 copies = {} if oldp2 != nullid and p2 == nullid: for f, s in self._map.iteritems(): # Discard 'm' markers when moving away from a merge state if s[0] == 'm': if f in self._copymap: copies[f] = self._copymap[f] self.normallookup(f) # Also fix up otherparent markers elif s[0] == 'n' and s[2] == -2: if f in self._copymap: copies[f] = self._copymap[f] self.add(f) return copies def setbranch(self, branch): self._branch = encoding.fromlocal(branch) f = self._opener('branch', 'w', atomictemp=True) try: f.write(self._branch + '\n') f.close() # make sure filecache has the correct stat info for _branch after # replacing the underlying file ce = self._filecache['_branch'] if ce: ce.refresh() except: # re-raises f.discard() raise def _opendirstatefile(self): fp, mode = _trypending(self._root, self._opener, self._filename) if self._pendingmode is not None and self._pendingmode != mode: fp.close() raise error.Abort(_('working directory state may be ' 'changed parallelly')) self._pendingmode = mode return fp def _read(self): self._map = {} self._copymap = {} try: fp = self._opendirstatefile() try: st = fp.read() finally: 
fp.close() except IOError as err: if err.errno != errno.ENOENT: raise return if not st: return if util.safehasattr(parsers, 'dict_new_presized'): # Make an estimate of the number of files in the dirstate based on # its size. From a linear regression on a set of real-world repos, # all over 10,000 files, the size of a dirstate entry is 85 # bytes. The cost of resizing is significantly higher than the cost # of filling in a larger presized dict, so subtract 20% from the # size. # # This heuristic is imperfect in many ways, so in a future dirstate # format update it makes sense to just record the number of entries # on write. self._map = parsers.dict_new_presized(len(st) / 71) # Python's garbage collector triggers a GC each time a certain number # of container objects (the number being defined by # gc.get_threshold()) are allocated. parse_dirstate creates a tuple # for each file in the dirstate. The C version then immediately marks # them as not to be tracked by the collector. However, this has no # effect on when GCs are triggered, only on what objects the GC looks # into. This means that O(number of files) GCs are unavoidable. # Depending on when in the process's lifetime the dirstate is parsed, # this can get very expensive. As a workaround, disable GC while # parsing the dirstate. # # (we cannot decorate the function directly since it is in a C module) parse_dirstate = util.nogc(parsers.parse_dirstate) p = parse_dirstate(self._map, self._copymap, st) if not self._dirtypl: self._pl = p def invalidate(self): for a in ("_map", "_copymap", "_filefoldmap", "_dirfoldmap", "_branch", "_pl", "_dirs", "_ignore", "_nonnormalset"): if a in self.__dict__: delattr(self, a) self._lastnormaltime = 0 self._dirty = False self._parentwriters = 0 def copy(self, source, dest): """Mark dest as a copy of source. 
Unmark dest if source is None.""" if source == dest: return self._dirty = True if source is not None: self._copymap[dest] = source elif dest in self._copymap: del self._copymap[dest] def copied(self, file): return self._copymap.get(file, None) def copies(self): return self._copymap def _droppath(self, f): if self[f] not in "?r" and "_dirs" in self.__dict__: self._dirs.delpath(f) if "_filefoldmap" in self.__dict__: normed = util.normcase(f) if normed in self._filefoldmap: del self._filefoldmap[normed] def _addpath(self, f, state, mode, size, mtime): oldstate = self[f] if state == 'a' or oldstate == 'r': scmutil.checkfilename(f) if f in self._dirs: raise error.Abort(_('directory %r already in dirstate') % f) # shadows for d in util.finddirs(f): if d in self._dirs: break if d in self._map and self[d] != 'r': raise error.Abort( _('file %r in dirstate clashes with %r') % (d, f)) if oldstate in "?r" and "_dirs" in self.__dict__: self._dirs.addpath(f) self._dirty = True self._map[f] = dirstatetuple(state, mode, size, mtime) if state != 'n' or mtime == -1: self._nonnormalset.add(f) def normal(self, f): '''Mark a file normal and clean.''' s = os.lstat(self._join(f)) mtime = s.st_mtime self._addpath(f, 'n', s.st_mode, s.st_size & _rangemask, mtime & _rangemask) if f in self._copymap: del self._copymap[f] if f in self._nonnormalset: self._nonnormalset.remove(f) if mtime > self._lastnormaltime: # Remember the most recent modification timeslot for status(), # to make sure we won't miss future size-preserving file content # modifications that happen within the same timeslot. self._lastnormaltime = mtime def normallookup(self, f): '''Mark a file normal, but possibly dirty.''' if self._pl[1] != nullid and f in self._map: # if there is a merge going on and the file was either # in state 'm' (-1) or coming from other parent (-2) before # being removed, restore that state. 
entry = self._map[f] if entry[0] == 'r' and entry[2] in (-1, -2): source = self._copymap.get(f) if entry[2] == -1: self.merge(f) elif entry[2] == -2: self.otherparent(f) if source: self.copy(source, f) return if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2: return self._addpath(f, 'n', 0, -1, -1) if f in self._copymap: del self._copymap[f] if f in self._nonnormalset: self._nonnormalset.remove(f) def otherparent(self, f): '''Mark as coming from the other parent, always dirty.''' if self._pl[1] == nullid: raise error.Abort(_("setting %r to other parent " "only allowed in merges") % f) if f in self and self[f] == 'n': # merge-like self._addpath(f, 'm', 0, -2, -1) else: # add-like self._addpath(f, 'n', 0, -2, -1) if f in self._copymap: del self._copymap[f] def add(self, f): '''Mark a file added.''' self._addpath(f, 'a', 0, -1, -1) if f in self._copymap: del self._copymap[f] def remove(self, f): '''Mark a file removed.''' self._dirty = True self._droppath(f) size = 0 if self._pl[1] != nullid and f in self._map: # backup the previous state entry = self._map[f] if entry[0] == 'm': # merge size = -1 elif entry[0] == 'n' and entry[2] == -2: # other parent size = -2 self._map[f] = dirstatetuple('r', 0, size, 0) self._nonnormalset.add(f) if size == 0 and f in self._copymap: del self._copymap[f] def merge(self, f): '''Mark a file merged.''' if self._pl[1] == nullid: return self.normallookup(f) return self.otherparent(f) def drop(self, f): '''Drop a file from the dirstate''' if f in self._map: self._dirty = True self._droppath(f) del self._map[f] if f in self._nonnormalset: self._nonnormalset.remove(f) def _discoverpath(self, path, normed, ignoremissing, exists, storemap): if exists is None: exists = os.path.lexists(os.path.join(self._root, path)) if not exists: # Maybe a path component exists if not ignoremissing and '/' in path: d, f = path.rsplit('/', 1) d = self._normalize(d, False, ignoremissing, None) folded = d + "/" + f else: # No path components, preserve 
original case folded = path else: # recursively normalize leading directory components # against dirstate if '/' in normed: d, f = normed.rsplit('/', 1) d = self._normalize(d, False, ignoremissing, True) r = self._root + "/" + d folded = d + "/" + util.fspath(f, r) else: folded = util.fspath(normed, self._root) storemap[normed] = folded return folded def _normalizefile(self, path, isknown, ignoremissing=False, exists=None): normed = util.normcase(path) folded = self._filefoldmap.get(normed, None) if folded is None: if isknown: folded = path else: folded = self._discoverpath(path, normed, ignoremissing, exists, self._filefoldmap) return folded def _normalize(self, path, isknown, ignoremissing=False, exists=None): normed = util.normcase(path) folded = self._filefoldmap.get(normed, None) if folded is None: folded = self._dirfoldmap.get(normed, None) if folded is None: if isknown: folded = path else: # store discovered result in dirfoldmap so that future # normalizefile calls don't start matching directories folded = self._discoverpath(path, normed, ignoremissing, exists, self._dirfoldmap) return folded def normalize(self, path, isknown=False, ignoremissing=False): ''' normalize the case of a pathname when on a casefolding filesystem isknown specifies whether the filename came from walking the disk, to avoid extra filesystem access. If ignoremissing is True, missing path are returned unchanged. Otherwise, we try harder to normalize possibly existing path components. 
The normalized case is determined based on the following precedence: - version of name already stored in the dirstate - version of name stored on disk - version provided via command arguments ''' if self._checkcase: return self._normalize(path, isknown, ignoremissing) return path def clear(self): self._map = {} self._nonnormalset = set() if "_dirs" in self.__dict__: delattr(self, "_dirs") self._copymap = {} self._pl = [nullid, nullid] self._lastnormaltime = 0 self._dirty = True def rebuild(self, parent, allfiles, changedfiles=None): if changedfiles is None: # Rebuild entire dirstate changedfiles = allfiles lastnormaltime = self._lastnormaltime self.clear() self._lastnormaltime = lastnormaltime for f in changedfiles: mode = 0o666 if f in allfiles and 'x' in allfiles.flags(f): mode = 0o777 if f in allfiles: self._map[f] = dirstatetuple('n', mode, -1, 0) else: self._map.pop(f, None) if f in self._nonnormalset: self._nonnormalset.remove(f) self._pl = (parent, nullid) self._dirty = True def write(self, tr=False): if not self._dirty: return filename = self._filename if tr is False: # not explicitly specified if (self._ui.configbool('devel', 'all-warnings') or self._ui.configbool('devel', 'check-dirstate-write')): self._ui.develwarn('use dirstate.write with ' 'repo.currenttransaction()') if self._opener.lexists(self._pendingfilename): # if pending file already exists, in-memory changes # should be written into it, because it has priority # to '.hg/dirstate' at reading under HG_PENDING mode filename = self._pendingfilename elif tr: # 'dirstate.write()' is not only for writing in-memory # changes out, but also for dropping ambiguous timestamp. # delayed writing re-raise "ambiguous timestamp issue". 
# See also the wiki page below for detail: # https://www.mercurial-scm.org/wiki/DirstateTransactionPlan # emulate dropping timestamp in 'parsers.pack_dirstate' now = _getfsnow(self._opener) dmap = self._map for f, e in dmap.iteritems(): if e[0] == 'n' and e[3] == now: dmap[f] = dirstatetuple(e[0], e[1], e[2], -1) self._nonnormalset.add(f) # emulate that all 'dirstate.normal' results are written out self._lastnormaltime = 0 # delay writing in-memory changes out tr.addfilegenerator('dirstate', (self._filename,), self._writedirstate, location='plain') return st = self._opener(filename, "w", atomictemp=True) self._writedirstate(st) def _writedirstate(self, st): # use the modification time of the newly created temporary file as the # filesystem's notion of 'now' now = util.fstat(st).st_mtime & _rangemask # enough 'delaywrite' prevents 'pack_dirstate' from dropping # timestamp of each entries in dirstate, because of 'now > mtime' delaywrite = self._ui.configint('debug', 'dirstate.delaywrite', 0) if delaywrite > 0: # do we have any files to delay for? 
for f, e in self._map.iteritems(): if e[0] == 'n' and e[3] == now: import time # to avoid useless import # rather than sleep n seconds, sleep until the next # multiple of n seconds clock = time.time() start = int(clock) - (int(clock) % delaywrite) end = start + delaywrite time.sleep(end - clock) break st.write(parsers.pack_dirstate(self._map, self._copymap, self._pl, now)) self._nonnormalset = nonnormalentries(self._map) st.close() self._lastnormaltime = 0 self._dirty = self._dirtypl = False def _dirignore(self, f): if f == '.': return False if self._ignore(f): return True for p in util.finddirs(f): if self._ignore(p): return True return False def _ignorefiles(self): files = [] if os.path.exists(self._join('.hgignore')): files.append(self._join('.hgignore')) for name, path in self._ui.configitems("ui"): if name == 'ignore' or name.startswith('ignore.'): # we need to use os.path.join here rather than self._join # because path is arbitrary and user-specified files.append(os.path.join(self._rootdir, util.expandpath(path))) return files def _ignorefileandline(self, f): files = collections.deque(self._ignorefiles()) visited = set() while files: i = files.popleft() patterns = matchmod.readpatternfile(i, self._ui.warn, sourceinfo=True) for pattern, lineno, line in patterns: kind, p = matchmod._patsplit(pattern, 'glob') if kind == "subinclude": if p not in visited: files.append(p) continue m = matchmod.match(self._root, '', [], [pattern], warn=self._ui.warn) if m(f): return (i, lineno, line) visited.add(i) return (None, -1, "") def _walkexplicit(self, match, subrepos): '''Get stat data about the files explicitly specified by match. Return a triple (results, dirsfound, dirsnotfound). - results is a mapping from filename to stat result. It also contains listings mapping subrepos and .hg to None. - dirsfound is a list of files found to be directories. 
- dirsnotfound is a list of files that the dirstate thinks are directories and that were not found.''' def badtype(mode): kind = _('unknown') if stat.S_ISCHR(mode): kind = _('character device') elif stat.S_ISBLK(mode): kind = _('block device') elif stat.S_ISFIFO(mode): kind = _('fifo') elif stat.S_ISSOCK(mode): kind = _('socket') elif stat.S_ISDIR(mode): kind = _('directory') return _('unsupported file type (type is %s)') % kind matchedir = match.explicitdir badfn = match.bad dmap = self._map lstat = os.lstat getkind = stat.S_IFMT dirkind = stat.S_IFDIR regkind = stat.S_IFREG lnkkind = stat.S_IFLNK join = self._join dirsfound = [] foundadd = dirsfound.append dirsnotfound = [] notfoundadd = dirsnotfound.append if not match.isexact() and self._checkcase: normalize = self._normalize else: normalize = None files = sorted(match.files()) subrepos.sort() i, j = 0, 0 while i < len(files) and j < len(subrepos): subpath = subrepos[j] + "/" if files[i] < subpath: i += 1 continue while i < len(files) and files[i].startswith(subpath): del files[i] j += 1 if not files or '.' in files: files = ['.'] results = dict.fromkeys(subrepos) results['.hg'] = None alldirs = None for ff in files: # constructing the foldmap is expensive, so don't do it for the # common case where files is ['.'] if normalize and ff != '.': nf = normalize(ff, False, True) else: nf = ff if nf in results: continue try: st = lstat(join(nf)) kind = getkind(st.st_mode) if kind == dirkind: if nf in dmap: # file replaced by dir on disk but still in dirstate results[nf] = None if matchedir: matchedir(nf) foundadd((nf, ff)) elif kind == regkind or kind == lnkkind: results[nf] = st else: badfn(ff, badtype(kind)) if nf in dmap: results[nf] = None except OSError as inst: # nf not found on disk - it is dirstate only if nf in dmap: # does it exactly match a missing file? results[nf] = None else: # does it match a missing directory? 
if alldirs is None: alldirs = util.dirs(dmap) if nf in alldirs: if matchedir: matchedir(nf) notfoundadd(nf) else: badfn(ff, inst.strerror) # Case insensitive filesystems cannot rely on lstat() failing to detect # a case-only rename. Prune the stat object for any file that does not # match the case in the filesystem, if there are multiple files that # normalize to the same path. if match.isexact() and self._checkcase: normed = {} for f, st in results.iteritems(): if st is None: continue nc = util.normcase(f) paths = normed.get(nc) if paths is None: paths = set() normed[nc] = paths paths.add(f) for norm, paths in normed.iteritems(): if len(paths) > 1: for path in paths: folded = self._discoverpath(path, norm, True, None, self._dirfoldmap) if path != folded: results[path] = None return results, dirsfound, dirsnotfound def walk(self, match, subrepos, unknown, ignored, full=True): ''' Walk recursively through the directory tree, finding all files matched by match. If full is False, maybe skip some known-clean files. Return a dict mapping filename to stat-like object (either mercurial.osutil.stat instance or return value of os.stat()). ''' # full is a flag that extensions that hook into walk can use -- this # implementation doesn't use it at all. This satisfies the contract # because we only guarantee a "maybe". 
if ignored: ignore = util.never dirignore = util.never elif unknown: ignore = self._ignore dirignore = self._dirignore else: # if not unknown and not ignored, drop dir recursion and step 2 ignore = util.always dirignore = util.always matchfn = match.matchfn matchalways = match.always() matchtdir = match.traversedir dmap = self._map listdir = osutil.listdir lstat = os.lstat dirkind = stat.S_IFDIR regkind = stat.S_IFREG lnkkind = stat.S_IFLNK join = self._join exact = skipstep3 = False if match.isexact(): # match.exact exact = True dirignore = util.always # skip step 2 elif match.prefix(): # match.match, no patterns skipstep3 = True if not exact and self._checkcase: normalize = self._normalize normalizefile = self._normalizefile skipstep3 = False else: normalize = self._normalize normalizefile = None # step 1: find all explicit files results, work, dirsnotfound = self._walkexplicit(match, subrepos) skipstep3 = skipstep3 and not (work or dirsnotfound) work = [d for d in work if not dirignore(d[0])] # step 2: visit subdirectories def traverse(work, alreadynormed): wadd = work.append while work: nd = work.pop() skip = None if nd == '.': nd = '' else: skip = '.hg' try: entries = listdir(join(nd), stat=True, skip=skip) except OSError as inst: if inst.errno in (errno.EACCES, errno.ENOENT): match.bad(self.pathto(nd), inst.strerror) continue raise for f, kind, st in entries: if normalizefile: # even though f might be a directory, we're only # interested in comparing it to files currently in the # dmap -- therefore normalizefile is enough nf = normalizefile(nd and (nd + "/" + f) or f, True, True) else: nf = nd and (nd + "/" + f) or f if nf not in results: if kind == dirkind: if not ignore(nf): if matchtdir: matchtdir(nf) wadd(nf) if nf in dmap and (matchalways or matchfn(nf)): results[nf] = None elif kind == regkind or kind == lnkkind: if nf in dmap: if matchalways or matchfn(nf): results[nf] = st elif ((matchalways or matchfn(nf)) and not ignore(nf)): # unknown file -- 
normalize if necessary if not alreadynormed: nf = normalize(nf, False, True) results[nf] = st elif nf in dmap and (matchalways or matchfn(nf)): results[nf] = None for nd, d in work: # alreadynormed means that processwork doesn't have to do any # expensive directory normalization alreadynormed = not normalize or nd == d traverse([d], alreadynormed) for s in subrepos: del results[s] del results['.hg'] # step 3: visit remaining files from dmap if not skipstep3 and not exact: # If a dmap file is not in results yet, it was either # a) not matching matchfn b) ignored, c) missing, or d) under a # symlink directory. if not results and matchalways: visit = dmap.keys() else: visit = [f for f in dmap if f not in results and matchfn(f)] visit.sort() if unknown: # unknown == True means we walked all dirs under the roots # that wasn't ignored, and everything that matched was stat'ed # and is already in results. # The rest must thus be ignored or under a symlink. audit_path = pathutil.pathauditor(self._root) for nf in iter(visit): # If a stat for the same file was already added with a # different case, don't add one for this, since that would # make it appear as if the file exists under both names # on disk. if (normalizefile and normalizefile(nf, True, True) in results): results[nf] = None # Report ignored items in the dmap as long as they are not # under a symlink directory. elif audit_path.check(nf): try: results[nf] = lstat(join(nf)) # file was just ignored, no links, and exists except OSError: # file doesn't exist results[nf] = None else: # It's either missing or under a symlink directory # which we in this case report as missing results[nf] = None else: # We may not have walked the full directory tree above, # so stat and check everything we missed. 
nf = iter(visit).next for st in util.statfiles([join(i) for i in visit]): results[nf()] = st return results def status(self, match, subrepos, ignored, clean, unknown): '''Determine the status of the working copy relative to the dirstate and return a pair of (unsure, status), where status is of type scmutil.status and: unsure: files that might have been modified since the dirstate was written, but need to be read to be sure (size is the same but mtime differs) status.modified: files that have definitely been modified since the dirstate was written (different size or mode) status.clean: files that have definitely not been modified since the dirstate was written ''' listignored, listclean, listunknown = ignored, clean, unknown lookup, modified, added, unknown, ignored = [], [], [], [], [] removed, deleted, clean = [], [], [] dmap = self._map ladd = lookup.append # aka "unsure" madd = modified.append aadd = added.append uadd = unknown.append iadd = ignored.append radd = removed.append dadd = deleted.append cadd = clean.append mexact = match.exact dirignore = self._dirignore checkexec = self._checkexec copymap = self._copymap lastnormaltime = self._lastnormaltime # We need to do full walks when either # - we're listing all clean files, or # - match.traversedir does something, because match.traversedir should # be called for every dir in the working dir full = listclean or match.traversedir is not None for fn, st in self.walk(match, subrepos, listunknown, listignored, full=full).iteritems(): if fn not in dmap: if (listignored or mexact(fn)) and dirignore(fn): if listignored: iadd(fn) else: uadd(fn) continue # This is equivalent to 'state, mode, size, time = dmap[fn]' but not # written like that for performance reasons. dmap[fn] is not a # Python tuple in compiled builds. The CPython UNPACK_SEQUENCE # opcode has fast paths when the value to be unpacked is a tuple or # a list, but falls back to creating a full-fledged iterator in # general. 
            # That is much slower than simply accessing and storing the
            # tuple members one by one.
            t = dmap[fn]
            state = t[0]
            mode = t[1]
            size = t[2]
            time = t[3]

            if not st and state in "nma":
                # tracked in dirstate but gone from disk -> deleted
                dadd(fn)
            elif state == 'n':
                # 'normal': compare size/mode/mtime against the on-disk stat
                # (_rangemask comparisons tolerate the 31-bit truncation
                # applied when the dirstate was written)
                if (size >= 0 and
                    ((size != st.st_size and size != st.st_size & _rangemask)
                     or ((mode ^ st.st_mode) & 0o100 and checkexec))
                    or size == -2 # other parent
                    or fn in copymap):
                    madd(fn)
                elif time != st.st_mtime and time != st.st_mtime & _rangemask:
                    # mtime changed but size didn't: content read needed
                    ladd(fn)
                elif st.st_mtime == lastnormaltime:
                    # fn may have just been marked as normal and it may have
                    # changed in the same second without changing its size.
                    # This can happen if we quickly do multiple commits.
                    # Force lookup, so we don't miss such a racy file change.
                    ladd(fn)
                elif listclean:
                    cadd(fn)
            elif state == 'm':
                madd(fn)
            elif state == 'a':
                aadd(fn)
            elif state == 'r':
                radd(fn)

        return (lookup, scmutil.status(modified, added, removed, deleted,
                                       unknown, ignored, clean))

    def matches(self, match):
        '''
        return files in the dirstate (in whatever state) filtered by match
        '''
        dmap = self._map
        if match.always():
            return dmap.keys()
        files = match.files()
        if match.isexact():
            # fast path -- filter the other way around, since typically files is
            # much smaller than dmap
            return [f for f in files if f in dmap]
        if match.prefix() and all(fn in dmap for fn in files):
            # fast path -- all the values are known to be files, so just return
            # that
            return list(files)
        return [f for f in dmap if match(f)]

    def _actualfilename(self, tr):
        # Return the file the dirstate should be written to: the pending
        # file while a transaction is running, the real file otherwise.
        if tr:
            return self._pendingfilename
        else:
            return self._filename

    def _savebackup(self, tr, suffix):
        '''Save current dirstate into backup file with suffix'''
        filename = self._actualfilename(tr)

        # use '_writedirstate' instead of 'write' to write changes certainly,
        # because the latter omits writing out if transaction is running.
        # output file will be used to create backup of dirstate at this point.
self._writedirstate(self._opener(filename, "w", atomictemp=True)) if tr: # ensure that subsequent tr.writepending returns True for # changes written out above, even if dirstate is never # changed after this tr.addfilegenerator('dirstate', (self._filename,), self._writedirstate, location='plain') # ensure that pending file written above is unlinked at # failure, even if tr.writepending isn't invoked until the # end of this transaction tr.registertmp(filename, location='plain') self._opener.write(filename + suffix, self._opener.tryread(filename)) def _restorebackup(self, tr, suffix): '''Restore dirstate by backup file with suffix''' # this "invalidate()" prevents "wlock.release()" from writing # changes of dirstate out after restoring from backup file self.invalidate() filename = self._actualfilename(tr) self._opener.rename(filename + suffix, filename) def _clearbackup(self, tr, suffix): '''Clear backup file with suffix''' filename = self._actualfilename(tr) self._opener.unlink(filename + suffix) mercurial-3.7.3/mercurial/revlog.py0000644000175000017500000017416612676531525017001 0ustar mpmmpm00000000000000# revlog.py - storage back-end for mercurial # # Copyright 2005-2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. """Storage back-end for Mercurial. This provides efficient delta storage with O(1) retrieve and append and O(changes) merge between branches. """ from __future__ import absolute_import import collections import errno import os import struct import zlib # import stuff from node for others to import from revlog from .node import ( bin, hex, nullid, nullrev, ) from .i18n import _ from . 
import ( ancestor, error, mdiff, parsers, templatefilters, util, ) _pack = struct.pack _unpack = struct.unpack _compress = zlib.compress _decompress = zlib.decompress _sha = util.sha1 # revlog header flags REVLOGV0 = 0 REVLOGNG = 1 REVLOGNGINLINEDATA = (1 << 16) REVLOGGENERALDELTA = (1 << 17) REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA REVLOG_DEFAULT_FORMAT = REVLOGNG REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS REVLOGNG_FLAGS = REVLOGNGINLINEDATA | REVLOGGENERALDELTA # revlog index flags REVIDX_ISCENSORED = (1 << 15) # revision has censor metadata, must be verified REVIDX_DEFAULT_FLAGS = 0 REVIDX_KNOWN_FLAGS = REVIDX_ISCENSORED # max size of revlog with inline data _maxinline = 131072 _chunksize = 1048576 RevlogError = error.RevlogError LookupError = error.LookupError CensoredNodeError = error.CensoredNodeError def getoffset(q): return int(q >> 16) def gettype(q): return int(q & 0xFFFF) def offset_type(offset, type): return long(long(offset) << 16 | type) _nullhash = _sha(nullid) def hash(text, p1, p2): """generate a hash from the given text and its parent hashes This hash combines both the current file contents and its history in a manner that makes it easy to distinguish nodes with the same content in the revision graph. 
""" # As of now, if one of the parent node is null, p2 is null if p2 == nullid: # deep copy of a hash is faster than creating one s = _nullhash.copy() s.update(p1) else: # none of the parent nodes are nullid l = [p1, p2] l.sort() s = _sha(l[0]) s.update(l[1]) s.update(text) return s.digest() def decompress(bin): """ decompress the given input """ if not bin: return bin t = bin[0] if t == '\0': return bin if t == 'x': try: return _decompress(bin) except zlib.error as e: raise RevlogError(_("revlog decompress error: %s") % str(e)) if t == 'u': return util.buffer(bin, 1) raise RevlogError(_("unknown compression type %r") % t) # index v0: # 4 bytes: offset # 4 bytes: compressed length # 4 bytes: base rev # 4 bytes: link rev # 20 bytes: parent 1 nodeid # 20 bytes: parent 2 nodeid # 20 bytes: nodeid indexformatv0 = ">4l20s20s20s" class revlogoldio(object): def __init__(self): self.size = struct.calcsize(indexformatv0) def parseindex(self, data, inline): s = self.size index = [] nodemap = {nullid: nullrev} n = off = 0 l = len(data) while off + s <= l: cur = data[off:off + s] off += s e = _unpack(indexformatv0, cur) # transform to revlogv1 format e2 = (offset_type(e[0], 0), e[1], -1, e[2], e[3], nodemap.get(e[4], nullrev), nodemap.get(e[5], nullrev), e[6]) index.append(e2) nodemap[e[6]] = n n += 1 # add the magic null revision at -1 index.append((0, 0, 0, -1, -1, -1, -1, nullid)) return index, nodemap, None def packentry(self, entry, node, version, rev): if gettype(entry[0]): raise RevlogError(_("index entry flags need RevlogNG")) e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4], node(entry[5]), node(entry[6]), entry[7]) return _pack(indexformatv0, *e2) # index ng: # 6 bytes: offset # 2 bytes: flags # 4 bytes: compressed length # 4 bytes: uncompressed length # 4 bytes: base rev # 4 bytes: link rev # 4 bytes: parent 1 rev # 4 bytes: parent 2 rev # 32 bytes: nodeid indexformatng = ">Qiiiiii20s12x" versionformat = ">I" # corresponds to uncompressed length of 
indexformatng (2 gigs, 4-byte # signed integer) _maxentrysize = 0x7fffffff class revlogio(object): def __init__(self): self.size = struct.calcsize(indexformatng) def parseindex(self, data, inline): # call the C implementation to parse the index data index, cache = parsers.parse_index2(data, inline) return index, getattr(index, 'nodemap', None), cache def packentry(self, entry, node, version, rev): p = _pack(indexformatng, *entry) if rev == 0: p = _pack(versionformat, version) + p[4:] return p class revlog(object): """ the underlying revision storage object A revlog consists of two parts, an index and the revision data. The index is a file with a fixed record size containing information on each revision, including its nodeid (hash), the nodeids of its parents, the position and offset of its data within the data file, and the revision it's based on. Finally, each entry contains a linkrev entry that can serve as a pointer to external data. The revision data itself is a linear collection of data chunks. Each chunk represents a revision and is usually represented as a delta against the previous chunk. To bound lookup time, runs of deltas are limited to about 2 times the length of the original version data. This makes retrieval of a version proportional to its size, or O(1) relative to the number of revisions. Both pieces of the revlog are written to in an append-only fashion, which means we never need to rewrite a file to insert or remove data, and can use some simple techniques to avoid the need for locking while reading. """ def __init__(self, opener, indexfile): """ create a revlog object opener is a function that abstracts the file opening operation and can be used to implement COW semantics or the like. """ self.indexfile = indexfile self.datafile = indexfile[:-2] + ".d" self.opener = opener # 3-tuple of (node, rev, text) for a raw revision. self._cache = None # 2-tuple of (rev, baserev) defining the base revision the delta chain # begins at for a revision. 
self._basecache = None # 2-tuple of (offset, data) of raw data from the revlog at an offset. self._chunkcache = (0, '') # How much data to read and cache into the raw revlog data cache. self._chunkcachesize = 65536 self._maxchainlen = None self._aggressivemergedeltas = False self.index = [] # Mapping of partial identifiers to full nodes. self._pcache = {} # Mapping of revision integer to full node. self._nodecache = {nullid: nullrev} self._nodepos = None v = REVLOG_DEFAULT_VERSION opts = getattr(opener, 'options', None) if opts is not None: if 'revlogv1' in opts: if 'generaldelta' in opts: v |= REVLOGGENERALDELTA else: v = 0 if 'chunkcachesize' in opts: self._chunkcachesize = opts['chunkcachesize'] if 'maxchainlen' in opts: self._maxchainlen = opts['maxchainlen'] if 'aggressivemergedeltas' in opts: self._aggressivemergedeltas = opts['aggressivemergedeltas'] self._lazydeltabase = bool(opts.get('lazydeltabase', False)) if self._chunkcachesize <= 0: raise RevlogError(_('revlog chunk cache size %r is not greater ' 'than 0') % self._chunkcachesize) elif self._chunkcachesize & (self._chunkcachesize - 1): raise RevlogError(_('revlog chunk cache size %r is not a power ' 'of 2') % self._chunkcachesize) indexdata = '' self._initempty = True try: f = self.opener(self.indexfile) indexdata = f.read() f.close() if len(indexdata) > 0: v = struct.unpack(versionformat, indexdata[:4])[0] self._initempty = False except IOError as inst: if inst.errno != errno.ENOENT: raise self.version = v self._inline = v & REVLOGNGINLINEDATA self._generaldelta = v & REVLOGGENERALDELTA flags = v & ~0xFFFF fmt = v & 0xFFFF if fmt == REVLOGV0 and flags: raise RevlogError(_("index %s unknown flags %#04x for format v0") % (self.indexfile, flags >> 16)) elif fmt == REVLOGNG and flags & ~REVLOGNG_FLAGS: raise RevlogError(_("index %s unknown flags %#04x for revlogng") % (self.indexfile, flags >> 16)) elif fmt > REVLOGNG: raise RevlogError(_("index %s unknown format %d") % (self.indexfile, fmt)) self._io = 
revlogio() if self.version == REVLOGV0: self._io = revlogoldio() try: d = self._io.parseindex(indexdata, self._inline) except (ValueError, IndexError): raise RevlogError(_("index %s is corrupted") % (self.indexfile)) self.index, nodemap, self._chunkcache = d if nodemap is not None: self.nodemap = self._nodecache = nodemap if not self._chunkcache: self._chunkclear() # revnum -> (chain-length, sum-delta-length) self._chaininfocache = {} def tip(self): return self.node(len(self.index) - 2) def __contains__(self, rev): return 0 <= rev < len(self) def __len__(self): return len(self.index) - 1 def __iter__(self): return iter(xrange(len(self))) def revs(self, start=0, stop=None): """iterate over all rev in this revlog (from start to stop)""" step = 1 if stop is not None: if start > stop: step = -1 stop += step else: stop = len(self) return xrange(start, stop, step) @util.propertycache def nodemap(self): self.rev(self.node(0)) return self._nodecache def hasnode(self, node): try: self.rev(node) return True except KeyError: return False def clearcaches(self): self._cache = None self._basecache = None self._chunkcache = (0, '') self._pcache = {} try: self._nodecache.clearcaches() except AttributeError: self._nodecache = {nullid: nullrev} self._nodepos = None def rev(self, node): try: return self._nodecache[node] except TypeError: raise except RevlogError: # parsers.c radix tree lookup failed raise LookupError(node, self.indexfile, _('no node')) except KeyError: # pure python cache lookup failed n = self._nodecache i = self.index p = self._nodepos if p is None: p = len(i) - 2 for r in xrange(p, -1, -1): v = i[r][7] n[v] = r if v == node: self._nodepos = r - 1 return r raise LookupError(node, self.indexfile, _('no node')) def node(self, rev): return self.index[rev][7] def linkrev(self, rev): return self.index[rev][4] def parents(self, node): i = self.index d = i[self.rev(node)] return i[d[5]][7], i[d[6]][7] # map revisions to nodes inline def parentrevs(self, rev): return 
        self.index[rev][5:7]

    def start(self, rev):
        # data-file offset of rev's chunk (high 48 bits of index word 0)
        return int(self.index[rev][0] >> 16)

    def end(self, rev):
        # offset just past rev's chunk in the data file
        return self.start(rev) + self.length(rev)

    def length(self, rev):
        # compressed length of rev's chunk
        return self.index[rev][1]

    def chainbase(self, rev):
        # walk delta-base pointers (index word 3) back to the chain base,
        # i.e. the full-text revision the delta chain starts from
        index = self.index
        base = index[rev][3]
        while base != rev:
            rev = base
            base = index[rev][3]
        return base

    def chainlen(self, rev):
        return self._chaininfo(rev)[0]

    def _chaininfo(self, rev):
        # Return (chain length, total compressed delta length) for rev,
        # memoized in self._chaininfocache; partial-chain cache hits
        # short-circuit the walk.
        chaininfocache = self._chaininfocache
        if rev in chaininfocache:
            return chaininfocache[rev]
        index = self.index
        generaldelta = self._generaldelta
        iterrev = rev
        e = index[iterrev]
        clen = 0
        compresseddeltalen = 0
        while iterrev != e[3]:
            clen += 1
            compresseddeltalen += e[1]
            if generaldelta:
                # generaldelta: base is stored explicitly in the entry
                iterrev = e[3]
            else:
                # classic layout: base is always the previous revision
                iterrev -= 1
            if iterrev in chaininfocache:
                t = chaininfocache[iterrev]
                clen += t[0]
                compresseddeltalen += t[1]
                break
            e = index[iterrev]
        else:
            # Add text length of base since decompressing that also takes
            # work. For cache hits the length is already included.
            compresseddeltalen += e[1]
        r = (clen, compresseddeltalen)
        chaininfocache[rev] = r
        return r

    def _deltachain(self, rev, stoprev=None):
        """Obtain the delta chain for a revision.

        ``stoprev`` specifies a revision to stop at. If not specified, we
        stop at the base of the chain.

        Returns a 2-tuple of (chain, stopped) where ``chain`` is a list of
        revs in ascending order and ``stopped`` is a bool indicating whether
        ``stoprev`` was hit.
        """
        chain = []

        # Alias to prevent attribute lookup in tight loop.
index = self.index generaldelta = self._generaldelta iterrev = rev e = index[iterrev] while iterrev != e[3] and iterrev != stoprev: chain.append(iterrev) if generaldelta: iterrev = e[3] else: iterrev -= 1 e = index[iterrev] if iterrev == stoprev: stopped = True else: chain.append(iterrev) stopped = False chain.reverse() return chain, stopped def flags(self, rev): return self.index[rev][0] & 0xFFFF def rawsize(self, rev): """return the length of the uncompressed text for a given revision""" l = self.index[rev][2] if l >= 0: return l t = self.revision(self.node(rev)) return len(t) size = rawsize def ancestors(self, revs, stoprev=0, inclusive=False): """Generate the ancestors of 'revs' in reverse topological order. Does not generate revs lower than stoprev. See the documentation for ancestor.lazyancestors for more details.""" return ancestor.lazyancestors(self.parentrevs, revs, stoprev=stoprev, inclusive=inclusive) def descendants(self, revs): """Generate the descendants of 'revs' in revision order. Yield a sequence of revision numbers starting with a child of some rev in revs, i.e., each revision is *not* considered a descendant of itself. Results are ordered by revision number (a topological sort).""" first = min(revs) if first == nullrev: for i in self: yield i return seen = set(revs) for i in self.revs(start=first + 1): for x in self.parentrevs(i): if x != nullrev and x in seen: seen.add(i) yield i break def findcommonmissing(self, common=None, heads=None): """Return a tuple of the ancestors of common and the ancestors of heads that are not ancestors of common. In revset terminology, we return the tuple: ::common, (::heads) - (::common) The list is sorted by revision number, meaning it is topologically sorted. 'heads' and 'common' are both lists of node IDs. If heads is not supplied, uses all of the revlog's heads. 
If common is not supplied, uses nullid.""" if common is None: common = [nullid] if heads is None: heads = self.heads() common = [self.rev(n) for n in common] heads = [self.rev(n) for n in heads] # we want the ancestors, but inclusive class lazyset(object): def __init__(self, lazyvalues): self.addedvalues = set() self.lazyvalues = lazyvalues def __contains__(self, value): return value in self.addedvalues or value in self.lazyvalues def __iter__(self): added = self.addedvalues for r in added: yield r for r in self.lazyvalues: if not r in added: yield r def add(self, value): self.addedvalues.add(value) def update(self, values): self.addedvalues.update(values) has = lazyset(self.ancestors(common)) has.add(nullrev) has.update(common) # take all ancestors from heads that aren't in has missing = set() visit = collections.deque(r for r in heads if r not in has) while visit: r = visit.popleft() if r in missing: continue else: missing.add(r) for p in self.parentrevs(r): if p not in has: visit.append(p) missing = list(missing) missing.sort() return has, [self.node(r) for r in missing] def incrementalmissingrevs(self, common=None): """Return an object that can be used to incrementally compute the revision numbers of the ancestors of arbitrary sets that are not ancestors of common. This is an ancestor.incrementalmissingancestors object. 'common' is a list of revision numbers. If common is not supplied, uses nullrev. """ if common is None: common = [nullrev] return ancestor.incrementalmissingancestors(self.parentrevs, common) def findmissingrevs(self, common=None, heads=None): """Return the revision numbers of the ancestors of heads that are not ancestors of common. More specifically, return a list of revision numbers corresponding to nodes N such that every N satisfies the following constraints: 1. N is an ancestor of some node in 'heads' 2. N is not an ancestor of any node in 'common' The list is sorted by revision number, meaning it is topologically sorted. 
'heads' and 'common' are both lists of revision numbers. If heads is not supplied, uses all of the revlog's heads. If common is not supplied, uses nullid.""" if common is None: common = [nullrev] if heads is None: heads = self.headrevs() inc = self.incrementalmissingrevs(common=common) return inc.missingancestors(heads) def findmissing(self, common=None, heads=None): """Return the ancestors of heads that are not ancestors of common. More specifically, return a list of nodes N such that every N satisfies the following constraints: 1. N is an ancestor of some node in 'heads' 2. N is not an ancestor of any node in 'common' The list is sorted by revision number, meaning it is topologically sorted. 'heads' and 'common' are both lists of node IDs. If heads is not supplied, uses all of the revlog's heads. If common is not supplied, uses nullid.""" if common is None: common = [nullid] if heads is None: heads = self.heads() common = [self.rev(n) for n in common] heads = [self.rev(n) for n in heads] inc = self.incrementalmissingrevs(common=common) return [self.node(r) for r in inc.missingancestors(heads)] def nodesbetween(self, roots=None, heads=None): """Return a topological path from 'roots' to 'heads'. Return a tuple (nodes, outroots, outheads) where 'nodes' is a topologically sorted list of all nodes N that satisfy both of these constraints: 1. N is a descendant of some node in 'roots' 2. N is an ancestor of some node in 'heads' Every node is considered to be both a descendant and an ancestor of itself, so every reachable node in 'roots' and 'heads' will be included in 'nodes'. 'outroots' is the list of reachable nodes in 'roots', i.e., the subset of 'roots' that is returned in 'nodes'. Likewise, 'outheads' is the subset of 'heads' that is also in 'nodes'. 'roots' and 'heads' are both lists of node IDs. If 'roots' is unspecified, uses nullid as the only root. 
If 'heads' is unspecified, uses list of all of the revlog's heads.""" nonodes = ([], [], []) if roots is not None: roots = list(roots) if not roots: return nonodes lowestrev = min([self.rev(n) for n in roots]) else: roots = [nullid] # Everybody's a descendant of nullid lowestrev = nullrev if (lowestrev == nullrev) and (heads is None): # We want _all_ the nodes! return ([self.node(r) for r in self], [nullid], list(self.heads())) if heads is None: # All nodes are ancestors, so the latest ancestor is the last # node. highestrev = len(self) - 1 # Set ancestors to None to signal that every node is an ancestor. ancestors = None # Set heads to an empty dictionary for later discovery of heads heads = {} else: heads = list(heads) if not heads: return nonodes ancestors = set() # Turn heads into a dictionary so we can remove 'fake' heads. # Also, later we will be using it to filter out the heads we can't # find from roots. heads = dict.fromkeys(heads, False) # Start at the top and keep marking parents until we're done. nodestotag = set(heads) # Remember where the top was so we can use it as a limit later. highestrev = max([self.rev(n) for n in nodestotag]) while nodestotag: # grab a node to tag n = nodestotag.pop() # Never tag nullid if n == nullid: continue # A node's revision number represents its place in a # topologically sorted list of nodes. r = self.rev(n) if r >= lowestrev: if n not in ancestors: # If we are possibly a descendant of one of the roots # and we haven't already been marked as an ancestor ancestors.add(n) # Mark as ancestor # Add non-nullid parents to list of nodes to tag. nodestotag.update([p for p in self.parents(n) if p != nullid]) elif n in heads: # We've seen it before, is it a fake head? # So it is, real heads should not be the ancestors of # any other heads. heads.pop(n) if not ancestors: return nonodes # Now that we have our set of ancestors, we want to remove any # roots that are not ancestors. 
# If one of the roots was nullid, everything is included anyway. if lowestrev > nullrev: # But, since we weren't, let's recompute the lowest rev to not # include roots that aren't ancestors. # Filter out roots that aren't ancestors of heads roots = [n for n in roots if n in ancestors] # Recompute the lowest revision if roots: lowestrev = min([self.rev(n) for n in roots]) else: # No more roots? Return empty list return nonodes else: # We are descending from nullid, and don't need to care about # any other roots. lowestrev = nullrev roots = [nullid] # Transform our roots list into a set. descendants = set(roots) # Also, keep the original roots so we can filter out roots that aren't # 'real' roots (i.e. are descended from other roots). roots = descendants.copy() # Our topologically sorted list of output nodes. orderedout = [] # Don't start at nullid since we don't want nullid in our output list, # and if nullid shows up in descendants, empty parents will look like # they're descendants. for r in self.revs(start=max(lowestrev, 0), stop=highestrev + 1): n = self.node(r) isdescendant = False if lowestrev == nullrev: # Everybody is a descendant of nullid isdescendant = True elif n in descendants: # n is already a descendant isdescendant = True # This check only needs to be done here because all the roots # will start being marked is descendants before the loop. if n in roots: # If n was a root, check if it's a 'real' root. p = tuple(self.parents(n)) # If any of its parents are descendants, it's not a root. if (p[0] in descendants) or (p[1] in descendants): roots.remove(n) else: p = tuple(self.parents(n)) # A node is a descendant if either of its parents are # descendants. (We seeded the dependents list with the roots # up there, remember?) if (p[0] in descendants) or (p[1] in descendants): descendants.add(n) isdescendant = True if isdescendant and ((ancestors is None) or (n in ancestors)): # Only include nodes that are both descendants and ancestors. 
orderedout.append(n) if (ancestors is not None) and (n in heads): # We're trying to figure out which heads are reachable # from roots. # Mark this head as having been reached heads[n] = True elif ancestors is None: # Otherwise, we're trying to discover the heads. # Assume this is a head because if it isn't, the next step # will eventually remove it. heads[n] = True # But, obviously its parents aren't. for p in self.parents(n): heads.pop(p, None) heads = [n for n, flag in heads.iteritems() if flag] roots = list(roots) assert orderedout assert roots assert heads return (orderedout, roots, heads) def headrevs(self): try: return self.index.headrevs() except AttributeError: return self._headrevs() def computephases(self, roots): return self.index.computephasesmapsets(roots) def _headrevs(self): count = len(self) if not count: return [nullrev] # we won't iter over filtered rev so nobody is a head at start ishead = [0] * (count + 1) index = self.index for r in self: ishead[r] = 1 # I may be an head e = index[r] ishead[e[5]] = ishead[e[6]] = 0 # my parent are not return [r for r, val in enumerate(ishead) if val] def heads(self, start=None, stop=None): """return the list of all nodes that have no children if start is specified, only heads that are descendants of start will be returned if stop is specified, it will consider all the revs from stop as if they had no children """ if start is None and stop is None: if not len(self): return [nullid] return [self.node(r) for r in self.headrevs()] if start is None: start = nullid if stop is None: stop = [] stoprevs = set([self.rev(n) for n in stop]) startrev = self.rev(start) reachable = set((startrev,)) heads = set((startrev,)) parentrevs = self.parentrevs for r in self.revs(start=startrev + 1): for p in parentrevs(r): if p in reachable: if r not in stoprevs: reachable.add(r) heads.add(r) if p in heads and p not in stoprevs: heads.remove(p) return [self.node(r) for r in heads] def children(self, node): """find the children of a 
given node""" c = [] p = self.rev(node) for r in self.revs(start=p + 1): prevs = [pr for pr in self.parentrevs(r) if pr != nullrev] if prevs: for pr in prevs: if pr == p: c.append(self.node(r)) elif p == nullrev: c.append(self.node(r)) return c def descendant(self, start, end): if start == nullrev: return True for i in self.descendants([start]): if i == end: return True elif i > end: break return False def commonancestorsheads(self, a, b): """calculate all the heads of the common ancestors of nodes a and b""" a, b = self.rev(a), self.rev(b) try: ancs = self.index.commonancestorsheads(a, b) except (AttributeError, OverflowError): # C implementation failed ancs = ancestor.commonancestorsheads(self.parentrevs, a, b) return map(self.node, ancs) def isancestor(self, a, b): """return True if node a is an ancestor of node b The implementation of this is trivial but the use of commonancestorsheads is not.""" return a in self.commonancestorsheads(a, b) def ancestor(self, a, b): """calculate the "best" common ancestor of nodes a and b""" a, b = self.rev(a), self.rev(b) try: ancs = self.index.ancestors(a, b) except (AttributeError, OverflowError): ancs = ancestor.ancestors(self.parentrevs, a, b) if ancs: # choose a consistent winner when there's a tie return min(map(self.node, ancs)) return nullid def _match(self, id): if isinstance(id, int): # rev return self.node(id) if len(id) == 20: # possibly a binary node # odds of a binary node being all hex in ASCII are 1 in 10**25 try: node = id self.rev(node) # quick search the index return node except LookupError: pass # may be partial hex id try: # str(rev) rev = int(id) if str(rev) != id: raise ValueError if rev < 0: rev = len(self) + rev if rev < 0 or rev >= len(self): raise ValueError return self.node(rev) except (ValueError, OverflowError): pass if len(id) == 40: try: # a full hex nodeid? 
node = bin(id) self.rev(node) return node except (TypeError, LookupError): pass def _partialmatch(self, id): try: n = self.index.partialmatch(id) if n and self.hasnode(n): return n return None except RevlogError: # parsers.c radix tree lookup gave multiple matches # fall through to slow path that filters hidden revisions pass except (AttributeError, ValueError): # we are pure python, or key was too short to search radix tree pass if id in self._pcache: return self._pcache[id] if len(id) < 40: try: # hex(node)[:...] l = len(id) // 2 # grab an even number of digits prefix = bin(id[:l * 2]) nl = [e[7] for e in self.index if e[7].startswith(prefix)] nl = [n for n in nl if hex(n).startswith(id) and self.hasnode(n)] if len(nl) > 0: if len(nl) == 1: self._pcache[id] = nl[0] return nl[0] raise LookupError(id, self.indexfile, _('ambiguous identifier')) return None except TypeError: pass def lookup(self, id): """locate a node based on: - revision number or str(revision number) - nodeid or subset of hex nodeid """ n = self._match(id) if n is not None: return n n = self._partialmatch(id) if n: return n raise LookupError(id, self.indexfile, _('no match found')) def cmp(self, node, text): """compare text with a given file revision returns True if text is different than what is stored. """ p1, p2 = self.parents(node) return hash(text, p1, p2) != node def _addchunk(self, offset, data): """Add a segment to the revlog cache. Accepts an absolute offset and the data that is at that location. """ o, d = self._chunkcache # try to add to existing cache if o + len(d) == offset and len(d) + len(data) < _chunksize: self._chunkcache = o, d + data else: self._chunkcache = offset, data def _loadchunk(self, offset, length, df=None): """Load a segment of raw data from the revlog. Accepts an absolute offset, length to read, and an optional existing file handle to read from. If an existing file handle is passed, it will be seeked and the original seek position will NOT be restored. 
Returns a str or buffer of raw byte data. """ if df is not None: closehandle = False else: if self._inline: df = self.opener(self.indexfile) else: df = self.opener(self.datafile) closehandle = True # Cache data both forward and backward around the requested # data, in a fixed size window. This helps speed up operations # involving reading the revlog backwards. cachesize = self._chunkcachesize realoffset = offset & ~(cachesize - 1) reallength = (((offset + length + cachesize) & ~(cachesize - 1)) - realoffset) df.seek(realoffset) d = df.read(reallength) if closehandle: df.close() self._addchunk(realoffset, d) if offset != realoffset or reallength != length: return util.buffer(d, offset - realoffset, length) return d def _getchunk(self, offset, length, df=None): """Obtain a segment of raw data from the revlog. Accepts an absolute offset, length of bytes to obtain, and an optional file handle to the already-opened revlog. If the file handle is used, it's original seek position will not be preserved. Requests for data may be returned from a cache. Returns a str or a buffer instance of raw byte data. """ o, d = self._chunkcache l = len(d) # is it in the cache? cachestart = offset - o cacheend = cachestart + length if cachestart >= 0 and cacheend <= l: if cachestart == 0 and cacheend == l: return d # avoid a copy return util.buffer(d, cachestart, cacheend - cachestart) return self._loadchunk(offset, length, df=df) def _chunkraw(self, startrev, endrev, df=None): """Obtain a segment of raw data corresponding to a range of revisions. Accepts the start and end revisions and an optional already-open file handle to be used for reading. If the file handle is read, its seek position will not be preserved. Requests for data may be satisfied by a cache. Returns a 2-tuple of (offset, data) for the requested range of revisions. Offset is the integer offset from the beginning of the revlog and data is a str or buffer of the raw byte data. 
Callers will need to call ``self.start(rev)`` and ``self.length(rev)`` to determine where each revision's data begins and ends. """ start = self.start(startrev) end = self.end(endrev) if self._inline: start += (startrev + 1) * self._io.size end += (endrev + 1) * self._io.size length = end - start return start, self._getchunk(start, length, df=df) def _chunk(self, rev, df=None): """Obtain a single decompressed chunk for a revision. Accepts an integer revision and an optional already-open file handle to be used for reading. If used, the seek position of the file will not be preserved. Returns a str holding uncompressed data for the requested revision. """ return decompress(self._chunkraw(rev, rev, df=df)[1]) def _chunks(self, revs, df=None): """Obtain decompressed chunks for the specified revisions. Accepts an iterable of numeric revisions that are assumed to be in ascending order. Also accepts an optional already-open file handle to be used for reading. If used, the seek position of the file will not be preserved. This function is similar to calling ``self._chunk()`` multiple times, but is faster. Returns a list with decompressed data for each requested revision. 
""" if not revs: return [] start = self.start length = self.length inline = self._inline iosize = self._io.size buffer = util.buffer l = [] ladd = l.append try: offset, data = self._chunkraw(revs[0], revs[-1], df=df) except OverflowError: # issue4215 - we can't cache a run of chunks greater than # 2G on Windows return [self._chunk(rev, df=df) for rev in revs] for rev in revs: chunkstart = start(rev) if inline: chunkstart += (rev + 1) * iosize chunklength = length(rev) ladd(decompress(buffer(data, chunkstart - offset, chunklength))) return l def _chunkclear(self): """Clear the raw chunk cache.""" self._chunkcache = (0, '') def deltaparent(self, rev): """return deltaparent of the given revision""" base = self.index[rev][3] if base == rev: return nullrev elif self._generaldelta: return base else: return rev - 1 def revdiff(self, rev1, rev2): """return or calculate a delta between two revisions""" if rev1 != nullrev and self.deltaparent(rev2) == rev1: return str(self._chunk(rev2)) return mdiff.textdiff(self.revision(rev1), self.revision(rev2)) def revision(self, nodeorrev, _df=None): """return an uncompressed revision of a given node or revision number. _df is an existing file handle to read from. It is meant to only be used internally. 
""" if isinstance(nodeorrev, int): rev = nodeorrev node = self.node(rev) else: node = nodeorrev rev = None cachedrev = None if node == nullid: return "" if self._cache: if self._cache[0] == node: return self._cache[2] cachedrev = self._cache[1] # look up what we need to read text = None if rev is None: rev = self.rev(node) # check rev flags if self.flags(rev) & ~REVIDX_KNOWN_FLAGS: raise RevlogError(_('incompatible revision flag %x') % (self.flags(rev) & ~REVIDX_KNOWN_FLAGS)) chain, stopped = self._deltachain(rev, stoprev=cachedrev) if stopped: text = self._cache[2] # drop cache to save memory self._cache = None bins = self._chunks(chain, df=_df) if text is None: text = str(bins[0]) bins = bins[1:] text = mdiff.patches(text, bins) text = self._checkhash(text, node, rev) self._cache = (node, rev, text) return text def hash(self, text, p1, p2): """Compute a node hash. Available as a function so that subclasses can replace the hash as needed. """ return hash(text, p1, p2) def _checkhash(self, text, node, rev): p1, p2 = self.parents(node) self.checkhash(text, p1, p2, node, rev) return text def checkhash(self, text, p1, p2, node, rev=None): if node != self.hash(text, p1, p2): revornode = rev if revornode is None: revornode = templatefilters.short(hex(node)) raise RevlogError(_("integrity check failed on %s:%s") % (self.indexfile, revornode)) def checkinlinesize(self, tr, fp=None): """Check if the revlog is too big for inline and convert if so. This should be called after revisions are added to the revlog. If the revlog has grown too large to be an inline revlog, it will convert it to use multiple index and data files. 
""" if not self._inline or (self.start(-2) + self.length(-2)) < _maxinline: return trinfo = tr.find(self.indexfile) if trinfo is None: raise RevlogError(_("%s not found in the transaction") % self.indexfile) trindex = trinfo[2] if trindex is not None: dataoff = self.start(trindex) else: # revlog was stripped at start of transaction, use all leftover data trindex = len(self) - 1 dataoff = self.end(-2) tr.add(self.datafile, dataoff) if fp: fp.flush() fp.close() df = self.opener(self.datafile, 'w') try: for r in self: df.write(self._chunkraw(r, r)[1]) finally: df.close() fp = self.opener(self.indexfile, 'w', atomictemp=True) self.version &= ~(REVLOGNGINLINEDATA) self._inline = False for i in self: e = self._io.packentry(self.index[i], self.node, self.version, i) fp.write(e) # if we don't call close, the temp file will never replace the # real index fp.close() tr.replace(self.indexfile, trindex * self._io.size) self._chunkclear() def addrevision(self, text, transaction, link, p1, p2, cachedelta=None, node=None): """add a revision to the log text - the revision data to add transaction - the transaction object used for rollback link - the linkrev data to add p1, p2 - the parent nodeids of the revision cachedelta - an optional precomputed delta node - nodeid of revision; typically node is not specified, and it is computed by default as hash(text, p1, p2), however subclasses might use different hashing method (and override checkhash() in such case) """ if link == nullrev: raise RevlogError(_("attempted to add linkrev -1 to %s") % self.indexfile) if len(text) > _maxentrysize: raise RevlogError( _("%s: size of %d bytes exceeds maximum revlog storage of 2GiB") % (self.indexfile, len(text))) node = node or self.hash(text, p1, p2) if node in self.nodemap: return node dfh = None if not self._inline: dfh = self.opener(self.datafile, "a+") ifh = self.opener(self.indexfile, "a+") try: return self._addrevision(node, text, transaction, link, p1, p2, REVIDX_DEFAULT_FLAGS, cachedelta, 
ifh, dfh) finally: if dfh: dfh.close() ifh.close() def compress(self, text): """ generate a possibly-compressed representation of text """ if not text: return ("", text) l = len(text) bin = None if l < 44: pass elif l > 1000000: # zlib makes an internal copy, thus doubling memory usage for # large files, so lets do this in pieces z = zlib.compressobj() p = [] pos = 0 while pos < l: pos2 = pos + 2**20 p.append(z.compress(text[pos:pos2])) pos = pos2 p.append(z.flush()) if sum(map(len, p)) < l: bin = "".join(p) else: bin = _compress(text) if bin is None or len(bin) > l: if text[0] == '\0': return ("", text) return ('u', text) return ("", bin) def _isgooddelta(self, d, textlen): """Returns True if the given delta is good. Good means that it is within the disk span, disk size, and chain length bounds that we know to be performant.""" if d is None: return False # - 'dist' is the distance from the base revision -- bounding it limits # the amount of I/O we need to do. # - 'compresseddeltalen' is the sum of the total size of deltas we need # to apply -- bounding it limits the amount of CPU we consume. dist, l, data, base, chainbase, chainlen, compresseddeltalen = d if (dist > textlen * 4 or l > textlen or compresseddeltalen > textlen * 2 or (self._maxchainlen and chainlen > self._maxchainlen)): return False return True def _addrevision(self, node, text, transaction, link, p1, p2, flags, cachedelta, ifh, dfh, alwayscache=False): """internal function to add revisions to the log see addrevision for argument descriptions. invariants: - text is optional (can be None); if not set, cachedelta must be set. if both are set, they must correspond to each other. """ btext = [text] def buildtext(): if btext[0] is not None: return btext[0] baserev = cachedelta[0] delta = cachedelta[1] # special case deltas which replace entire base; no need to decode # base revision. this neatly avoids censored bases, which throw when # they're decoded. 
hlen = struct.calcsize(">lll") if delta[:hlen] == mdiff.replacediffheader(self.rawsize(baserev), len(delta) - hlen): btext[0] = delta[hlen:] else: if self._inline: fh = ifh else: fh = dfh basetext = self.revision(self.node(baserev), _df=fh) btext[0] = mdiff.patch(basetext, delta) try: self.checkhash(btext[0], p1, p2, node) if flags & REVIDX_ISCENSORED: raise RevlogError(_('node %s is not censored') % node) except CensoredNodeError: # must pass the censored index flag to add censored revisions if not flags & REVIDX_ISCENSORED: raise return btext[0] def builddelta(rev): # can we use the cached delta? if cachedelta and cachedelta[0] == rev: delta = cachedelta[1] else: t = buildtext() if self.iscensored(rev): # deltas based on a censored revision must replace the # full content in one patch, so delta works everywhere header = mdiff.replacediffheader(self.rawsize(rev), len(t)) delta = header + t else: if self._inline: fh = ifh else: fh = dfh ptext = self.revision(self.node(rev), _df=fh) delta = mdiff.textdiff(ptext, t) data = self.compress(delta) l = len(data[1]) + len(data[0]) if basecache[0] == rev: chainbase = basecache[1] else: chainbase = self.chainbase(rev) dist = l + offset - self.start(chainbase) if self._generaldelta: base = rev else: base = chainbase chainlen, compresseddeltalen = self._chaininfo(rev) chainlen += 1 compresseddeltalen += l return dist, l, data, base, chainbase, chainlen, compresseddeltalen curr = len(self) prev = curr - 1 base = chainbase = curr offset = self.end(prev) delta = None if self._basecache is None: self._basecache = (prev, self.chainbase(prev)) basecache = self._basecache p1r, p2r = self.rev(p1), self.rev(p2) # full versions are inserted when the needed deltas # become comparable to the uncompressed text if text is None: textlen = mdiff.patchedsize(self.rawsize(cachedelta[0]), cachedelta[1]) else: textlen = len(text) # should we try to build a delta? 
if prev != nullrev: tested = set() if cachedelta and self._generaldelta and self._lazydeltabase: # Assume what we received from the server is a good choice # build delta will reuse the cache candidatedelta = builddelta(cachedelta[0]) tested.add(cachedelta[0]) if self._isgooddelta(candidatedelta, textlen): delta = candidatedelta if delta is None and self._generaldelta: # exclude already lazy tested base if any parents = [p for p in (p1r, p2r) if p != nullrev and p not in tested] if parents and not self._aggressivemergedeltas: # Pick whichever parent is closer to us (to minimize the # chance of having to build a fulltext). parents = [max(parents)] tested.update(parents) pdeltas = [] for p in parents: pd = builddelta(p) if self._isgooddelta(pd, textlen): pdeltas.append(pd) if pdeltas: delta = min(pdeltas, key=lambda x: x[1]) if delta is None and prev not in tested: # other approach failed try against prev to hopefully save us a # fulltext. candidatedelta = builddelta(prev) if self._isgooddelta(candidatedelta, textlen): delta = candidatedelta if delta is not None: dist, l, data, base, chainbase, chainlen, compresseddeltalen = delta else: text = buildtext() data = self.compress(text) l = len(data[1]) + len(data[0]) base = chainbase = curr e = (offset_type(offset, flags), l, textlen, base, link, p1r, p2r, node) self.index.insert(-1, e) self.nodemap[node] = curr entry = self._io.packentry(e, self.node, self.version, curr) self._writeentry(transaction, ifh, dfh, entry, data, link, offset) if alwayscache and text is None: text = buildtext() if type(text) == str: # only accept immutable objects self._cache = (node, curr, text) self._basecache = (curr, chainbase) return node def _writeentry(self, transaction, ifh, dfh, entry, data, link, offset): # Files opened in a+ mode have inconsistent behavior on various # platforms. Windows requires that a file positioning call be made # when the file handle transitions between reads and writes. 
See # 3686fa2b8eee and the mixedfilemodewrapper in windows.py. On other # platforms, Python or the platform itself can be buggy. Some versions # of Solaris have been observed to not append at the end of the file # if the file was seeked to before the end. See issue4943 for more. # # We work around this issue by inserting a seek() before writing. # Note: This is likely not necessary on Python 3. ifh.seek(0, os.SEEK_END) if dfh: dfh.seek(0, os.SEEK_END) curr = len(self) - 1 if not self._inline: transaction.add(self.datafile, offset) transaction.add(self.indexfile, curr * len(entry)) if data[0]: dfh.write(data[0]) dfh.write(data[1]) ifh.write(entry) else: offset += curr * self._io.size transaction.add(self.indexfile, offset, curr) ifh.write(entry) ifh.write(data[0]) ifh.write(data[1]) self.checkinlinesize(transaction, ifh) def addgroup(self, cg, linkmapper, transaction, addrevisioncb=None): """ add a delta group given a set of deltas, add them to the revision log. the first delta is against its parent, which should be in our log, the rest are against the previous delta. If ``addrevisioncb`` is defined, it will be called with arguments of this revlog and the node that was added. 
""" # track the base of the current delta log content = [] node = None r = len(self) end = 0 if r: end = self.end(r - 1) ifh = self.opener(self.indexfile, "a+") isize = r * self._io.size if self._inline: transaction.add(self.indexfile, end + isize, r) dfh = None else: transaction.add(self.indexfile, isize, r) transaction.add(self.datafile, end) dfh = self.opener(self.datafile, "a+") def flush(): if dfh: dfh.flush() ifh.flush() try: # loop through our set of deltas chain = None while True: chunkdata = cg.deltachunk(chain) if not chunkdata: break node = chunkdata['node'] p1 = chunkdata['p1'] p2 = chunkdata['p2'] cs = chunkdata['cs'] deltabase = chunkdata['deltabase'] delta = chunkdata['delta'] flags = chunkdata['flags'] or REVIDX_DEFAULT_FLAGS content.append(node) link = linkmapper(cs) if node in self.nodemap: # this can happen if two branches make the same change chain = node continue for p in (p1, p2): if p not in self.nodemap: raise LookupError(p, self.indexfile, _('unknown parent')) if deltabase not in self.nodemap: raise LookupError(deltabase, self.indexfile, _('unknown delta base')) baserev = self.rev(deltabase) if baserev != nullrev and self.iscensored(baserev): # if base is censored, delta must be full replacement in a # single patch operation hlen = struct.calcsize(">lll") oldlen = self.rawsize(baserev) newlen = len(delta) - hlen if delta[:hlen] != mdiff.replacediffheader(oldlen, newlen): raise error.CensoredBaseError(self.indexfile, self.node(baserev)) if not flags and self._peek_iscensored(baserev, delta, flush): flags |= REVIDX_ISCENSORED # We assume consumers of addrevisioncb will want to retrieve # the added revision, which will require a call to # revision(). revision() will fast path if there is a cache # hit. So, we tell _addrevision() to always cache in this case. 
chain = self._addrevision(node, None, transaction, link, p1, p2, flags, (baserev, delta), ifh, dfh, alwayscache=bool(addrevisioncb)) if addrevisioncb: addrevisioncb(self, chain) if not dfh and not self._inline: # addrevision switched from inline to conventional # reopen the index ifh.close() dfh = self.opener(self.datafile, "a+") ifh = self.opener(self.indexfile, "a+") finally: if dfh: dfh.close() ifh.close() return content def iscensored(self, rev): """Check if a file revision is censored.""" return False def _peek_iscensored(self, baserev, delta, flush): """Quickly check if a delta produces a censored revision.""" return False def getstrippoint(self, minlink): """find the minimum rev that must be stripped to strip the linkrev Returns a tuple containing the minimum rev and a set of all revs that have linkrevs that will be broken by this strip. """ brokenrevs = set() strippoint = len(self) heads = {} futurelargelinkrevs = set() for head in self.headrevs(): headlinkrev = self.linkrev(head) heads[head] = headlinkrev if headlinkrev >= minlink: futurelargelinkrevs.add(headlinkrev) # This algorithm involves walking down the rev graph, starting at the # heads. Since the revs are topologically sorted according to linkrev, # once all head linkrevs are below the minlink, we know there are # no more revs that could have a linkrev greater than minlink. # So we can stop walking. while futurelargelinkrevs: strippoint -= 1 linkrev = heads.pop(strippoint) if linkrev < minlink: brokenrevs.add(strippoint) else: futurelargelinkrevs.remove(linkrev) for p in self.parentrevs(strippoint): if p != nullrev: plinkrev = self.linkrev(p) heads[p] = plinkrev if plinkrev >= minlink: futurelargelinkrevs.add(plinkrev) return strippoint, brokenrevs def strip(self, minlink, transaction): """truncate the revlog on the first revision with a linkrev >= minlink This function is called when we're stripping revision minlink and its descendants from the repository. 
We have to remove all revisions with linkrev >= minlink, because the equivalent changelog revisions will be renumbered after the strip. So we truncate the revlog on the first of these revisions, and trust that the caller has saved the revisions that shouldn't be removed and that it'll re-add them after this truncation. """ if len(self) == 0: return rev, _ = self.getstrippoint(minlink) if rev == len(self): return # first truncate the files on disk end = self.start(rev) if not self._inline: transaction.add(self.datafile, end) end = rev * self._io.size else: end += rev * self._io.size transaction.add(self.indexfile, end) # then reset internal state in memory to forget those revisions self._cache = None self._chaininfocache = {} self._chunkclear() for x in xrange(rev, len(self)): del self.nodemap[self.node(x)] del self.index[rev:-1] def checksize(self): expected = 0 if len(self): expected = max(0, self.end(len(self) - 1)) try: f = self.opener(self.datafile) f.seek(0, 2) actual = f.tell() f.close() dd = actual - expected except IOError as inst: if inst.errno != errno.ENOENT: raise dd = 0 try: f = self.opener(self.indexfile) f.seek(0, 2) actual = f.tell() f.close() s = self._io.size i = max(0, actual // s) di = actual - (i * s) if self._inline: databytes = 0 for r in self: databytes += max(0, self.length(r)) dd = 0 di = actual - len(self) * s - databytes except IOError as inst: if inst.errno != errno.ENOENT: raise di = 0 return (dd, di) def files(self): res = [self.indexfile] if not self._inline: res.append(self.datafile) return res mercurial-3.7.3/mercurial/setdiscovery.py0000644000175000017500000002146412676531525020216 0ustar mpmmpm00000000000000# setdiscovery.py - improved discovery of common nodeset for mercurial # # Copyright 2010 Benoit Boissinot # and Peter Arrenbrecht # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. """ Algorithm works in the following way. 
You have two repository: local and remote. They both contains a DAG of changelists. The goal of the discovery protocol is to find one set of node *common*, the set of nodes shared by local and remote. One of the issue with the original protocol was latency, it could potentially require lots of roundtrips to discover that the local repo was a subset of remote (which is a very common case, you usually have few changes compared to upstream, while upstream probably had lots of development). The new protocol only requires one interface for the remote repo: `known()`, which given a set of changelists tells you if they are present in the DAG. The algorithm then works as follow: - We will be using three sets, `common`, `missing`, `unknown`. Originally all nodes are in `unknown`. - Take a sample from `unknown`, call `remote.known(sample)` - For each node that remote knows, move it and all its ancestors to `common` - For each node that remote doesn't know, move it and all its descendants to `missing` - Iterate until `unknown` is empty There are a couple optimizations, first is instead of starting with a random sample of missing, start by sending all heads, in the case where the local repo is a subset, you computed the answer in one round trip. Then you can do something similar to the bisecting strategy used when finding faulty changesets. Instead of random samples, you can try picking nodes that will maximize the number of nodes that will be classified with it (since all ancestors or descendants will be marked as well). """ from __future__ import absolute_import import collections import random from .i18n import _ from .node import ( nullid, nullrev, ) from . import ( dagutil, error, ) def _updatesample(dag, nodes, sample, quicksamplesize=0): """update an existing sample to match the expected size The sample is updated with nodes exponentially distant from each head of the set. (H~1, H~2, H~4, H~8, etc). 
If a target size is specified, the sampling will stop once this size is reached. Otherwise sampling will happen until roots of the set are reached. :dag: a dag object from dagutil :nodes: set of nodes we want to discover (if None, assume the whole dag) :sample: a sample to update :quicksamplesize: optional target size of the sample""" # if nodes is empty we scan the entire graph if nodes: heads = dag.headsetofconnecteds(nodes) else: heads = dag.heads() dist = {} visit = collections.deque(heads) seen = set() factor = 1 while visit: curr = visit.popleft() if curr in seen: continue d = dist.setdefault(curr, 1) if d > factor: factor *= 2 if d == factor: sample.add(curr) if quicksamplesize and (len(sample) >= quicksamplesize): return seen.add(curr) for p in dag.parents(curr): if not nodes or p in nodes: dist.setdefault(p, d + 1) visit.append(p) def _takequicksample(dag, nodes, size): """takes a quick sample of size It is meant for initial sampling and focuses on querying heads and close ancestors of heads. 
:dag: a dag object :nodes: set of nodes to discover :size: the maximum size of the sample""" sample = dag.headsetofconnecteds(nodes) if size <= len(sample): return _limitsample(sample, size) _updatesample(dag, None, sample, quicksamplesize=size) return sample def _takefullsample(dag, nodes, size): sample = dag.headsetofconnecteds(nodes) # update from heads _updatesample(dag, nodes, sample) # update from roots _updatesample(dag.inverse(), nodes, sample) assert sample sample = _limitsample(sample, size) if len(sample) < size: more = size - len(sample) sample.update(random.sample(list(nodes - sample), more)) return sample def _limitsample(sample, desiredlen): """return a random subset of sample of at most desiredlen item""" if len(sample) > desiredlen: sample = set(random.sample(sample, desiredlen)) return sample def findcommonheads(ui, local, remote, initialsamplesize=100, fullsamplesize=200, abortwhenunrelated=True): '''Return a tuple (common, anyincoming, remoteheads) used to identify missing nodes from or in remote. 
''' roundtrips = 0 cl = local.changelog dag = dagutil.revlogdag(cl) # early exit if we know all the specified remote heads already ui.debug("query 1; heads\n") roundtrips += 1 ownheads = dag.heads() sample = _limitsample(ownheads, initialsamplesize) # indices between sample and externalized version must match sample = list(sample) batch = remote.batch() srvheadhashesref = batch.heads() yesnoref = batch.known(dag.externalizeall(sample)) batch.submit() srvheadhashes = srvheadhashesref.value yesno = yesnoref.value if cl.tip() == nullid: if srvheadhashes != [nullid]: return [nullid], True, srvheadhashes return [nullid], False, [] # start actual discovery (we note this before the next "if" for # compatibility reasons) ui.status(_("searching for changes\n")) srvheads = dag.internalizeall(srvheadhashes, filterunknown=True) if len(srvheads) == len(srvheadhashes): ui.debug("all remote heads known locally\n") return (srvheadhashes, False, srvheadhashes,) if sample and len(ownheads) <= initialsamplesize and all(yesno): ui.note(_("all local heads known remotely\n")) ownheadhashes = dag.externalizeall(ownheads) return (ownheadhashes, True, srvheadhashes,) # full blown discovery # own nodes I know we both know # treat remote heads (and maybe own heads) as a first implicit sample # response common = cl.incrementalmissingrevs(srvheads) commoninsample = set(n for i, n in enumerate(sample) if yesno[i]) common.addbases(commoninsample) # own nodes where I don't know if remote knows them undecided = set(common.missingancestors(ownheads)) # own nodes I know remote lacks missing = set() full = False while undecided: if sample: missinginsample = [n for i, n in enumerate(sample) if not yesno[i]] missing.update(dag.descendantset(missinginsample, missing)) undecided.difference_update(missing) if not undecided: break if full or common.hasbases(): if full: ui.note(_("sampling from both directions\n")) else: ui.debug("taking initial sample\n") samplefunc = _takefullsample targetsize = 
fullsamplesize else: # use even cheaper initial sample ui.debug("taking quick initial sample\n") samplefunc = _takequicksample targetsize = initialsamplesize if len(undecided) < targetsize: sample = list(undecided) else: sample = samplefunc(dag, undecided, targetsize) sample = _limitsample(sample, targetsize) roundtrips += 1 ui.progress(_('searching'), roundtrips, unit=_('queries')) ui.debug("query %i; still undecided: %i, sample size is: %i\n" % (roundtrips, len(undecided), len(sample))) # indices between sample and externalized version must match sample = list(sample) yesno = remote.known(dag.externalizeall(sample)) full = True if sample: commoninsample = set(n for i, n in enumerate(sample) if yesno[i]) common.addbases(commoninsample) common.removeancestorsfrom(undecided) # heads(common) == heads(common.bases) since common represents common.bases # and all its ancestors result = dag.headsetofconnecteds(common.bases) # common.bases can include nullrev, but our contract requires us to not # return any heads in that case, so discard that result.discard(nullrev) ui.progress(_('searching'), None) ui.debug("%d total queries\n" % roundtrips) if not result and srvheadhashes != [nullid]: if abortwhenunrelated: raise error.Abort(_("repository is unrelated")) else: ui.warn(_("warning: repository is unrelated\n")) return (set([nullid]), True, srvheadhashes,) anyincoming = (srvheadhashes != [nullid]) return dag.externalizeall(result), anyincoming, srvheadhashes mercurial-3.7.3/mercurial/destutil.py0000644000175000017500000001763512676531524017334 0ustar mpmmpm00000000000000# destutil.py - Mercurial utility function for command destination # # Copyright Matt Mackall and other # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import from .i18n import _ from . 
import ( bookmarks, error, obsolete, ) def _destupdatevalidate(repo, rev, clean, check): """validate that the destination comply to various rules This exists as its own function to help wrapping from extensions.""" wc = repo[None] p1 = wc.p1() if not clean: # Check that the update is linear. # # Mercurial do not allow update-merge for non linear pattern # (that would be technically possible but was considered too confusing # for user a long time ago) # # See mercurial.merge.update for details if p1.rev() not in repo.changelog.ancestors([rev], inclusive=True): dirty = wc.dirty(missing=True) foreground = obsolete.foreground(repo, [p1.node()]) if not repo[rev].node() in foreground: if dirty: msg = _("uncommitted changes") hint = _("commit and merge, or update --clean to" " discard changes") raise error.UpdateAbort(msg, hint=hint) elif not check: # destination is not a descendant. msg = _("not a linear update") hint = _("merge or update --check to force update") raise error.UpdateAbort(msg, hint=hint) def _destupdateobs(repo, clean, check): """decide of an update destination from obsolescence markers""" node = None wc = repo[None] p1 = wc.p1() movemark = None if p1.obsolete() and not p1.children(): # allow updating to successors successors = obsolete.successorssets(repo, p1.node()) # behavior of certain cases is as follows, # # divergent changesets: update to highest rev, similar to what # is currently done when there are more than one head # (i.e. 'tip') # # replaced changesets: same as divergent except we know there # is no conflict # # pruned changeset: no update is done; though, we could # consider updating to the first non-obsolete parent, # similar to what is current done for 'hg prune' if successors: # flatten the list here handles both divergent (len > 1) # and the usual case (len = 1) successors = [n for sub in successors for n in sub] # get the max revision for the given successors set, # i.e. 
the 'tip' of a set node = repo.revs('max(%ln)', successors).first() if bookmarks.isactivewdirparent(repo): movemark = repo['.'].node() return node, movemark, None def _destupdatebook(repo, clean, check): """decide on an update destination from active bookmark""" # we also move the active bookmark, if any activemark = None node, movemark = bookmarks.calculateupdate(repo.ui, repo, None) if node is not None: activemark = node return node, movemark, activemark def _destupdatebranch(repo, clean, check): """decide on an update destination from current branch""" wc = repo[None] movemark = node = None try: node = repo.branchtip(wc.branch()) if bookmarks.isactivewdirparent(repo): movemark = repo['.'].node() except error.RepoLookupError: if wc.branch() == 'default': # no default branch! node = repo.lookup('tip') # update to tip else: raise error.Abort(_("branch %s not found") % wc.branch()) return node, movemark, None # order in which each step should be evalutated # steps are run until one finds a destination destupdatesteps = ['evolution', 'bookmark', 'branch'] # mapping to ease extension overriding steps. destupdatestepmap = {'evolution': _destupdateobs, 'bookmark': _destupdatebook, 'branch': _destupdatebranch, } def destupdate(repo, clean=False, check=False): """destination for bare update operation return (rev, movemark, activemark) - rev: the revision to update to, - movemark: node to move the active bookmark from (cf bookmark.calculate update), - activemark: a bookmark to activate at the end of the update. 
""" node = movemark = activemark = None for step in destupdatesteps: node, movemark, activemark = destupdatestepmap[step](repo, clean, check) if node is not None: break rev = repo[node].rev() _destupdatevalidate(repo, rev, clean, check) return rev, movemark, activemark def _destmergebook(repo): """find merge destination in the active bookmark case""" node = None bmheads = repo.bookmarkheads(repo._activebookmark) curhead = repo[repo._activebookmark].node() if len(bmheads) == 2: if curhead == bmheads[0]: node = bmheads[1] else: node = bmheads[0] elif len(bmheads) > 2: raise error.Abort(_("multiple matching bookmarks to merge - " "please merge with an explicit rev or bookmark"), hint=_("run 'hg heads' to see all heads")) elif len(bmheads) <= 1: raise error.Abort(_("no matching bookmark to merge - " "please merge with an explicit rev or bookmark"), hint=_("run 'hg heads' to see all heads")) assert node is not None return node def _destmergebranch(repo): """find merge destination based on branch heads""" node = None branch = repo[None].branch() bheads = repo.branchheads(branch) nbhs = [bh for bh in bheads if not repo[bh].bookmarks()] if len(nbhs) > 2: raise error.Abort(_("branch '%s' has %d heads - " "please merge with an explicit rev") % (branch, len(bheads)), hint=_("run 'hg heads .' 
to see heads")) parent = repo.dirstate.p1() if len(nbhs) <= 1: if len(bheads) > 1: raise error.Abort(_("heads are bookmarked - " "please merge with an explicit rev"), hint=_("run 'hg heads' to see all heads")) if len(repo.heads()) > 1: raise error.Abort(_("branch '%s' has one head - " "please merge with an explicit rev") % branch, hint=_("run 'hg heads' to see all heads")) msg, hint = _('nothing to merge'), None if parent != repo.lookup(branch): hint = _("use 'hg update' instead") raise error.Abort(msg, hint=hint) if parent not in bheads: raise error.Abort(_('working directory not at a head revision'), hint=_("use 'hg update' or merge with an " "explicit revision")) if parent == nbhs[0]: node = nbhs[-1] else: node = nbhs[0] assert node is not None return node def destmerge(repo): if repo._activebookmark: node = _destmergebook(repo) else: node = _destmergebranch(repo) return repo[node].rev() histeditdefaultrevset = 'reverse(only(.) and not public() and not ::merge())' def desthistedit(ui, repo): """Default base revision to edit for `hg histedit`.""" # Avoid cycle: scmutil -> revset -> destutil from . import scmutil default = ui.config('histedit', 'defaultrev', histeditdefaultrevset) if default: revs = scmutil.revrange(repo, [default]) if revs: # The revset supplied by the user may not be in ascending order nor # take the first revision. So do this manually. revs.sort() return revs.first() return None mercurial-3.7.3/mercurial/statichttprepo.py0000644000175000017500000001241212676531525020541 0ustar mpmmpm00000000000000# statichttprepo.py - simple http repository class for mercurial # # This provides read-only repo access to repositories exported via static http # # Copyright 2005-2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import errno import os import urllib import urllib2 from .i18n import _ from . 
import ( byterange, changelog, error, localrepo, manifest, namespaces, scmutil, store, url, util, ) class httprangereader(object): def __init__(self, url, opener): # we assume opener has HTTPRangeHandler self.url = url self.pos = 0 self.opener = opener self.name = url def __enter__(self): return self def __exit__(self, exc_type, exc_value, traceback): self.close() def seek(self, pos): self.pos = pos def read(self, bytes=None): req = urllib2.Request(self.url) end = '' if bytes: end = self.pos + bytes - 1 if self.pos or end: req.add_header('Range', 'bytes=%d-%s' % (self.pos, end)) try: f = self.opener.open(req) data = f.read() code = f.code except urllib2.HTTPError as inst: num = inst.code == 404 and errno.ENOENT or None raise IOError(num, inst) except urllib2.URLError as inst: raise IOError(None, inst.reason[1]) if code == 200: # HTTPRangeHandler does nothing if remote does not support # Range headers and returns the full entity. Let's slice it. if bytes: data = data[self.pos:self.pos + bytes] else: data = data[self.pos:] elif bytes: data = data[:bytes] self.pos += len(data) return data def readlines(self): return self.read().splitlines(True) def __iter__(self): return iter(self.readlines()) def close(self): pass def build_opener(ui, authinfo): # urllib cannot handle URLs with embedded user or passwd urlopener = url.opener(ui, authinfo) urlopener.add_handler(byterange.HTTPRangeHandler()) class statichttpvfs(scmutil.abstractvfs): def __init__(self, base): self.base = base def __call__(self, path, mode='r', *args, **kw): if mode not in ('r', 'rb'): raise IOError('Permission denied') f = "/".join((self.base, urllib.quote(path))) return httprangereader(f, urlopener) def join(self, path): if path: return os.path.join(self.base, path) else: return self.base return statichttpvfs class statichttppeer(localrepo.localpeer): def local(self): return None def canpush(self): return False class statichttprepository(localrepo.localrepository): supported = 
localrepo.localrepository._basesupported def __init__(self, ui, path): self._url = path self.ui = ui self.root = path u = util.url(path.rstrip('/') + "/.hg") self.path, authinfo = u.authinfo() opener = build_opener(ui, authinfo) self.opener = opener(self.path) self.vfs = self.opener self._phasedefaults = [] self.names = namespaces.namespaces() try: requirements = scmutil.readrequires(self.vfs, self.supported) except IOError as inst: if inst.errno != errno.ENOENT: raise requirements = set() # check if it is a non-empty old-style repository try: fp = self.vfs("00changelog.i") fp.read(1) fp.close() except IOError as inst: if inst.errno != errno.ENOENT: raise # we do not care about empty old-style repositories here msg = _("'%s' does not appear to be an hg repository") % path raise error.RepoError(msg) # setup store self.store = store.store(requirements, self.path, opener) self.spath = self.store.path self.svfs = self.store.opener self.sjoin = self.store.join self._filecache = {} self.requirements = requirements self.manifest = manifest.manifest(self.svfs) self.changelog = changelog.changelog(self.svfs) self._tags = None self.nodetagscache = None self._branchcaches = {} self._revbranchcache = None self.encodepats = None self.decodepats = None self._transref = None def _restrictcapabilities(self, caps): caps = super(statichttprepository, self)._restrictcapabilities(caps) return caps.difference(["pushkey"]) def url(self): return self._url def local(self): return False def peer(self): return statichttppeer(self) def lock(self, wait=True): raise error.Abort(_('cannot lock static-http repository')) def instance(ui, path, create): if create: raise error.Abort(_('cannot create new static-http repository')) return statichttprepository(ui, path[7:]) mercurial-3.7.3/mercurial/formatter.py0000644000175000017500000001541012676531524017467 0ustar mpmmpm00000000000000# formatter.py - generic output formatting for mercurial # # Copyright 2012 Matt Mackall # # This software may be 
used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import cPickle import os from .i18n import _ from .node import ( hex, short, ) from . import ( encoding, error, templater, ) class baseformatter(object): def __init__(self, ui, topic, opts): self._ui = ui self._topic = topic self._style = opts.get("style") self._template = opts.get("template") self._item = None # function to convert node to string suitable for this output self.hexfunc = hex def __nonzero__(self): '''return False if we're not doing real templating so we can skip extra work''' return True def _showitem(self): '''show a formatted item once all data is collected''' pass def startitem(self): '''begin an item in the format list''' if self._item is not None: self._showitem() self._item = {} def data(self, **data): '''insert data into item that's not shown in default output''' self._item.update(data) def write(self, fields, deftext, *fielddata, **opts): '''do default text output while assigning data to item''' fieldkeys = fields.split() assert len(fieldkeys) == len(fielddata) self._item.update(zip(fieldkeys, fielddata)) def condwrite(self, cond, fields, deftext, *fielddata, **opts): '''do conditional write (primarily for plain formatter)''' fieldkeys = fields.split() assert len(fieldkeys) == len(fielddata) self._item.update(zip(fieldkeys, fielddata)) def plain(self, text, **opts): '''show raw text for non-templated mode''' pass def end(self): '''end output for the formatter''' if self._item is not None: self._showitem() class plainformatter(baseformatter): '''the default text output scheme''' def __init__(self, ui, topic, opts): baseformatter.__init__(self, ui, topic, opts) if ui.debugflag: self.hexfunc = hex else: self.hexfunc = short def __nonzero__(self): return False def startitem(self): pass def data(self, **data): pass def write(self, fields, deftext, *fielddata, **opts): self._ui.write(deftext % 
fielddata, **opts) def condwrite(self, cond, fields, deftext, *fielddata, **opts): '''do conditional write''' if cond: self._ui.write(deftext % fielddata, **opts) def plain(self, text, **opts): self._ui.write(text, **opts) def end(self): pass class debugformatter(baseformatter): def __init__(self, ui, topic, opts): baseformatter.__init__(self, ui, topic, opts) self._ui.write("%s = [\n" % self._topic) def _showitem(self): self._ui.write(" " + repr(self._item) + ",\n") def end(self): baseformatter.end(self) self._ui.write("]\n") class pickleformatter(baseformatter): def __init__(self, ui, topic, opts): baseformatter.__init__(self, ui, topic, opts) self._data = [] def _showitem(self): self._data.append(self._item) def end(self): baseformatter.end(self) self._ui.write(cPickle.dumps(self._data)) def _jsonifyobj(v): if isinstance(v, tuple): return '[' + ', '.join(_jsonifyobj(e) for e in v) + ']' elif v is None: return 'null' elif v is True: return 'true' elif v is False: return 'false' elif isinstance(v, (int, float)): return str(v) else: return '"%s"' % encoding.jsonescape(v) class jsonformatter(baseformatter): def __init__(self, ui, topic, opts): baseformatter.__init__(self, ui, topic, opts) self._ui.write("[") self._ui._first = True def _showitem(self): if self._ui._first: self._ui._first = False else: self._ui.write(",") self._ui.write("\n {\n") first = True for k, v in sorted(self._item.items()): if first: first = False else: self._ui.write(",\n") self._ui.write(' "%s": %s' % (k, _jsonifyobj(v))) self._ui.write("\n }") def end(self): baseformatter.end(self) self._ui.write("\n]\n") class templateformatter(baseformatter): def __init__(self, ui, topic, opts): baseformatter.__init__(self, ui, topic, opts) self._topic = topic self._t = gettemplater(ui, topic, opts.get('template', '')) def _showitem(self): g = self._t(self._topic, **self._item) self._ui.write(templater.stringify(g)) def lookuptemplate(ui, topic, tmpl): # looks like a literal template? 
if '{' in tmpl: return tmpl, None # perhaps a stock style? if not os.path.split(tmpl)[0]: mapname = (templater.templatepath('map-cmdline.' + tmpl) or templater.templatepath(tmpl)) if mapname and os.path.isfile(mapname): return None, mapname # perhaps it's a reference to [templates] t = ui.config('templates', tmpl) if t: try: tmpl = templater.unquotestring(t) except SyntaxError: tmpl = t return tmpl, None if tmpl == 'list': ui.write(_("available styles: %s\n") % templater.stylelist()) raise error.Abort(_("specify a template")) # perhaps it's a path to a map or a template if ('/' in tmpl or '\\' in tmpl) and os.path.isfile(tmpl): # is it a mapfile for a style? if os.path.basename(tmpl).startswith("map-"): return None, os.path.realpath(tmpl) tmpl = open(tmpl).read() return tmpl, None # constant string? return tmpl, None def gettemplater(ui, topic, spec): tmpl, mapfile = lookuptemplate(ui, topic, spec) t = templater.templater(mapfile, {}) if tmpl: t.cache[topic] = tmpl return t def formatter(ui, topic, opts): template = opts.get("template", "") if template == "json": return jsonformatter(ui, topic, opts) elif template == "pickle": return pickleformatter(ui, topic, opts) elif template == "debug": return debugformatter(ui, topic, opts) elif template != "": return templateformatter(ui, topic, opts) # developer config: ui.formatdebug elif ui.configbool('ui', 'formatdebug'): return debugformatter(ui, topic, opts) # deprecated config: ui.formatjson elif ui.configbool('ui', 'formatjson'): return jsonformatter(ui, topic, opts) return plainformatter(ui, topic, opts) mercurial-3.7.3/mercurial/minirst.py0000644000175000017500000007052112676531525017156 0ustar mpmmpm00000000000000# minirst.py - minimal reStructuredText parser # # Copyright 2009, 2010 Matt Mackall and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. """simplified reStructuredText parser. 
This parser knows just enough about reStructuredText to parse the Mercurial docstrings. It cheats in a major way: nested blocks are not really nested. They are just indented blocks that look like they are nested. This relies on the user to keep the right indentation for the blocks. Remember to update https://mercurial-scm.org/wiki/HelpStyleGuide when adding support for new constructs. """ from __future__ import absolute_import import cgi import re from .i18n import _ from . import ( encoding, util, ) def section(s): return "%s\n%s\n\n" % (s, "\"" * encoding.colwidth(s)) def subsection(s): return "%s\n%s\n\n" % (s, '=' * encoding.colwidth(s)) def subsubsection(s): return "%s\n%s\n\n" % (s, "-" * encoding.colwidth(s)) def subsubsubsection(s): return "%s\n%s\n\n" % (s, "." * encoding.colwidth(s)) def replace(text, substs): ''' Apply a list of (find, replace) pairs to a text. >>> replace("foo bar", [('f', 'F'), ('b', 'B')]) 'Foo Bar' >>> encoding.encoding = 'latin1' >>> replace('\\x81\\\\', [('\\\\', '/')]) '\\x81/' >>> encoding.encoding = 'shiftjis' >>> replace('\\x81\\\\', [('\\\\', '/')]) '\\x81\\\\' ''' # some character encodings (cp932 for Japanese, at least) use # ASCII characters other than control/alphabet/digit as a part of # multi-bytes characters, so direct replacing with such characters # on strings in local encoding causes invalid byte sequences. utext = text.decode(encoding.encoding) for f, t in substs: utext = utext.replace(f.decode("ascii"), t.decode("ascii")) return utext.encode(encoding.encoding) _blockre = re.compile(r"\n(?:\s*\n)+") def findblocks(text): """Find continuous blocks of lines in text. Returns a list of dictionaries representing the blocks. Each block has an 'indent' field and a 'lines' field. 
""" blocks = [] for b in _blockre.split(text.lstrip('\n').rstrip()): lines = b.splitlines() if lines: indent = min((len(l) - len(l.lstrip())) for l in lines) lines = [l[indent:] for l in lines] blocks.append({'indent': indent, 'lines': lines}) return blocks def findliteralblocks(blocks): """Finds literal blocks and adds a 'type' field to the blocks. Literal blocks are given the type 'literal', all other blocks are given type the 'paragraph'. """ i = 0 while i < len(blocks): # Searching for a block that looks like this: # # +------------------------------+ # | paragraph | # | (ends with "::") | # +------------------------------+ # +---------------------------+ # | indented literal block | # +---------------------------+ blocks[i]['type'] = 'paragraph' if blocks[i]['lines'][-1].endswith('::') and i + 1 < len(blocks): indent = blocks[i]['indent'] adjustment = blocks[i + 1]['indent'] - indent if blocks[i]['lines'] == ['::']: # Expanded form: remove block del blocks[i] i -= 1 elif blocks[i]['lines'][-1].endswith(' ::'): # Partially minimized form: remove space and both # colons. blocks[i]['lines'][-1] = blocks[i]['lines'][-1][:-3] elif len(blocks[i]['lines']) == 1 and \ blocks[i]['lines'][0].lstrip(' ').startswith('.. ') and \ blocks[i]['lines'][0].find(' ', 3) == -1: # directive on its own line, not a literal block i += 1 continue else: # Fully minimized form: remove just one colon. blocks[i]['lines'][-1] = blocks[i]['lines'][-1][:-1] # List items are formatted with a hanging indent. We must # correct for this here while we still have the original # information on the indentation of the subsequent literal # blocks available. m = _bulletre.match(blocks[i]['lines'][0]) if m: indent += m.end() adjustment -= m.end() # Mark the following indented blocks. 
while i + 1 < len(blocks) and blocks[i + 1]['indent'] > indent: blocks[i + 1]['type'] = 'literal' blocks[i + 1]['indent'] -= adjustment i += 1 i += 1 return blocks _bulletre = re.compile(r'(-|[0-9A-Za-z]+\.|\(?[0-9A-Za-z]+\)|\|) ') _optionre = re.compile(r'^(-([a-zA-Z0-9]), )?(--[a-z0-9-]+)' r'((.*) +)(.*)$') _fieldre = re.compile(r':(?![: ])([^:]*)(? indent: if prune: del blocks[j] else: blocks[j]['indent'] -= adjustment j += 1 i += 1 return blocks, pruned _sectionre = re.compile(r"""^([-=`:.'"~^_*+#])\1+$""") def findtables(blocks): '''Find simple tables Only simple one-line table elements are supported ''' for block in blocks: # Searching for a block that looks like this: # # === ==== === # A B C # === ==== === <- optional # 1 2 3 # x y z # === ==== === if (block['type'] == 'paragraph' and len(block['lines']) > 2 and _tablere.match(block['lines'][0]) and block['lines'][0] == block['lines'][-1]): block['type'] = 'table' block['header'] = False div = block['lines'][0] # column markers are ASCII so we can calculate column # position in bytes columns = [x for x in xrange(len(div)) if div[x] == '=' and (x == 0 or div[x - 1] == ' ')] rows = [] for l in block['lines'][1:-1]: if l == div: block['header'] = True continue row = [] # we measure columns not in bytes or characters but in # colwidth which makes things tricky pos = columns[0] # leading whitespace is bytes for n, start in enumerate(columns): if n + 1 < len(columns): width = columns[n + 1] - start v = encoding.getcols(l, pos, width) # gather columns pos += len(v) # calculate byte position of end row.append(v.strip()) else: row.append(l[pos:].strip()) rows.append(row) block['table'] = rows return blocks def findsections(blocks): """Finds sections. The blocks must have a 'type' field, i.e., they should have been run through findliteralblocks first. 
""" for block in blocks: # Searching for a block that looks like this: # # +------------------------------+ # | Section title | # | ------------- | # +------------------------------+ if (block['type'] == 'paragraph' and len(block['lines']) == 2 and encoding.colwidth(block['lines'][0]) == len(block['lines'][1]) and _sectionre.match(block['lines'][1])): block['underline'] = block['lines'][1][0] block['type'] = 'section' del block['lines'][1] return blocks def inlineliterals(blocks): substs = [('``', '"')] for b in blocks: if b['type'] in ('paragraph', 'section'): b['lines'] = [replace(l, substs) for l in b['lines']] return blocks def hgrole(blocks): substs = [(':hg:`', "'hg "), ('`', "'")] for b in blocks: if b['type'] in ('paragraph', 'section'): # Turn :hg:`command` into "hg command". This also works # when there is a line break in the command and relies on # the fact that we have no stray back-quotes in the input # (run the blocks through inlineliterals first). b['lines'] = [replace(l, substs) for l in b['lines']] return blocks def addmargins(blocks): """Adds empty blocks for vertical spacing. This groups bullets, options, and definitions together with no vertical space between them, and adds an empty block between all other blocks. """ i = 1 while i < len(blocks): if (blocks[i]['type'] == blocks[i - 1]['type'] and blocks[i]['type'] in ('bullet', 'option', 'field')): i += 1 elif not blocks[i - 1]['lines']: # no lines in previous block, do not separate i += 1 else: blocks.insert(i, {'lines': [''], 'indent': 0, 'type': 'margin'}) i += 2 return blocks def prunecomments(blocks): """Remove comments.""" i = 0 while i < len(blocks): b = blocks[i] if b['type'] == 'paragraph' and (b['lines'][0].startswith('.. ') or b['lines'] == ['..']): del blocks[i] if i < len(blocks) and blocks[i]['type'] == 'margin': del blocks[i] else: i += 1 return blocks _admonitionre = re.compile(r"\.\. 
(admonition|attention|caution|danger|" r"error|hint|important|note|tip|warning)::", flags=re.IGNORECASE) def findadmonitions(blocks): """ Makes the type of the block an admonition block if the first line is an admonition directive """ i = 0 while i < len(blocks): m = _admonitionre.match(blocks[i]['lines'][0]) if m: blocks[i]['type'] = 'admonition' admonitiontitle = blocks[i]['lines'][0][3:m.end() - 2].lower() firstline = blocks[i]['lines'][0][m.end() + 1:] if firstline: blocks[i]['lines'].insert(1, ' ' + firstline) blocks[i]['admonitiontitle'] = admonitiontitle del blocks[i]['lines'][0] i = i + 1 return blocks _admonitiontitles = {'attention': _('Attention:'), 'caution': _('Caution:'), 'danger': _('!Danger!') , 'error': _('Error:'), 'hint': _('Hint:'), 'important': _('Important:'), 'note': _('Note:'), 'tip': _('Tip:'), 'warning': _('Warning!')} def formatoption(block, width): desc = ' '.join(map(str.strip, block['lines'])) colwidth = encoding.colwidth(block['optstr']) usablewidth = width - 1 hanging = block['optstrwidth'] initindent = '%s%s ' % (block['optstr'], ' ' * ((hanging - colwidth))) hangindent = ' ' * (encoding.colwidth(initindent) + 1) return ' %s\n' % (util.wrap(desc, usablewidth, initindent=initindent, hangindent=hangindent)) def formatblock(block, width): """Format a block according to width.""" if width <= 0: width = 78 indent = ' ' * block['indent'] if block['type'] == 'admonition': admonition = _admonitiontitles[block['admonitiontitle']] if not block['lines']: return indent + admonition + '\n' hang = len(block['lines'][-1]) - len(block['lines'][-1].lstrip()) defindent = indent + hang * ' ' text = ' '.join(map(str.strip, block['lines'])) return '%s\n%s\n' % (indent + admonition, util.wrap(text, width=width, initindent=defindent, hangindent=defindent)) if block['type'] == 'margin': return '\n' if block['type'] == 'literal': indent += ' ' return indent + ('\n' + indent).join(block['lines']) + '\n' if block['type'] == 'section': underline = 
encoding.colwidth(block['lines'][0]) * block['underline'] return "%s%s\n%s%s\n" % (indent, block['lines'][0],indent, underline) if block['type'] == 'table': table = block['table'] # compute column widths widths = [max([encoding.colwidth(e) for e in c]) for c in zip(*table)] text = '' span = sum(widths) + len(widths) - 1 indent = ' ' * block['indent'] hang = ' ' * (len(indent) + span - widths[-1]) for row in table: l = [] for w, v in zip(widths, row): pad = ' ' * (w - encoding.colwidth(v)) l.append(v + pad) l = ' '.join(l) l = util.wrap(l, width=width, initindent=indent, hangindent=hang) if not text and block['header']: text = l + '\n' + indent + '-' * (min(width, span)) + '\n' else: text += l + "\n" return text if block['type'] == 'definition': term = indent + block['lines'][0] hang = len(block['lines'][-1]) - len(block['lines'][-1].lstrip()) defindent = indent + hang * ' ' text = ' '.join(map(str.strip, block['lines'][1:])) return '%s\n%s\n' % (term, util.wrap(text, width=width, initindent=defindent, hangindent=defindent)) subindent = indent if block['type'] == 'bullet': if block['lines'][0].startswith('| '): # Remove bullet for line blocks and add no extra # indentation. 
block['lines'][0] = block['lines'][0][2:] else: m = _bulletre.match(block['lines'][0]) subindent = indent + m.end() * ' ' elif block['type'] == 'field': key = block['key'] subindent = indent + _fieldwidth * ' ' if len(key) + 2 > _fieldwidth: # key too large, use full line width key = key.ljust(width) else: # key fits within field width key = key.ljust(_fieldwidth) block['lines'][0] = key + block['lines'][0] elif block['type'] == 'option': return formatoption(block, width) text = ' '.join(map(str.strip, block['lines'])) return util.wrap(text, width=width, initindent=indent, hangindent=subindent) + '\n' def formathtml(blocks): """Format RST blocks as HTML""" out = [] headernest = '' listnest = [] def escape(s): return cgi.escape(s, True) def openlist(start, level): if not listnest or listnest[-1][0] != start: listnest.append((start, level)) out.append('<%s>\n' % start) blocks = [b for b in blocks if b['type'] != 'margin'] for pos, b in enumerate(blocks): btype = b['type'] level = b['indent'] lines = b['lines'] if btype == 'admonition': admonition = escape(_admonitiontitles[b['admonitiontitle']]) text = escape(' '.join(map(str.strip, lines))) out.append('

\n%s %s\n

\n' % (admonition, text)) elif btype == 'paragraph': out.append('

\n%s\n

\n' % escape('\n'.join(lines))) elif btype == 'margin': pass elif btype == 'literal': out.append('
\n%s\n
\n' % escape('\n'.join(lines))) elif btype == 'section': i = b['underline'] if i not in headernest: headernest += i level = headernest.index(i) + 1 out.append('%s\n' % (level, escape(lines[0]), level)) elif btype == 'table': table = b['table'] out.append('\n') for row in table: out.append('') for v in row: out.append('') out.append('\n') out.pop() out.append('\n') out.append('
') out.append(escape(v)) out.append('
\n') elif btype == 'definition': openlist('dl', level) term = escape(lines[0]) text = escape(' '.join(map(str.strip, lines[1:]))) out.append('
%s\n
%s\n' % (term, text)) elif btype == 'bullet': bullet, head = lines[0].split(' ', 1) if bullet == '-': openlist('ul', level) else: openlist('ol', level) out.append('
  • %s\n' % escape(' '.join([head] + lines[1:]))) elif btype == 'field': openlist('dl', level) key = escape(b['key']) text = escape(' '.join(map(str.strip, lines))) out.append('
    %s\n
    %s\n' % (key, text)) elif btype == 'option': openlist('dl', level) opt = escape(b['optstr']) desc = escape(' '.join(map(str.strip, lines))) out.append('
    %s\n
    %s\n' % (opt, desc)) # close lists if indent level of next block is lower if listnest: start, level = listnest[-1] if pos == len(blocks) - 1: out.append('\n' % start) listnest.pop() else: nb = blocks[pos + 1] ni = nb['indent'] if (ni < level or (ni == level and nb['type'] not in 'definition bullet field option')): out.append('\n' % start) listnest.pop() return ''.join(out) def parse(text, indent=0, keep=None): """Parse text into a list of blocks""" pruned = [] blocks = findblocks(text) for b in blocks: b['indent'] += indent blocks = findliteralblocks(blocks) blocks = findtables(blocks) blocks, pruned = prunecontainers(blocks, keep or []) blocks = findsections(blocks) blocks = inlineliterals(blocks) blocks = hgrole(blocks) blocks = splitparagraphs(blocks) blocks = updatefieldlists(blocks) blocks = updateoptionlists(blocks) blocks = findadmonitions(blocks) blocks = addmargins(blocks) blocks = prunecomments(blocks) return blocks, pruned def formatblocks(blocks, width): text = ''.join(formatblock(b, width) for b in blocks) return text def format(text, width=80, indent=0, keep=None, style='plain', section=None): """Parse and format the text according to width.""" blocks, pruned = parse(text, indent, keep or []) parents = [] if section: sections = getsections(blocks) blocks = [] i = 0 lastparents = [] synthetic = [] collapse = True while i < len(sections): name, nest, b = sections[i] del parents[nest:] parents.append(i) if name == section: if lastparents != parents: llen = len(lastparents) plen = len(parents) if llen and llen != plen: collapse = False s = [] for j in xrange(3, plen - 1): parent = parents[j] if (j >= llen or lastparents[j] != parent): s.append(len(blocks)) sec = sections[parent][2] blocks.append(sec[0]) blocks.append(sec[-1]) if s: synthetic.append(s) lastparents = parents[:] blocks.extend(b) ## Also show all subnested sections while i + 1 < len(sections) and sections[i + 1][1] > nest: i += 1 blocks.extend(sections[i][2]) i += 1 if collapse: 
synthetic.reverse() for s in synthetic: path = [blocks[i]['lines'][0] for i in s] real = s[-1] + 2 realline = blocks[real]['lines'] realline[0] = ('"%s"' % '.'.join(path + [realline[0]]).replace('"', '')) del blocks[s[0]:real] if style == 'html': text = formathtml(blocks) else: text = ''.join(formatblock(b, width) for b in blocks) if keep is None: return text else: return text, pruned def getsections(blocks): '''return a list of (section name, nesting level, blocks) tuples''' nest = "" level = 0 secs = [] def getname(b): if b['type'] == 'field': x = b['key'] else: x = b['lines'][0] x = x.lower().strip('"') if '(' in x: x = x.split('(')[0] return x for b in blocks: if b['type'] == 'section': i = b['underline'] if i not in nest: nest += i level = nest.index(i) + 1 nest = nest[:level] secs.append((getname(b), level, [b])) elif b['type'] in ('definition', 'field'): i = ' ' if i not in nest: nest += i level = nest.index(i) + 1 nest = nest[:level] for i in range(1, len(secs) + 1): sec = secs[-i] if sec[1] < level: break siblings = [a for a in sec[2] if a['type'] == 'definition'] if siblings: siblingindent = siblings[-1]['indent'] indent = b['indent'] if siblingindent < indent: level += 1 break elif siblingindent == indent: level = sec[1] break secs.append((getname(b), level, [b])) else: if not secs: # add an initial empty section secs = [('', 0, [])] if b['type'] != 'margin': pointer = 1 bindent = b['indent'] while pointer < len(secs): section = secs[-pointer][2][0] if section['type'] != 'margin': sindent = section['indent'] if len(section['lines']) > 1: sindent += len(section['lines'][1]) - \ len(section['lines'][1].lstrip(' ')) if bindent >= sindent: break pointer += 1 if pointer > 1: blevel = secs[-pointer][1] if section['type'] != b['type']: blevel += 1 secs.append(('', blevel, [])) secs[-1][2].append(b) return secs def decorateblocks(blocks, width): '''generate a list of (section name, line text) pairs for search''' lines = [] for s in getsections(blocks): section = 
s[0] text = formatblocks(s[2], width) lines.append([(section, l) for l in text.splitlines(True)]) return lines def maketable(data, indent=0, header=False): '''Generate an RST table for the given table data as a list of lines''' widths = [max(encoding.colwidth(e) for e in c) for c in zip(*data)] indent = ' ' * indent div = indent + ' '.join('=' * w for w in widths) + '\n' out = [div] for row in data: l = [] for w, v in zip(widths, row): if '\n' in v: # only remove line breaks and indentation, long lines are # handled by the next tool v = ' '.join(e.lstrip() for e in v.split('\n')) pad = ' ' * (w - encoding.colwidth(v)) l.append(v + pad) out.append(indent + ' '.join(l) + "\n") if header and len(data) > 1: out.insert(2, div) out.append(div) return out mercurial-3.7.3/mercurial/archival.py0000644000175000017500000002512312676531525017260 0ustar mpmmpm00000000000000# archival.py - revision archival for mercurial # # Copyright 2006 Vadim Gelfer # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import cStringIO import gzip import os import struct import tarfile import time import zipfile import zlib from .i18n import _ from . import ( cmdutil, encoding, error, match as matchmod, scmutil, util, ) # from unzip source code: _UNX_IFREG = 0x8000 _UNX_IFLNK = 0xa000 def tidyprefix(dest, kind, prefix): '''choose prefix to use for names in archive. make sure prefix is safe for consumers.''' if prefix: prefix = util.normpath(prefix) else: if not isinstance(dest, str): raise ValueError('dest must be string if no prefix') prefix = os.path.basename(dest) lower = prefix.lower() for sfx in exts.get(kind, []): if lower.endswith(sfx): prefix = prefix[:-len(sfx)] break lpfx = os.path.normpath(util.localpath(prefix)) prefix = util.pconvert(lpfx) if not prefix.endswith('/'): prefix += '/' # Drop the leading '.' 
path component if present, so Windows can read the # zip files (issue4634) if prefix.startswith('./'): prefix = prefix[2:] if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix: raise error.Abort(_('archive prefix contains illegal components')) return prefix exts = { 'tar': ['.tar'], 'tbz2': ['.tbz2', '.tar.bz2'], 'tgz': ['.tgz', '.tar.gz'], 'zip': ['.zip'], } def guesskind(dest): for kind, extensions in exts.iteritems(): if any(dest.endswith(ext) for ext in extensions): return kind return None def _rootctx(repo): # repo[0] may be hidden for rev in repo: return repo[rev] return repo['null'] def buildmetadata(ctx): '''build content of .hg_archival.txt''' repo = ctx.repo() hex = ctx.hex() if ctx.rev() is None: hex = ctx.p1().hex() if ctx.dirty(): hex += '+' base = 'repo: %s\nnode: %s\nbranch: %s\n' % ( _rootctx(repo).hex(), hex, encoding.fromlocal(ctx.branch())) tags = ''.join('tag: %s\n' % t for t in ctx.tags() if repo.tagtype(t) == 'global') if not tags: repo.ui.pushbuffer() opts = {'template': '{latesttag}\n{latesttagdistance}\n' '{changessincelatesttag}', 'style': '', 'patch': None, 'git': None} cmdutil.show_changeset(repo.ui, repo, opts).show(ctx) ltags, dist, changessince = repo.ui.popbuffer().split('\n') ltags = ltags.split(':') tags = ''.join('latesttag: %s\n' % t for t in ltags) tags += 'latesttagdistance: %s\n' % dist tags += 'changessincelatesttag: %s\n' % changessince return base + tags class tarit(object): '''write archive to tar file or stream. 
can write uncompressed, or compress with gzip or bzip2.''' class GzipFileWithTime(gzip.GzipFile): def __init__(self, *args, **kw): timestamp = None if 'timestamp' in kw: timestamp = kw.pop('timestamp') if timestamp is None: self.timestamp = time.time() else: self.timestamp = timestamp gzip.GzipFile.__init__(self, *args, **kw) def _write_gzip_header(self): self.fileobj.write('\037\213') # magic header self.fileobj.write('\010') # compression method fname = self.name if fname and fname.endswith('.gz'): fname = fname[:-3] flags = 0 if fname: flags = gzip.FNAME self.fileobj.write(chr(flags)) gzip.write32u(self.fileobj, long(self.timestamp)) self.fileobj.write('\002') self.fileobj.write('\377') if fname: self.fileobj.write(fname + '\000') def __init__(self, dest, mtime, kind=''): self.mtime = mtime self.fileobj = None def taropen(name, mode, fileobj=None): if kind == 'gz': mode = mode[0] if not fileobj: fileobj = open(name, mode + 'b') gzfileobj = self.GzipFileWithTime(name, mode + 'b', zlib.Z_BEST_COMPRESSION, fileobj, timestamp=mtime) self.fileobj = gzfileobj return tarfile.TarFile.taropen(name, mode, gzfileobj) else: return tarfile.open(name, mode + kind, fileobj) if isinstance(dest, str): self.z = taropen(dest, mode='w:') else: # Python 2.5-2.5.1 have a regression that requires a name arg self.z = taropen(name='', mode='w|', fileobj=dest) def addfile(self, name, mode, islink, data): i = tarfile.TarInfo(name) i.mtime = self.mtime i.size = len(data) if islink: i.type = tarfile.SYMTYPE i.mode = 0o777 i.linkname = data data = None i.size = 0 else: i.mode = mode data = cStringIO.StringIO(data) self.z.addfile(i, data) def done(self): self.z.close() if self.fileobj: self.fileobj.close() class tellable(object): '''provide tell method for zipfile.ZipFile when writing to http response file object.''' def __init__(self, fp): self.fp = fp self.offset = 0 def __getattr__(self, key): return getattr(self.fp, key) def write(self, s): self.fp.write(s) self.offset += len(s) def 
tell(self): return self.offset class zipit(object): '''write archive to zip file or stream. can write uncompressed, or compressed with deflate.''' def __init__(self, dest, mtime, compress=True): if not isinstance(dest, str): try: dest.tell() except (AttributeError, IOError): dest = tellable(dest) self.z = zipfile.ZipFile(dest, 'w', compress and zipfile.ZIP_DEFLATED or zipfile.ZIP_STORED) # Python's zipfile module emits deprecation warnings if we try # to store files with a date before 1980. epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0)) if mtime < epoch: mtime = epoch self.mtime = mtime self.date_time = time.gmtime(mtime)[:6] def addfile(self, name, mode, islink, data): i = zipfile.ZipInfo(name, self.date_time) i.compress_type = self.z.compression # unzip will not honor unix file modes unless file creator is # set to unix (id 3). i.create_system = 3 ftype = _UNX_IFREG if islink: mode = 0o777 ftype = _UNX_IFLNK i.external_attr = (mode | ftype) << 16L # add "extended-timestamp" extra block, because zip archives # without this will be extracted with unexpected timestamp, # if TZ is not configured as GMT i.extra += struct.pack(' # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. # see http://effbot.org/zone/simple-top-down-parsing.htm and # http://eli.thegreenplace.net/2010/01/02/top-down-operator-precedence-parsing/ # for background # takes a tokenizer and elements # tokenizer is an iterator that returns (type, value, pos) tuples # elements is a mapping of types to binding strength, primary, prefix, infix # and suffix actions # an action is a tree node name, a tree label, and an optional match # __call__(program) parses program into a labeled tree from __future__ import absolute_import from .i18n import _ from . 
import error class parser(object): def __init__(self, elements, methods=None): self._elements = elements self._methods = methods self.current = None def _advance(self): 'advance the tokenizer' t = self.current self.current = next(self._iter, None) return t def _hasnewterm(self): 'True if next token may start new term' return any(self._elements[self.current[0]][1:3]) def _match(self, m): 'make sure the tokenizer matches an end condition' if self.current[0] != m: raise error.ParseError(_("unexpected token: %s") % self.current[0], self.current[2]) self._advance() def _parseoperand(self, bind, m=None): 'gather right-hand-side operand until an end condition or binding met' if m and self.current[0] == m: expr = None else: expr = self._parse(bind) if m: self._match(m) return expr def _parse(self, bind=0): token, value, pos = self._advance() # handle prefix rules on current token, take as primary if unambiguous primary, prefix = self._elements[token][1:3] if primary and not (prefix and self._hasnewterm()): expr = (primary, value) elif prefix: expr = (prefix[0], self._parseoperand(*prefix[1:])) else: raise error.ParseError(_("not a prefix: %s") % token, pos) # gather tokens until we meet a lower binding strength while bind < self._elements[self.current[0]][0]: token, value, pos = self._advance() # handle infix rules, take as suffix if unambiguous infix, suffix = self._elements[token][3:] if suffix and not (infix and self._hasnewterm()): expr = (suffix[0], expr) elif infix: expr = (infix[0], expr, self._parseoperand(*infix[1:])) else: raise error.ParseError(_("not an infix: %s") % token, pos) return expr def parse(self, tokeniter): 'generate a parse tree from tokens' self._iter = tokeniter self._advance() res = self._parse() token, value, pos = self.current return res, pos def eval(self, tree): 'recursively evaluate a parse tree using node methods' if not isinstance(tree, tuple): return tree return self._methods[tree[0]](*[self.eval(t) for t in tree[1:]]) def __call__(self, 
tokeniter): 'parse tokens into a parse tree and evaluate if methods given' t = self.parse(tokeniter) if self._methods: return self.eval(t) return t def buildargsdict(trees, funcname, keys, keyvaluenode, keynode): """Build dict from list containing positional and keyword arguments Invalid keywords or too many positional arguments are rejected, but missing arguments are just omitted. """ if len(trees) > len(keys): raise error.ParseError(_("%(func)s takes at most %(nargs)d arguments") % {'func': funcname, 'nargs': len(keys)}) args = {} # consume positional arguments for k, x in zip(keys, trees): if x[0] == keyvaluenode: break args[k] = x # remainder should be keyword arguments for x in trees[len(args):]: if x[0] != keyvaluenode or x[1][0] != keynode: raise error.ParseError(_("%(func)s got an invalid argument") % {'func': funcname}) k = x[1][1] if k not in keys: raise error.ParseError(_("%(func)s got an unexpected keyword " "argument '%(key)s'") % {'func': funcname, 'key': k}) if k in args: raise error.ParseError(_("%(func)s got multiple values for keyword " "argument '%(key)s'") % {'func': funcname, 'key': k}) args[k] = x[2] return args def unescapestr(s): try: return s.decode("string_escape") except ValueError as e: # mangle Python's exception into our format raise error.ParseError(str(e).lower()) def _prettyformat(tree, leafnodes, level, lines): if not isinstance(tree, tuple) or tree[0] in leafnodes: lines.append((level, str(tree))) else: lines.append((level, '(%s' % tree[0])) for s in tree[1:]: _prettyformat(s, leafnodes, level + 1, lines) lines[-1:] = [(lines[-1][0], lines[-1][1] + ')')] def prettyformat(tree, leafnodes): lines = [] _prettyformat(tree, leafnodes, 0, lines) output = '\n'.join((' ' * l + s) for l, s in lines) return output def simplifyinfixops(tree, targetnodes): """Flatten chained infix operations to reduce usage of Python stack >>> def f(tree): ... print prettyformat(simplifyinfixops(tree, ('or',)), ('symbol',)) >>> f(('or', ... ('or', ... 
('symbol', '1'), ... ('symbol', '2')), ... ('symbol', '3'))) (or ('symbol', '1') ('symbol', '2') ('symbol', '3')) >>> f(('func', ... ('symbol', 'p1'), ... ('or', ... ('or', ... ('func', ... ('symbol', 'sort'), ... ('list', ... ('or', ... ('or', ... ('symbol', '1'), ... ('symbol', '2')), ... ('symbol', '3')), ... ('negate', ... ('symbol', 'rev')))), ... ('and', ... ('symbol', '4'), ... ('group', ... ('or', ... ('or', ... ('symbol', '5'), ... ('symbol', '6')), ... ('symbol', '7'))))), ... ('symbol', '8')))) (func ('symbol', 'p1') (or (func ('symbol', 'sort') (list (or ('symbol', '1') ('symbol', '2') ('symbol', '3')) (negate ('symbol', 'rev')))) (and ('symbol', '4') (group (or ('symbol', '5') ('symbol', '6') ('symbol', '7')))) ('symbol', '8'))) """ if not isinstance(tree, tuple): return tree op = tree[0] if op not in targetnodes: return (op,) + tuple(simplifyinfixops(x, targetnodes) for x in tree[1:]) # walk down left nodes taking each right node. no recursion to left nodes # because infix operators are left-associative, i.e. left tree is deep. # e.g. '1 + 2 + 3' -> (+ (+ 1 2) 3) -> (+ 1 2 3) simplified = [] x = tree while x[0] == op: l, r = x[1:] simplified.append(simplifyinfixops(r, targetnodes)) x = l simplified.append(simplifyinfixops(x, targetnodes)) simplified.append(op) return tuple(reversed(simplified)) mercurial-3.7.3/mercurial/repair.py0000644000175000017500000002366312676531525016760 0ustar mpmmpm00000000000000# repair.py - functions for repository repair for mercurial # # Copyright 2005, 2006 Chris Mason # Copyright 2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import errno from .i18n import _ from .node import short from . 
import ( bundle2, changegroup, error, exchange, util, ) def _bundle(repo, bases, heads, node, suffix, compress=True): """create a bundle with the specified revisions as a backup""" cgversion = changegroup.safeversion(repo) cg = changegroup.changegroupsubset(repo, bases, heads, 'strip', version=cgversion) backupdir = "strip-backup" vfs = repo.vfs if not vfs.isdir(backupdir): vfs.mkdir(backupdir) # Include a hash of all the nodes in the filename for uniqueness allcommits = repo.set('%ln::%ln', bases, heads) allhashes = sorted(c.hex() for c in allcommits) totalhash = util.sha1(''.join(allhashes)).hexdigest() name = "%s/%s-%s-%s.hg" % (backupdir, short(node), totalhash[:8], suffix) comp = None if cgversion != '01': bundletype = "HG20" if compress: comp = 'BZ' elif compress: bundletype = "HG10BZ" else: bundletype = "HG10UN" return changegroup.writebundle(repo.ui, cg, name, bundletype, vfs, compression=comp) def _collectfiles(repo, striprev): """find out the filelogs affected by the strip""" files = set() for x in xrange(striprev, len(repo)): files.update(repo[x].files()) return sorted(files) def _collectbrokencsets(repo, files, striprev): """return the changesets which will be broken by the truncation""" s = set() def collectone(revlog): _, brokenset = revlog.getstrippoint(striprev) s.update([revlog.linkrev(r) for r in brokenset]) collectone(repo.manifest) for fname in files: collectone(repo.file(fname)) return s def strip(ui, repo, nodelist, backup=True, topic='backup'): # This function operates within a transaction of its own, but does # not take any lock on the repo. # Simple way to maintain backwards compatibility for this # argument. 
if backup in ['none', 'strip']: backup = False repo = repo.unfiltered() repo.destroying() cl = repo.changelog # TODO handle undo of merge sets if isinstance(nodelist, str): nodelist = [nodelist] striplist = [cl.rev(node) for node in nodelist] striprev = min(striplist) # Some revisions with rev > striprev may not be descendants of striprev. # We have to find these revisions and put them in a bundle, so that # we can restore them after the truncations. # To create the bundle we use repo.changegroupsubset which requires # the list of heads and bases of the set of interesting revisions. # (head = revision in the set that has no descendant in the set; # base = revision in the set that has no ancestor in the set) tostrip = set(striplist) for rev in striplist: for desc in cl.descendants([rev]): tostrip.add(desc) files = _collectfiles(repo, striprev) saverevs = _collectbrokencsets(repo, files, striprev) # compute heads saveheads = set(saverevs) for r in xrange(striprev + 1, len(cl)): if r not in tostrip: saverevs.add(r) saveheads.difference_update(cl.parentrevs(r)) saveheads.add(r) saveheads = [cl.node(r) for r in saveheads] # compute base nodes if saverevs: descendants = set(cl.descendants(saverevs)) saverevs.difference_update(descendants) savebases = [cl.node(r) for r in saverevs] stripbases = [cl.node(r) for r in tostrip] # For a set s, max(parents(s) - s) is the same as max(heads(::s - s)), but # is much faster newbmtarget = repo.revs('max(parents(%ld) - (%ld))', tostrip, tostrip) if newbmtarget: newbmtarget = repo[newbmtarget.first()].node() else: newbmtarget = '.' 
bm = repo._bookmarks updatebm = [] for m in bm: rev = repo[bm[m]].rev() if rev in tostrip: updatebm.append(m) # create a changegroup for all the branches we need to keep backupfile = None vfs = repo.vfs node = nodelist[-1] if backup: backupfile = _bundle(repo, stripbases, cl.heads(), node, topic) repo.ui.status(_("saved backup bundle to %s\n") % vfs.join(backupfile)) repo.ui.log("backupbundle", "saved backup bundle to %s\n", vfs.join(backupfile)) if saveheads or savebases: # do not compress partial bundle if we remove it from disk later chgrpfile = _bundle(repo, savebases, saveheads, node, 'temp', compress=False) mfst = repo.manifest curtr = repo.currenttransaction() if curtr is not None: del curtr # avoid carrying reference to transaction for nothing msg = _('programming error: cannot strip from inside a transaction') raise error.Abort(msg, hint=_('contact your extension maintainer')) try: with repo.transaction("strip") as tr: offset = len(tr.entries) tr.startgroup() cl.strip(striprev, tr) mfst.strip(striprev, tr) for fn in files: repo.file(fn).strip(striprev, tr) tr.endgroup() for i in xrange(offset, len(tr.entries)): file, troffset, ignore = tr.entries[i] repo.svfs(file, 'a').truncate(troffset) if troffset == 0: repo.store.markremoved(file) if saveheads or savebases: ui.note(_("adding branch\n")) f = vfs.open(chgrpfile, "rb") gen = exchange.readbundle(ui, f, chgrpfile, vfs) if not repo.ui.verbose: # silence internal shuffling chatter repo.ui.pushbuffer() if isinstance(gen, bundle2.unbundle20): with repo.transaction('strip') as tr: tr.hookargs = {'source': 'strip', 'url': 'bundle:' + vfs.join(chgrpfile)} bundle2.applybundle(repo, gen, tr, source='strip', url='bundle:' + vfs.join(chgrpfile)) else: gen.apply(repo, 'strip', 'bundle:' + vfs.join(chgrpfile), True) if not repo.ui.verbose: repo.ui.popbuffer() f.close() for m in updatebm: bm[m] = repo[newbmtarget].node() lock = tr = None try: lock = repo.lock() tr = repo.transaction('repair') bm.recordchange(tr) 
tr.close() finally: tr.release() lock.release() # remove undo files for undovfs, undofile in repo.undofiles(): try: undovfs.unlink(undofile) except OSError as e: if e.errno != errno.ENOENT: ui.warn(_('error removing %s: %s\n') % (undovfs.join(undofile), str(e))) except: # re-raises if backupfile: ui.warn(_("strip failed, full bundle stored in '%s'\n") % vfs.join(backupfile)) elif saveheads: ui.warn(_("strip failed, partial bundle stored in '%s'\n") % vfs.join(chgrpfile)) raise else: if saveheads or savebases: # Remove partial backup only if there were no exceptions vfs.unlink(chgrpfile) repo.destroyed() def rebuildfncache(ui, repo): """Rebuilds the fncache file from repo history. Missing entries will be added. Extra entries will be removed. """ repo = repo.unfiltered() if 'fncache' not in repo.requirements: ui.warn(_('(not rebuilding fncache because repository does not ' 'support fncache)\n')) return with repo.lock(): fnc = repo.store.fncache # Trigger load of fncache. if 'irrelevant' in fnc: pass oldentries = set(fnc.entries) newentries = set() seenfiles = set() repolen = len(repo) for rev in repo: ui.progress(_('changeset'), rev, total=repolen) ctx = repo[rev] for f in ctx.files(): # This is to minimize I/O. 
if f in seenfiles: continue seenfiles.add(f) i = 'data/%s.i' % f d = 'data/%s.d' % f if repo.store._exists(i): newentries.add(i) if repo.store._exists(d): newentries.add(d) ui.progress(_('changeset'), None) addcount = len(newentries - oldentries) removecount = len(oldentries - newentries) for p in sorted(oldentries - newentries): ui.write(_('removing %s\n') % p) for p in sorted(newentries - oldentries): ui.write(_('adding %s\n') % p) if addcount or removecount: ui.write(_('%d items added, %d removed from fncache\n') % (addcount, removecount)) fnc.entries = newentries fnc._dirty = True with repo.transaction('fncache') as tr: fnc.write(tr) else: ui.write(_('fncache already up to date\n')) def stripbmrevset(repo, mark): """ The revset to strip when strip is called with -B mark Needs to live here so extensions can use it and wrap it even when strip is not enabled or not present on a box. """ return repo.revs("ancestors(bookmark(%s)) - " "ancestors(head() and not bookmark(%s)) - " "ancestors(bookmark() and not bookmark(%s))", mark, mark, mark) mercurial-3.7.3/mercurial/treediscovery.py0000644000175000017500000001231112676531525020351 0ustar mpmmpm00000000000000# discovery.py - protocol changeset discovery functions # # Copyright 2010 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import collections from .i18n import _ from .node import ( nullid, short, ) from . import ( error, ) def findcommonincoming(repo, remote, heads=None, force=False): """Return a tuple (common, fetch, heads) used to identify the common subset of nodes between repo and remote. "common" is a list of (at least) the heads of the common subset. "fetch" is a list of roots of the nodes that would be incoming, to be supplied to changegroupsubset. "heads" is either the supplied heads, or else the remote's heads. 
""" knownnode = repo.changelog.hasnode search = [] fetch = set() seen = set() seenbranch = set() base = set() if not heads: heads = remote.heads() if repo.changelog.tip() == nullid: base.add(nullid) if heads != [nullid]: return [nullid], [nullid], list(heads) return [nullid], [], heads # assume we're closer to the tip than the root # and start by examining the heads repo.ui.status(_("searching for changes\n")) unknown = [] for h in heads: if not knownnode(h): unknown.append(h) else: base.add(h) if not unknown: return list(base), [], list(heads) req = set(unknown) reqcnt = 0 # search through remote branches # a 'branch' here is a linear segment of history, with four parts: # head, root, first parent, second parent # (a branch always has two parents (or none) by definition) unknown = collections.deque(remote.branches(unknown)) while unknown: r = [] while unknown: n = unknown.popleft() if n[0] in seen: continue repo.ui.debug("examining %s:%s\n" % (short(n[0]), short(n[1]))) if n[0] == nullid: # found the end of the branch pass elif n in seenbranch: repo.ui.debug("branch already found\n") continue elif n[1] and knownnode(n[1]): # do we know the base? 
repo.ui.debug("found incomplete branch %s:%s\n" % (short(n[0]), short(n[1]))) search.append(n[0:2]) # schedule branch range for scanning seenbranch.add(n) else: if n[1] not in seen and n[1] not in fetch: if knownnode(n[2]) and knownnode(n[3]): repo.ui.debug("found new changeset %s\n" % short(n[1])) fetch.add(n[1]) # earliest unknown for p in n[2:4]: if knownnode(p): base.add(p) # latest known for p in n[2:4]: if p not in req and not knownnode(p): r.append(p) req.add(p) seen.add(n[0]) if r: reqcnt += 1 repo.ui.progress(_('searching'), reqcnt, unit=_('queries')) repo.ui.debug("request %d: %s\n" % (reqcnt, " ".join(map(short, r)))) for p in xrange(0, len(r), 10): for b in remote.branches(r[p:p + 10]): repo.ui.debug("received %s:%s\n" % (short(b[0]), short(b[1]))) unknown.append(b) # do binary search on the branches we found while search: newsearch = [] reqcnt += 1 repo.ui.progress(_('searching'), reqcnt, unit=_('queries')) for n, l in zip(search, remote.between(search)): l.append(n[1]) p = n[0] f = 1 for i in l: repo.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i))) if knownnode(i): if f <= 2: repo.ui.debug("found new branch changeset %s\n" % short(p)) fetch.add(p) base.add(i) else: repo.ui.debug("narrowed branch search to %s:%s\n" % (short(p), short(i))) newsearch.append((p, i)) break p, f = i, f * 2 search = newsearch # sanity check our fetch list for f in fetch: if knownnode(f): raise error.RepoError(_("already have changeset ") + short(f[:4])) base = list(base) if base == [nullid]: if force: repo.ui.warn(_("warning: repository is unrelated\n")) else: raise error.Abort(_("repository is unrelated")) repo.ui.debug("found new changesets starting at " + " ".join([short(f) for f in fetch]) + "\n") repo.ui.progress(_('searching'), None) repo.ui.debug("%d total queries\n" % reqcnt) return base, list(fetch), heads mercurial-3.7.3/mercurial/namespaces.py0000644000175000017500000001632612676531525017613 0ustar mpmmpm00000000000000from __future__ import absolute_import 
from .i18n import _ from . import ( templatekw, util, ) def tolist(val): """ a convenience method to return an empty list instead of None """ if val is None: return [] else: return [val] class namespaces(object): """provides an interface to register and operate on multiple namespaces. See the namespace class below for details on the namespace object. """ _names_version = 0 def __init__(self): self._names = util.sortdict() # we need current mercurial named objects (bookmarks, tags, and # branches) to be initialized somewhere, so that place is here bmknames = lambda repo: repo._bookmarks.keys() bmknamemap = lambda repo, name: tolist(repo._bookmarks.get(name)) bmknodemap = lambda repo, name: repo.nodebookmarks(name) n = namespace("bookmarks", templatename="bookmark", # i18n: column positioning for "hg log" logfmt=_("bookmark: %s\n"), listnames=bmknames, namemap=bmknamemap, nodemap=bmknodemap) self.addnamespace(n) tagnames = lambda repo: [t for t, n in repo.tagslist()] tagnamemap = lambda repo, name: tolist(repo._tagscache.tags.get(name)) tagnodemap = lambda repo, name: repo.nodetags(name) n = namespace("tags", templatename="tag", # i18n: column positioning for "hg log" logfmt=_("tag: %s\n"), listnames=tagnames, namemap=tagnamemap, nodemap=tagnodemap, deprecated=set(['tip'])) self.addnamespace(n) bnames = lambda repo: repo.branchmap().keys() bnamemap = lambda repo, name: tolist(repo.branchtip(name, True)) bnodemap = lambda repo, node: [repo[node].branch()] n = namespace("branches", templatename="branch", # i18n: column positioning for "hg log" logfmt=_("branch: %s\n"), listnames=bnames, namemap=bnamemap, nodemap=bnodemap) self.addnamespace(n) def __getitem__(self, namespace): """returns the namespace object""" return self._names[namespace] def __iter__(self): return self._names.__iter__() def iteritems(self): return self._names.iteritems() def addnamespace(self, namespace, order=None): """register a namespace namespace: the name to be registered (in plural form) order: 
optional argument to specify the order of namespaces (e.g. 'branches' should be listed before 'bookmarks') """ if order is not None: self._names.insert(order, namespace.name, namespace) else: self._names[namespace.name] = namespace # we only generate a template keyword if one does not already exist if namespace.name not in templatekw.keywords: def generatekw(**args): return templatekw.shownames(namespace.name, **args) templatekw.keywords[namespace.name] = generatekw def singlenode(self, repo, name): """ Return the 'best' node for the given name. Best means the first node in the first nonempty list returned by a name-to-nodes mapping function in the defined precedence order. Raises a KeyError if there is no such node. """ for ns, v in self._names.iteritems(): n = v.namemap(repo, name) if n: # return max revision number if len(n) > 1: cl = repo.changelog maxrev = max(cl.rev(node) for node in n) return cl.node(maxrev) return n[0] raise KeyError(_('no such name: %s') % name) class namespace(object): """provides an interface to a namespace Namespaces are basically generic many-to-many mapping between some (namespaced) names and nodes. The goal here is to control the pollution of jamming things into tags or bookmarks (in extension-land) and to simplify internal bits of mercurial: log output, tab completion, etc. More precisely, we define a mapping of names to nodes, and a mapping from nodes to names. Each mapping returns a list. Furthermore, each name mapping will be passed a name to lookup which might not be in its domain. In this case, each method should return an empty list and not raise an error. 
This namespace object will define the properties we need: 'name': the namespace (plural form) 'templatename': name to use for templating (usually the singular form of the plural namespace name) 'listnames': list of all names in the namespace (usually the keys of a dictionary) 'namemap': function that takes a name and returns a list of nodes 'nodemap': function that takes a node and returns a list of names 'deprecated': set of names to be masked for ordinary use """ def __init__(self, name, templatename=None, logname=None, colorname=None, logfmt=None, listnames=None, namemap=None, nodemap=None, deprecated=None): """create a namespace name: the namespace to be registered (in plural form) templatename: the name to use for templating logname: the name to use for log output; if not specified templatename is used colorname: the name to use for colored log output; if not specified logname is used logfmt: the format to use for (i18n-ed) log output; if not specified it is composed from logname listnames: function to list all names namemap: function that inputs a node, output name(s) nodemap: function that inputs a name, output node(s) deprecated: set of names to be masked for ordinary use """ self.name = name self.templatename = templatename self.logname = logname self.colorname = colorname self.logfmt = logfmt self.listnames = listnames self.namemap = namemap self.nodemap = nodemap # if logname is not specified, use the template name as backup if self.logname is None: self.logname = self.templatename # if colorname is not specified, just use the logname as a backup if self.colorname is None: self.colorname = self.logname # if logfmt is not specified, compose it from logname as backup if self.logfmt is None: # i18n: column positioning for "hg log" self.logfmt = ("%s:" % self.logname).ljust(13) + "%s\n" if deprecated is None: self.deprecated = set() else: self.deprecated = deprecated def names(self, repo, node): """method that returns a (sorted) list of names in a namespace 
that match a given node""" return sorted(self.nodemap(repo, node)) def nodes(self, repo, name): """method that returns a list of nodes in a namespace that match a given name. """ return sorted(self.namemap(repo, name)) mercurial-3.7.3/mercurial/httppeer.py0000644000175000017500000002420612676531525017323 0ustar mpmmpm00000000000000# httppeer.py - HTTP repository proxy classes for mercurial # # Copyright 2005, 2006 Matt Mackall # Copyright 2006 Vadim Gelfer # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import errno import httplib import os import socket import tempfile import urllib import urllib2 import zlib from .i18n import _ from .node import nullid from . import ( changegroup, error, httpconnection, statichttprepo, url, util, wireproto, ) def zgenerator(f): zd = zlib.decompressobj() try: for chunk in util.filechunkiter(f): while chunk: yield zd.decompress(chunk, 2**18) chunk = zd.unconsumed_tail except httplib.HTTPException: raise IOError(None, _('connection ended unexpectedly')) yield zd.flush() class httppeer(wireproto.wirepeer): def __init__(self, ui, path): self.path = path self.caps = None self.handler = None self.urlopener = None self.requestbuilder = None u = util.url(path) if u.query or u.fragment: raise error.Abort(_('unsupported URL component: "%s"') % (u.query or u.fragment)) # urllib cannot handle URLs with embedded user or passwd self._url, authinfo = u.authinfo() self.ui = ui self.ui.debug('using %s\n' % self._url) self.urlopener = url.opener(ui, authinfo) self.requestbuilder = urllib2.Request def __del__(self): if self.urlopener: for h in self.urlopener.handlers: h.close() getattr(h, "close_all", lambda : None)() def url(self): return self.path # look up capabilities only when needed def _fetchcaps(self): self.caps = set(self._call('capabilities').split()) def _capabilities(self): if self.caps is None: try: 
self._fetchcaps() except error.RepoError: self.caps = set() self.ui.debug('capabilities: %s\n' % (' '.join(self.caps or ['none']))) return self.caps def lock(self): raise error.Abort(_('operation not supported over http')) def _callstream(self, cmd, **args): if cmd == 'pushkey': args['data'] = '' data = args.pop('data', None) size = 0 if util.safehasattr(data, 'length'): size = data.length elif data is not None: size = len(data) headers = args.pop('headers', {}) if data is not None and 'Content-Type' not in headers: headers['Content-Type'] = 'application/mercurial-0.1' if size and self.ui.configbool('ui', 'usehttp2', False): headers['Expect'] = '100-Continue' headers['X-HgHttp2'] = '1' self.ui.debug("sending %s command\n" % cmd) q = [('cmd', cmd)] headersize = 0 if len(args) > 0: httpheader = self.capable('httpheader') if httpheader: headersize = int(httpheader.split(',')[0]) if headersize > 0: # The headers can typically carry more data than the URL. encargs = urllib.urlencode(sorted(args.items())) headerfmt = 'X-HgArg-%s' contentlen = headersize - len(headerfmt % '000' + ': \r\n') headernum = 0 for i in xrange(0, len(encargs), contentlen): headernum += 1 header = headerfmt % str(headernum) headers[header] = encargs[i:i + contentlen] varyheaders = [headerfmt % str(h) for h in range(1, headernum + 1)] headers['Vary'] = ','.join(varyheaders) else: q += sorted(args.items()) qs = '?%s' % urllib.urlencode(q) cu = "%s%s" % (self._url, qs) req = self.requestbuilder(cu, data, headers) if data is not None: self.ui.debug("sending %s bytes\n" % size) req.add_unredirected_header('Content-Length', '%d' % size) try: resp = self.urlopener.open(req) except urllib2.HTTPError as inst: if inst.code == 401: raise error.Abort(_('authorization failed')) raise except httplib.HTTPException as inst: self.ui.debug('http error while sending %s command\n' % cmd) self.ui.traceback() raise IOError(None, inst) except IndexError: # this only happens with Python 2.3, later versions raise URLError 
raise error.Abort(_('http error, possibly caused by proxy setting')) # record the url we got redirected to resp_url = resp.geturl() if resp_url.endswith(qs): resp_url = resp_url[:-len(qs)] if self._url.rstrip('/') != resp_url.rstrip('/'): if not self.ui.quiet: self.ui.warn(_('real URL is %s\n') % resp_url) self._url = resp_url try: proto = resp.getheader('content-type') except AttributeError: proto = resp.headers.get('content-type', '') safeurl = util.hidepassword(self._url) if proto.startswith('application/hg-error'): raise error.OutOfBandError(resp.read()) # accept old "text/plain" and "application/hg-changegroup" for now if not (proto.startswith('application/mercurial-') or (proto.startswith('text/plain') and not resp.headers.get('content-length')) or proto.startswith('application/hg-changegroup')): self.ui.debug("requested URL: '%s'\n" % util.hidepassword(cu)) raise error.RepoError( _("'%s' does not appear to be an hg repository:\n" "---%%<--- (%s)\n%s\n---%%<---\n") % (safeurl, proto or 'no content-type', resp.read(1024))) if proto.startswith('application/mercurial-'): try: version = proto.split('-', 1)[1] version_info = tuple([int(n) for n in version.split('.')]) except ValueError: raise error.RepoError(_("'%s' sent a broken Content-Type " "header (%s)") % (safeurl, proto)) if version_info > (0, 1): raise error.RepoError(_("'%s' uses newer protocol %s") % (safeurl, version)) return resp def _call(self, cmd, **args): fp = self._callstream(cmd, **args) try: return fp.read() finally: # if using keepalive, allow connection to be reused fp.close() def _callpush(self, cmd, cg, **args): # have to stream bundle to a temp file because we do not have # http 1.1 chunked transfer. types = self.capable('unbundle') try: types = types.split(',') except AttributeError: # servers older than d1b16a746db6 will send 'unbundle' as a # boolean capability. They only support headerless/uncompressed # bundles. 
types = [""] for x in types: if x in changegroup.bundletypes: type = x break tempname = changegroup.writebundle(self.ui, cg, None, type) fp = httpconnection.httpsendfile(self.ui, tempname, "rb") headers = {'Content-Type': 'application/mercurial-0.1'} try: r = self._call(cmd, data=fp, headers=headers, **args) vals = r.split('\n', 1) if len(vals) < 2: raise error.ResponseError(_("unexpected response:"), r) return vals except socket.error as err: if err.args[0] in (errno.ECONNRESET, errno.EPIPE): raise error.Abort(_('push failed: %s') % err.args[1]) raise error.Abort(err.args[1]) finally: fp.close() os.unlink(tempname) def _calltwowaystream(self, cmd, fp, **args): fh = None fp_ = None filename = None try: # dump bundle to disk fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg") fh = os.fdopen(fd, "wb") d = fp.read(4096) while d: fh.write(d) d = fp.read(4096) fh.close() # start http push fp_ = httpconnection.httpsendfile(self.ui, filename, "rb") headers = {'Content-Type': 'application/mercurial-0.1'} return self._callstream(cmd, data=fp_, headers=headers, **args) finally: if fp_ is not None: fp_.close() if fh is not None: fh.close() os.unlink(filename) def _callcompressable(self, cmd, **args): stream = self._callstream(cmd, **args) return util.chunkbuffer(zgenerator(stream)) def _abort(self, exception): raise exception class httpspeer(httppeer): def __init__(self, ui, path): if not url.has_https: raise error.Abort(_('Python support for SSL and HTTPS ' 'is not installed')) httppeer.__init__(self, ui, path) def instance(ui, path, create): if create: raise error.Abort(_('cannot create new http repository')) try: if path.startswith('https:'): inst = httpspeer(ui, path) else: inst = httppeer(ui, path) try: # Try to do useful work when checking compatibility. # Usually saves a roundtrip since we want the caps anyway. inst._fetchcaps() except error.RepoError: # No luck, try older compatibility check. 
inst.between([(nullid, nullid)]) return inst except error.RepoError as httpexception: try: r = statichttprepo.instance(ui, "static-" + path, create) ui.note('(falling back to static-http)\n') return r except error.RepoError: raise httpexception # use the original http RepoError instead mercurial-3.7.3/mercurial/bundlerepo.py0000644000175000017500000004522512676531524017632 0ustar mpmmpm00000000000000# bundlerepo.py - repository class for viewing uncompressed bundles # # Copyright 2006, 2007 Benoit Boissinot # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. """Repository class for viewing uncompressed bundles. This provides a read-only repository interface to bundles as if they were part of the actual repository. """ from __future__ import absolute_import import os import shutil import tempfile from .i18n import _ from .node import nullid from . import ( bundle2, changegroup, changelog, cmdutil, discovery, error, exchange, filelog, localrepo, manifest, mdiff, pathutil, phases, revlog, scmutil, util, ) class bundlerevlog(revlog.revlog): def __init__(self, opener, indexfile, bundle, linkmapper): # How it works: # To retrieve a revision, we need to know the offset of the revision in # the bundle (an unbundle object). We store this offset in the index # (start). The base of the delta is stored in the base field. # # To differentiate a rev in the bundle from a rev in the revlog, we # check revision against repotiprev. 
opener = scmutil.readonlyvfs(opener) revlog.revlog.__init__(self, opener, indexfile) self.bundle = bundle n = len(self) self.repotiprev = n - 1 chain = None self.bundlerevs = set() # used by 'bundle()' revset expression while True: chunkdata = bundle.deltachunk(chain) if not chunkdata: break node = chunkdata['node'] p1 = chunkdata['p1'] p2 = chunkdata['p2'] cs = chunkdata['cs'] deltabase = chunkdata['deltabase'] delta = chunkdata['delta'] size = len(delta) start = bundle.tell() - size link = linkmapper(cs) if node in self.nodemap: # this can happen if two branches make the same change chain = node self.bundlerevs.add(self.nodemap[node]) continue for p in (p1, p2): if p not in self.nodemap: raise error.LookupError(p, self.indexfile, _("unknown parent")) if deltabase not in self.nodemap: raise LookupError(deltabase, self.indexfile, _('unknown delta base')) baserev = self.rev(deltabase) # start, size, full unc. size, base (unused), link, p1, p2, node e = (revlog.offset_type(start, 0), size, -1, baserev, link, self.rev(p1), self.rev(p2), node) self.index.insert(-1, e) self.nodemap[node] = n self.bundlerevs.add(n) chain = node n += 1 def _chunk(self, rev): # Warning: in case of bundle, the diff is against what we stored as # delta base, not against rev - 1 # XXX: could use some caching if rev <= self.repotiprev: return revlog.revlog._chunk(self, rev) self.bundle.seek(self.start(rev)) return self.bundle.read(self.length(rev)) def revdiff(self, rev1, rev2): """return or calculate a delta between two revisions""" if rev1 > self.repotiprev and rev2 > self.repotiprev: # hot path for bundle revb = self.index[rev2][3] if revb == rev1: return self._chunk(rev2) elif rev1 <= self.repotiprev and rev2 <= self.repotiprev: return revlog.revlog.revdiff(self, rev1, rev2) return mdiff.textdiff(self.revision(self.node(rev1)), self.revision(self.node(rev2))) def revision(self, nodeorrev): """return an uncompressed revision of a given node or revision number. 
""" if isinstance(nodeorrev, int): rev = nodeorrev node = self.node(rev) else: node = nodeorrev rev = self.rev(node) if node == nullid: return "" text = None chain = [] iterrev = rev # reconstruct the revision if it is from a changegroup while iterrev > self.repotiprev: if self._cache and self._cache[1] == iterrev: text = self._cache[2] break chain.append(iterrev) iterrev = self.index[iterrev][3] if text is None: text = self.baserevision(iterrev) while chain: delta = self._chunk(chain.pop()) text = mdiff.patches(text, [delta]) self._checkhash(text, node, rev) self._cache = (node, rev, text) return text def baserevision(self, nodeorrev): # Revlog subclasses may override 'revision' method to modify format of # content retrieved from revlog. To use bundlerevlog with such class one # needs to override 'baserevision' and make more specific call here. return revlog.revlog.revision(self, nodeorrev) def addrevision(self, text, transaction, link, p1=None, p2=None, d=None): raise NotImplementedError def addgroup(self, revs, linkmapper, transaction): raise NotImplementedError def strip(self, rev, minlink): raise NotImplementedError def checksize(self): raise NotImplementedError class bundlechangelog(bundlerevlog, changelog.changelog): def __init__(self, opener, bundle): changelog.changelog.__init__(self, opener) linkmapper = lambda x: x bundlerevlog.__init__(self, opener, self.indexfile, bundle, linkmapper) def baserevision(self, nodeorrev): # Although changelog doesn't override 'revision' method, some extensions # may replace this class with another that does. Same story with # manifest and filelog classes. # This bypasses filtering on changelog.node() and rev() because we need # revision text of the bundle base even if it is hidden. 
oldfilter = self.filteredrevs try: self.filteredrevs = () return changelog.changelog.revision(self, nodeorrev) finally: self.filteredrevs = oldfilter class bundlemanifest(bundlerevlog, manifest.manifest): def __init__(self, opener, bundle, linkmapper): manifest.manifest.__init__(self, opener) bundlerevlog.__init__(self, opener, self.indexfile, bundle, linkmapper) def baserevision(self, nodeorrev): node = nodeorrev if isinstance(node, int): node = self.node(node) if node in self._mancache: result = self._mancache[node][0].text() else: result = manifest.manifest.revision(self, nodeorrev) return result class bundlefilelog(bundlerevlog, filelog.filelog): def __init__(self, opener, path, bundle, linkmapper): filelog.filelog.__init__(self, opener, path) bundlerevlog.__init__(self, opener, self.indexfile, bundle, linkmapper) def baserevision(self, nodeorrev): return filelog.filelog.revision(self, nodeorrev) class bundlepeer(localrepo.localpeer): def canpush(self): return False class bundlephasecache(phases.phasecache): def __init__(self, *args, **kwargs): super(bundlephasecache, self).__init__(*args, **kwargs) if util.safehasattr(self, 'opener'): self.opener = scmutil.readonlyvfs(self.opener) def write(self): raise NotImplementedError def _write(self, fp): raise NotImplementedError def _updateroots(self, phase, newroots, tr): self.phaseroots[phase] = newroots self.invalidate() self.dirty = True class bundlerepository(localrepo.localrepository): def __init__(self, ui, path, bundlename): def _writetempbundle(read, suffix, header=''): """Write a temporary file to disk This is closure because we need to make sure this tracked by self.tempfile for cleanup purposes.""" fdtemp, temp = self.vfs.mkstemp(prefix="hg-bundle-", suffix=".hg10un") self.tempfile = temp with os.fdopen(fdtemp, 'wb') as fptemp: fptemp.write(header) while True: chunk = read(2**18) if not chunk: break fptemp.write(chunk) return self.vfs.open(self.tempfile, mode="rb") self._tempparent = None try: 
localrepo.localrepository.__init__(self, ui, path) except error.RepoError: self._tempparent = tempfile.mkdtemp() localrepo.instance(ui, self._tempparent, 1) localrepo.localrepository.__init__(self, ui, self._tempparent) self.ui.setconfig('phases', 'publish', False, 'bundlerepo') if path: self._url = 'bundle:' + util.expandpath(path) + '+' + bundlename else: self._url = 'bundle:' + bundlename self.tempfile = None f = util.posixfile(bundlename, "rb") self.bundlefile = self.bundle = exchange.readbundle(ui, f, bundlename) if isinstance(self.bundle, bundle2.unbundle20): cgstream = None for part in self.bundle.iterparts(): if part.type == 'changegroup': if cgstream is not None: raise NotImplementedError("can't process " "multiple changegroups") cgstream = part version = part.params.get('version', '01') if version not in changegroup.allsupportedversions(ui): msg = _('Unsupported changegroup version: %s') raise error.Abort(msg % version) if self.bundle.compressed(): cgstream = _writetempbundle(part.read, ".cg%sun" % version) if cgstream is None: raise error.Abort('No changegroups found') cgstream.seek(0) self.bundle = changegroup.getunbundler(version, cgstream, 'UN') elif self.bundle.compressed(): f = _writetempbundle(self.bundle.read, '.hg10un', header='HG10UN') self.bundlefile = self.bundle = exchange.readbundle(ui, f, bundlename, self.vfs) # dict with the mapping 'filename' -> position in the bundle self.bundlefilespos = {} self.firstnewrev = self.changelog.repotiprev + 1 phases.retractboundary(self, None, phases.draft, [ctx.node() for ctx in self[self.firstnewrev:]]) @localrepo.unfilteredpropertycache def _phasecache(self): return bundlephasecache(self, self._phasedefaults) @localrepo.unfilteredpropertycache def changelog(self): # consume the header if it exists self.bundle.changelogheader() c = bundlechangelog(self.svfs, self.bundle) self.manstart = self.bundle.tell() return c @localrepo.unfilteredpropertycache def manifest(self): self.bundle.seek(self.manstart) # 
consume the header if it exists self.bundle.manifestheader() linkmapper = self.unfiltered().changelog.rev m = bundlemanifest(self.svfs, self.bundle, linkmapper) # XXX: hack to work with changegroup3, but we still don't handle # tree manifests correctly if self.bundle.version == "03": self.bundle.filelogheader() self.filestart = self.bundle.tell() return m @localrepo.unfilteredpropertycache def manstart(self): self.changelog return self.manstart @localrepo.unfilteredpropertycache def filestart(self): self.manifest return self.filestart def url(self): return self._url def file(self, f): if not self.bundlefilespos: self.bundle.seek(self.filestart) while True: chunkdata = self.bundle.filelogheader() if not chunkdata: break fname = chunkdata['filename'] self.bundlefilespos[fname] = self.bundle.tell() while True: c = self.bundle.deltachunk(None) if not c: break if f in self.bundlefilespos: self.bundle.seek(self.bundlefilespos[f]) linkmapper = self.unfiltered().changelog.rev return bundlefilelog(self.svfs, f, self.bundle, linkmapper) else: return filelog.filelog(self.svfs, f) def close(self): """Close assigned bundle file immediately.""" self.bundlefile.close() if self.tempfile is not None: self.vfs.unlink(self.tempfile) if self._tempparent: shutil.rmtree(self._tempparent, True) def cancopy(self): return False def peer(self): return bundlepeer(self) def getcwd(self): return os.getcwd() # always outside the repo def instance(ui, path, create): if create: raise error.Abort(_('cannot create new bundle repository')) # internal config: bundle.mainreporoot parentpath = ui.config("bundle", "mainreporoot", "") if not parentpath: # try to find the correct path to the working directory repo parentpath = cmdutil.findrepo(os.getcwd()) if parentpath is None: parentpath = '' if parentpath: # Try to make the full path relative so we get a nice, short URL. # In particular, we don't want temp dir names in test outputs. 
cwd = os.getcwd() if parentpath == cwd: parentpath = '' else: cwd = pathutil.normasprefix(cwd) if parentpath.startswith(cwd): parentpath = parentpath[len(cwd):] u = util.url(path) path = u.localpath() if u.scheme == 'bundle': s = path.split("+", 1) if len(s) == 1: repopath, bundlename = parentpath, s[0] else: repopath, bundlename = s else: repopath, bundlename = parentpath, path return bundlerepository(ui, repopath, bundlename) class bundletransactionmanager(object): def transaction(self): return None def close(self): raise NotImplementedError def release(self): raise NotImplementedError def getremotechanges(ui, repo, other, onlyheads=None, bundlename=None, force=False): '''obtains a bundle of changes incoming from other "onlyheads" restricts the returned changes to those reachable from the specified heads. "bundlename", if given, stores the bundle to this file path permanently; otherwise it's stored to a temp file and gets deleted again when you call the returned "cleanupfn". "force" indicates whether to proceed on unrelated repos. Returns a tuple (local, csets, cleanupfn): "local" is a local repo from which to obtain the actual incoming changesets; it is a bundlerepo for the obtained bundle when the original "other" is remote. "csets" lists the incoming changeset node ids. "cleanupfn" must be called without arguments when you're done processing the changes; it closes both the original "other" and the one returned here. 
''' tmp = discovery.findcommonincoming(repo, other, heads=onlyheads, force=force) common, incoming, rheads = tmp if not incoming: try: if bundlename: os.unlink(bundlename) except OSError: pass return repo, [], other.close commonset = set(common) rheads = [x for x in rheads if x not in commonset] bundle = None bundlerepo = None localrepo = other.local() if bundlename or not localrepo: # create a bundle (uncompressed if other repo is not local) canbundle2 = (ui.configbool('experimental', 'bundle2-exp', True) and other.capable('getbundle') and other.capable('bundle2')) if canbundle2: kwargs = {} kwargs['common'] = common kwargs['heads'] = rheads kwargs['bundlecaps'] = exchange.caps20to10(repo) kwargs['cg'] = True b2 = other.getbundle('incoming', **kwargs) fname = bundle = changegroup.writechunks(ui, b2._forwardchunks(), bundlename) else: if other.capable('getbundle'): cg = other.getbundle('incoming', common=common, heads=rheads) elif onlyheads is None and not other.capable('changegroupsubset'): # compat with older servers when pulling all remote heads cg = other.changegroup(incoming, "incoming") rheads = None else: cg = other.changegroupsubset(incoming, rheads, 'incoming') if localrepo: bundletype = "HG10BZ" else: bundletype = "HG10UN" fname = bundle = changegroup.writebundle(ui, cg, bundlename, bundletype) # keep written bundle? 
if bundlename: bundle = None if not localrepo: # use the created uncompressed bundlerepo localrepo = bundlerepo = bundlerepository(repo.baseui, repo.root, fname) # this repo contains local and other now, so filter out local again common = repo.heads() if localrepo: # Part of common may be remotely filtered # So use an unfiltered version # The discovery process probably need cleanup to avoid that localrepo = localrepo.unfiltered() csets = localrepo.changelog.findmissing(common, rheads) if bundlerepo: reponodes = [ctx.node() for ctx in bundlerepo[bundlerepo.firstnewrev:]] remotephases = other.listkeys('phases') pullop = exchange.pulloperation(bundlerepo, other, heads=reponodes) pullop.trmanager = bundletransactionmanager() exchange._pullapplyphases(pullop, remotephases) def cleanup(): if bundlerepo: bundlerepo.close() if bundle: os.unlink(bundle) other.close() return (localrepo, csets, cleanup) mercurial-3.7.3/mercurial/verify.py0000644000175000017500000003357712676531525017007 0ustar mpmmpm00000000000000# verify.py - repository integrity checking for Mercurial # # Copyright 2006, 2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import os from .i18n import _ from .node import ( nullid, short, ) from . import ( error, revlog, util, ) def verify(repo): with repo.lock(): return verifier(repo).verify() def _normpath(f): # under hg < 2.4, convert didn't sanitize paths properly, so a # converted repo may contain repeated slashes while '//' in f: f = f.replace('//', '/') return f def _validpath(repo, path): """Returns False if a path should NOT be treated as part of a repo. For all in-core cases, this returns True, as we have no way for a path to be mentioned in the history but not actually be relevant. 
For narrow clones, this is important because many filelogs will be missing, and changelog entries may mention modified files that are outside the narrow scope. """ return True class verifier(object): def __init__(self, repo): self.repo = repo.unfiltered() self.ui = repo.ui self.badrevs = set() self.errors = 0 self.warnings = 0 self.havecl = len(repo.changelog) > 0 self.havemf = len(repo.manifest) > 0 self.revlogv1 = repo.changelog.version != revlog.REVLOGV0 self.lrugetctx = util.lrucachefunc(repo.changectx) self.refersmf = False self.fncachewarned = False def warn(self, msg): self.ui.warn(msg + "\n") self.warnings += 1 def err(self, linkrev, msg, filename=None): if linkrev is not None: self.badrevs.add(linkrev) else: linkrev = '?' msg = "%s: %s" % (linkrev, msg) if filename: msg = "%s@%s" % (filename, msg) self.ui.warn(" " + msg + "\n") self.errors += 1 def exc(self, linkrev, msg, inst, filename=None): if not str(inst): inst = repr(inst) self.err(linkrev, "%s: %s" % (msg, inst), filename) def checklog(self, obj, name, linkrev): if not len(obj) and (self.havecl or self.havemf): self.err(linkrev, _("empty or missing %s") % name) return d = obj.checksize() if d[0]: self.err(None, _("data length off by %d bytes") % d[0], name) if d[1]: self.err(None, _("index contains %d extra bytes") % d[1], name) if obj.version != revlog.REVLOGV0: if not self.revlogv1: self.warn(_("warning: `%s' uses revlog format 1") % name) elif self.revlogv1: self.warn(_("warning: `%s' uses revlog format 0") % name) def checkentry(self, obj, i, node, seen, linkrevs, f): lr = obj.linkrev(obj.rev(node)) if lr < 0 or (self.havecl and lr not in linkrevs): if lr < 0 or lr >= len(self.repo.changelog): msg = _("rev %d points to nonexistent changeset %d") else: msg = _("rev %d points to unexpected changeset %d") self.err(None, msg % (i, lr), f) if linkrevs: if f and len(linkrevs) > 1: try: # attempt to filter down to real linkrevs linkrevs = [l for l in linkrevs if self.lrugetctx(l)[f].filenode() == node] 
except Exception: pass self.warn(_(" (expected %s)") % " ".join(map(str, linkrevs))) lr = None # can't be trusted try: p1, p2 = obj.parents(node) if p1 not in seen and p1 != nullid: self.err(lr, _("unknown parent 1 %s of %s") % (short(p1), short(node)), f) if p2 not in seen and p2 != nullid: self.err(lr, _("unknown parent 2 %s of %s") % (short(p2), short(node)), f) except Exception as inst: self.exc(lr, _("checking parents of %s") % short(node), inst, f) if node in seen: self.err(lr, _("duplicate revision %d (%d)") % (i, seen[node]), f) seen[node] = i return lr def verify(self): repo = self.repo ui = repo.ui if not repo.url().startswith('file:'): raise error.Abort(_("cannot verify bundle or remote repos")) if os.path.exists(repo.sjoin("journal")): ui.warn(_("abandoned transaction found - run hg recover\n")) if ui.verbose or not self.revlogv1: ui.status(_("repository uses revlog format %d\n") % (self.revlogv1 and 1 or 0)) mflinkrevs, filelinkrevs = self._verifychangelog() filenodes = self._verifymanifest(mflinkrevs) self._crosscheckfiles(mflinkrevs, filelinkrevs, filenodes) del mflinkrevs totalfiles, filerevisions = self._verifyfiles(filenodes, filelinkrevs) ui.status(_("%d files, %d changesets, %d total revisions\n") % (totalfiles, len(repo.changelog), filerevisions)) if self.warnings: ui.warn(_("%d warnings encountered!\n") % self.warnings) if self.fncachewarned: ui.warn(_('hint: run "hg debugrebuildfncache" to recover from ' 'corrupt fncache\n')) if self.errors: ui.warn(_("%d integrity errors encountered!\n") % self.errors) if self.badrevs: ui.warn(_("(first damaged changeset appears to be %d)\n") % min(self.badrevs)) return 1 def _verifychangelog(self): ui = self.ui repo = self.repo cl = repo.changelog ui.status(_("checking changesets\n")) mflinkrevs = {} filelinkrevs = {} seen = {} self.checklog(cl, "changelog", 0) total = len(repo) for i in repo: ui.progress(_('checking'), i, total=total, unit=_('changesets')) n = cl.node(i) self.checkentry(cl, i, n, seen, 
[i], "changelog") try: changes = cl.read(n) if changes[0] != nullid: mflinkrevs.setdefault(changes[0], []).append(i) self.refersmf = True for f in changes[3]: if _validpath(repo, f): filelinkrevs.setdefault(_normpath(f), []).append(i) except Exception as inst: self.refersmf = True self.exc(i, _("unpacking changeset %s") % short(n), inst) ui.progress(_('checking'), None) return mflinkrevs, filelinkrevs def _verifymanifest(self, mflinkrevs): repo = self.repo ui = self.ui mf = self.repo.manifest ui.status(_("checking manifests\n")) filenodes = {} seen = {} if self.refersmf: # Do not check manifest if there are only changelog entries with # null manifests. self.checklog(mf, "manifest", 0) total = len(mf) for i in mf: ui.progress(_('checking'), i, total=total, unit=_('manifests')) n = mf.node(i) lr = self.checkentry(mf, i, n, seen, mflinkrevs.get(n, []), "manifest") if n in mflinkrevs: del mflinkrevs[n] else: self.err(lr, _("%s not in changesets") % short(n), "manifest") try: for f, fn in mf.readdelta(n).iteritems(): if not f: self.err(lr, _("file without name in manifest")) elif f != "/dev/null": # ignore this in very old repos if _validpath(repo, f): filenodes.setdefault( _normpath(f), {}).setdefault(fn, lr) except Exception as inst: self.exc(lr, _("reading manifest delta %s") % short(n), inst) ui.progress(_('checking'), None) return filenodes def _crosscheckfiles(self, mflinkrevs, filelinkrevs, filenodes): repo = self.repo ui = self.ui ui.status(_("crosschecking files in changesets and manifests\n")) total = len(mflinkrevs) + len(filelinkrevs) + len(filenodes) count = 0 if self.havemf: for c, m in sorted([(c, m) for m in mflinkrevs for c in mflinkrevs[m]]): count += 1 if m == nullid: continue ui.progress(_('crosschecking'), count, total=total) self.err(c, _("changeset refers to unknown manifest %s") % short(m)) for f in sorted(filelinkrevs): count += 1 ui.progress(_('crosschecking'), count, total=total) if f not in filenodes: lr = filelinkrevs[f][0] self.err(lr, 
_("in changeset but not in manifest"), f) if self.havecl: for f in sorted(filenodes): count += 1 ui.progress(_('crosschecking'), count, total=total) if f not in filelinkrevs: try: fl = repo.file(f) lr = min([fl.linkrev(fl.rev(n)) for n in filenodes[f]]) except Exception: lr = None self.err(lr, _("in manifest but not in changeset"), f) ui.progress(_('crosschecking'), None) def _verifyfiles(self, filenodes, filelinkrevs): repo = self.repo ui = self.ui lrugetctx = self.lrugetctx revlogv1 = self.revlogv1 havemf = self.havemf ui.status(_("checking files\n")) storefiles = set() for f, f2, size in repo.store.datafiles(): if not f: self.err(None, _("cannot decode filename '%s'") % f2) elif size > 0 or not revlogv1: storefiles.add(_normpath(f)) files = sorted(set(filenodes) | set(filelinkrevs)) total = len(files) revisions = 0 for i, f in enumerate(files): ui.progress(_('checking'), i, item=f, total=total) try: linkrevs = filelinkrevs[f] except KeyError: # in manifest but not in changelog linkrevs = [] if linkrevs: lr = linkrevs[0] else: lr = None try: fl = repo.file(f) except error.RevlogError as e: self.err(lr, _("broken revlog! 
(%s)") % e, f) continue for ff in fl.files(): try: storefiles.remove(ff) except KeyError: self.warn(_(" warning: revlog '%s' not in fncache!") % ff) self.fncachewarned = True self.checklog(fl, f, lr) seen = {} rp = None for i in fl: revisions += 1 n = fl.node(i) lr = self.checkentry(fl, i, n, seen, linkrevs, f) if f in filenodes: if havemf and n not in filenodes[f]: self.err(lr, _("%s not in manifests") % (short(n)), f) else: del filenodes[f][n] # verify contents try: l = len(fl.read(n)) rp = fl.renamed(n) if l != fl.size(i): if len(fl.revision(n)) != fl.size(i): self.err(lr, _("unpacked size is %s, %s expected") % (l, fl.size(i)), f) except error.CensoredNodeError: # experimental config: censor.policy if ui.config("censor", "policy", "abort") == "abort": self.err(lr, _("censored file data"), f) except Exception as inst: self.exc(lr, _("unpacking %s") % short(n), inst, f) # check renames try: if rp: if lr is not None and ui.verbose: ctx = lrugetctx(lr) found = False for pctx in ctx.parents(): if rp[0] in pctx: found = True break if not found: self.warn(_("warning: copy source of '%s' not" " in parents of %s") % (f, ctx)) fl2 = repo.file(rp[0]) if not len(fl2): self.err(lr, _("empty or missing copy source " "revlog %s:%s") % (rp[0], short(rp[1])), f) elif rp[1] == nullid: ui.note(_("warning: %s@%s: copy source" " revision is nullid %s:%s\n") % (f, lr, rp[0], short(rp[1]))) else: fl2.rev(rp[1]) except Exception as inst: self.exc(lr, _("checking rename of %s") % short(n), inst, f) # cross-check if f in filenodes: fns = [(lr, n) for n, lr in filenodes[f].iteritems()] for lr, node in sorted(fns): self.err(lr, _("%s in manifests not found") % short(node), f) ui.progress(_('checking'), None) for f in storefiles: self.warn(_("warning: orphan revlog '%s'") % f) return len(files), revisions mercurial-3.7.3/mercurial/filelog.py0000644000175000017500000001033212676531525017104 0ustar mpmmpm00000000000000# filelog.py - file history class for mercurial # # Copyright 2005-2007 
Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import re import struct from . import ( error, mdiff, revlog, ) _mdre = re.compile('\1\n') def parsemeta(text): """return (metadatadict, keylist, metadatasize)""" # text can be buffer, so we can't use .startswith or .index if text[:2] != '\1\n': return None, None s = _mdre.search(text, 2).start() mtext = text[2:s] meta = {} for l in mtext.splitlines(): k, v = l.split(": ", 1) meta[k] = v return meta, (s + 2) def packmeta(meta, text): keys = sorted(meta.iterkeys()) metatext = "".join("%s: %s\n" % (k, meta[k]) for k in keys) return "\1\n%s\1\n%s" % (metatext, text) def _censoredtext(text): m, offs = parsemeta(text) return m and "censored" in m class filelog(revlog.revlog): def __init__(self, opener, path): super(filelog, self).__init__(opener, "/".join(("data", path + ".i"))) def read(self, node): t = self.revision(node) if not t.startswith('\1\n'): return t s = t.index('\1\n', 2) return t[s + 2:] def add(self, text, meta, transaction, link, p1=None, p2=None): if meta or text.startswith('\1\n'): text = packmeta(meta, text) return self.addrevision(text, transaction, link, p1, p2) def renamed(self, node): if self.parents(node)[0] != revlog.nullid: return False t = self.revision(node) m = parsemeta(t)[0] if m and "copy" in m: return (m["copy"], revlog.bin(m["copyrev"])) return False def size(self, rev): """return the size of a given revision""" # for revisions with renames, we have to go the slow way node = self.node(rev) if self.renamed(node): return len(self.read(node)) if self.iscensored(rev): return 0 # XXX if self.read(node).startswith("\1\n"), this returns (size+4) return super(filelog, self).size(rev) def cmp(self, node, text): """compare text with a given file revision returns True if text is different than what is stored. 
""" t = text if text.startswith('\1\n'): t = '\1\n\1\n' + text samehashes = not super(filelog, self).cmp(node, t) if samehashes: return False # censored files compare against the empty file if self.iscensored(self.rev(node)): return text != '' # renaming a file produces a different hash, even if the data # remains unchanged. Check if it's the case (slow): if self.renamed(node): t2 = self.read(node) return t2 != text return True def checkhash(self, text, p1, p2, node, rev=None): try: super(filelog, self).checkhash(text, p1, p2, node, rev=rev) except error.RevlogError: if _censoredtext(text): raise error.CensoredNodeError(self.indexfile, node, text) raise def iscensored(self, rev): """Check if a file revision is censored.""" return self.flags(rev) & revlog.REVIDX_ISCENSORED def _peek_iscensored(self, baserev, delta, flush): """Quickly check if a delta produces a censored revision.""" # Fragile heuristic: unless new file meta keys are added alphabetically # preceding "censored", all censored revisions are prefixed by # "\1\ncensored:". A delta producing such a censored revision must be a # full-replacement delta, so we inspect the first and only patch in the # delta for this prefix. hlen = struct.calcsize(">lll") if len(delta) <= hlen: return False oldlen = self.rawsize(baserev) newlen = len(delta) - hlen if delta[:hlen] != mdiff.replacediffheader(oldlen, newlen): return False add = "\1\ncensored:" addlen = len(add) return newlen >= addlen and delta[hlen:hlen + addlen] == add mercurial-3.7.3/mercurial/lock.py0000644000175000017500000001754012676531525016423 0ustar mpmmpm00000000000000# lock.py - simple advisory locking scheme for mercurial # # Copyright 2005, 2006 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import contextlib import errno import os import socket import time import warnings from . 
import ( error, util, ) class lock(object): '''An advisory lock held by one process to control access to a set of files. Non-cooperating processes or incorrectly written scripts can ignore Mercurial's locking scheme and stomp all over the repository, so don't do that. Typically used via localrepository.lock() to lock the repository store (.hg/store/) or localrepository.wlock() to lock everything else under .hg/.''' # lock is symlink on platforms that support it, file on others. # symlink is used because create of directory entry and contents # are atomic even over nfs. # old-style lock: symlink to pid # new-style lock: symlink to hostname:pid _host = None def __init__(self, vfs, file, timeout=-1, releasefn=None, acquirefn=None, desc=None, inheritchecker=None, parentlock=None): self.vfs = vfs self.f = file self.held = 0 self.timeout = timeout self.releasefn = releasefn self.acquirefn = acquirefn self.desc = desc self._inheritchecker = inheritchecker self.parentlock = parentlock self._parentheld = False self._inherited = False self.postrelease = [] self.pid = self._getpid() self.delay = self.lock() if self.acquirefn: self.acquirefn() def __enter__(self): return self def __exit__(self, exc_type, exc_value, exc_tb): self.release() def __del__(self): if self.held: warnings.warn("use lock.release instead of del lock", category=DeprecationWarning, stacklevel=2) # ensure the lock will be removed # even if recursive locking did occur self.held = 1 self.release() def _getpid(self): # wrapper around os.getpid() to make testing easier return os.getpid() def lock(self): timeout = self.timeout while True: try: self._trylock() return self.timeout - timeout except error.LockHeld as inst: if timeout != 0: time.sleep(1) if timeout > 0: timeout -= 1 continue raise error.LockHeld(errno.ETIMEDOUT, inst.filename, self.desc, inst.locker) def _trylock(self): if self.held: self.held += 1 return if lock._host is None: lock._host = socket.gethostname() lockname = '%s:%s' % (lock._host, 
self.pid) retry = 5 while not self.held and retry: retry -= 1 try: self.vfs.makelock(lockname, self.f) self.held = 1 except (OSError, IOError) as why: if why.errno == errno.EEXIST: locker = self._readlock() # special case where a parent process holds the lock -- this # is different from the pid being different because we do # want the unlock and postrelease functions to be called, # but the lockfile to not be removed. if locker == self.parentlock: self._parentheld = True self.held = 1 return locker = self._testlock(locker) if locker is not None: raise error.LockHeld(errno.EAGAIN, self.vfs.join(self.f), self.desc, locker) else: raise error.LockUnavailable(why.errno, why.strerror, why.filename, self.desc) def _readlock(self): """read lock and return its value Returns None if no lock exists, pid for old-style locks, and host:pid for new-style locks. """ try: return self.vfs.readlock(self.f) except (OSError, IOError) as why: if why.errno == errno.ENOENT: return None raise def _testlock(self, locker): if locker is None: return None try: host, pid = locker.split(":", 1) except ValueError: return locker if host != lock._host: return locker try: pid = int(pid) except ValueError: return locker if util.testpid(pid): return locker # if locker dead, break lock. must do this with another lock # held, or can race and break valid lock. try: l = lock(self.vfs, self.f + '.break', timeout=0) self.vfs.unlink(self.f) l.release() except error.LockError: return locker def testlock(self): """return id of locker if lock is valid, else None. If old-style lock, we cannot tell what machine locker is on. with new-style lock, if locker is on this machine, we can see if locker is alive. If locker is on this machine but not alive, we can safely break lock. The lock file is only deleted when None is returned. """ locker = self._readlock() return self._testlock(locker) @contextlib.contextmanager def inherit(self): """context for the lock to be inherited by a Mercurial subprocess. 
Yields a string that will be recognized by the lock in the subprocess. Communicating this string to the subprocess needs to be done separately -- typically by an environment variable. """ if not self.held: raise error.LockInheritanceContractViolation( 'inherit can only be called while lock is held') if self._inherited: raise error.LockInheritanceContractViolation( 'inherit cannot be called while lock is already inherited') if self._inheritchecker is not None: self._inheritchecker() if self.releasefn: self.releasefn() if self._parentheld: lockname = self.parentlock else: lockname = '%s:%s' % (lock._host, self.pid) self._inherited = True try: yield lockname finally: if self.acquirefn: self.acquirefn() self._inherited = False def release(self): """release the lock and execute callback function if any If the lock has been acquired multiple times, the actual release is delayed to the last release call.""" if self.held > 1: self.held -= 1 elif self.held == 1: self.held = 0 if self._getpid() != self.pid: # we forked, and are not the parent return try: if self.releasefn: self.releasefn() finally: if not self._parentheld: try: self.vfs.unlink(self.f) except OSError: pass # The postrelease functions typically assume the lock is not held # at all. if not self._parentheld: for callback in self.postrelease: callback() def release(*locks): for lock in locks: if lock is not None: lock.release() mercurial-3.7.3/mercurial/bdiff.c0000644000175000017500000002344412676531524016336 0ustar mpmmpm00000000000000/* bdiff.c - efficient binary diff extension for Mercurial Copyright 2005, 2006 Matt Mackall This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. 
Based roughly on Python difflib */ #define PY_SSIZE_T_CLEAN #include #include #include #include #include "util.h" struct line { int hash, n, e; Py_ssize_t len; const char *l; }; struct pos { int pos, len; }; struct hunk; struct hunk { int a1, a2, b1, b2; struct hunk *next; }; static int splitlines(const char *a, Py_ssize_t len, struct line **lr) { unsigned hash; int i; const char *p, *b = a; const char * const plast = a + len - 1; struct line *l; /* count the lines */ i = 1; /* extra line for sentinel */ for (p = a; p < a + len; p++) if (*p == '\n' || p == plast) i++; *lr = l = (struct line *)malloc(sizeof(struct line) * i); if (!l) return -1; /* build the line array and calculate hashes */ hash = 0; for (p = a; p < a + len; p++) { /* Leonid Yuriev's hash */ hash = (hash * 1664525) + (unsigned char)*p + 1013904223; if (*p == '\n' || p == plast) { l->hash = hash; hash = 0; l->len = p - b + 1; l->l = b; l->n = INT_MAX; l++; b = p + 1; } } /* set up a sentinel */ l->hash = 0; l->len = 0; l->l = a + len; return i - 1; } static inline int cmp(struct line *a, struct line *b) { return a->hash != b->hash || a->len != b->len || memcmp(a->l, b->l, a->len); } static int equatelines(struct line *a, int an, struct line *b, int bn) { int i, j, buckets = 1, t, scale; struct pos *h = NULL; /* build a hash table of the next highest power of 2 */ while (buckets < bn + 1) buckets *= 2; /* try to allocate a large hash table to avoid collisions */ for (scale = 4; scale; scale /= 2) { h = (struct pos *)malloc(scale * buckets * sizeof(struct pos)); if (h) break; } if (!h) return 0; buckets = buckets * scale - 1; /* clear the hash table */ for (i = 0; i <= buckets; i++) { h[i].pos = INT_MAX; h[i].len = 0; } /* add lines to the hash table chains */ for (i = bn - 1; i >= 0; i--) { /* find the equivalence class */ for (j = b[i].hash & buckets; h[j].pos != INT_MAX; j = (j + 1) & buckets) if (!cmp(b + i, b + h[j].pos)) break; /* add to the head of the equivalence class */ b[i].n = h[j].pos; 
b[i].e = j; h[j].pos = i; h[j].len++; /* keep track of popularity */ } /* compute popularity threshold */ t = (bn >= 31000) ? bn / 1000 : 1000000 / (bn + 1); /* match items in a to their equivalence class in b */ for (i = 0; i < an; i++) { /* find the equivalence class */ for (j = a[i].hash & buckets; h[j].pos != INT_MAX; j = (j + 1) & buckets) if (!cmp(a + i, b + h[j].pos)) break; a[i].e = j; /* use equivalence class for quick compare */ if (h[j].len <= t) a[i].n = h[j].pos; /* point to head of match list */ else a[i].n = INT_MAX; /* too popular */ } /* discard hash tables */ free(h); return 1; } static int longest_match(struct line *a, struct line *b, struct pos *pos, int a1, int a2, int b1, int b2, int *omi, int *omj) { int mi = a1, mj = b1, mk = 0, mb = 0, i, j, k; for (i = a1; i < a2; i++) { /* skip things before the current block */ for (j = a[i].n; j < b1; j = b[j].n) ; /* loop through all lines match a[i] in b */ for (; j < b2; j = b[j].n) { /* does this extend an earlier match? */ if (i > a1 && j > b1 && pos[j - 1].pos == i - 1) k = pos[j - 1].len + 1; else k = 1; pos[j].pos = i; pos[j].len = k; /* best match so far? 
*/ if (k > mk) { mi = i; mj = j; mk = k; } } } if (mk) { mi = mi - mk + 1; mj = mj - mk + 1; } /* expand match to include neighboring popular lines */ while (mi - mb > a1 && mj - mb > b1 && a[mi - mb - 1].e == b[mj - mb - 1].e) mb++; while (mi + mk < a2 && mj + mk < b2 && a[mi + mk].e == b[mj + mk].e) mk++; *omi = mi - mb; *omj = mj - mb; return mk + mb; } static struct hunk *recurse(struct line *a, struct line *b, struct pos *pos, int a1, int a2, int b1, int b2, struct hunk *l) { int i, j, k; while (1) { /* find the longest match in this chunk */ k = longest_match(a, b, pos, a1, a2, b1, b2, &i, &j); if (!k) return l; /* and recurse on the remaining chunks on either side */ l = recurse(a, b, pos, a1, i, b1, j, l); if (!l) return NULL; l->next = (struct hunk *)malloc(sizeof(struct hunk)); if (!l->next) return NULL; l = l->next; l->a1 = i; l->a2 = i + k; l->b1 = j; l->b2 = j + k; l->next = NULL; /* tail-recursion didn't happen, so do equivalent iteration */ a1 = i + k; b1 = j + k; } } static int diff(struct line *a, int an, struct line *b, int bn, struct hunk *base) { struct hunk *curr; struct pos *pos; int t, count = 0; /* allocate and fill arrays */ t = equatelines(a, an, b, bn); pos = (struct pos *)calloc(bn ? 
bn : 1, sizeof(struct pos)); if (pos && t) { /* generate the matching block list */ curr = recurse(a, b, pos, 0, an, 0, bn, base); if (!curr) return -1; /* sentinel end hunk */ curr->next = (struct hunk *)malloc(sizeof(struct hunk)); if (!curr->next) return -1; curr = curr->next; curr->a1 = curr->a2 = an; curr->b1 = curr->b2 = bn; curr->next = NULL; } free(pos); /* normalize the hunk list, try to push each hunk towards the end */ for (curr = base->next; curr; curr = curr->next) { struct hunk *next = curr->next; int shift = 0; if (!next) break; if (curr->a2 == next->a1) while (curr->a2 + shift < an && curr->b2 + shift < bn && !cmp(a + curr->a2 + shift, b + curr->b2 + shift)) shift++; else if (curr->b2 == next->b1) while (curr->b2 + shift < bn && curr->a2 + shift < an && !cmp(b + curr->b2 + shift, a + curr->a2 + shift)) shift++; if (!shift) continue; curr->b2 += shift; next->b1 += shift; curr->a2 += shift; next->a1 += shift; } for (curr = base->next; curr; curr = curr->next) count++; return count; } static void freehunks(struct hunk *l) { struct hunk *n; for (; l; l = n) { n = l->next; free(l); } } static PyObject *blocks(PyObject *self, PyObject *args) { PyObject *sa, *sb, *rl = NULL, *m; struct line *a, *b; struct hunk l, *h; int an, bn, count, pos = 0; l.next = NULL; if (!PyArg_ParseTuple(args, "SS:bdiff", &sa, &sb)) return NULL; an = splitlines(PyBytes_AsString(sa), PyBytes_Size(sa), &a); bn = splitlines(PyBytes_AsString(sb), PyBytes_Size(sb), &b); if (!a || !b) goto nomem; count = diff(a, an, b, bn, &l); if (count < 0) goto nomem; rl = PyList_New(count); if (!rl) goto nomem; for (h = l.next; h; h = h->next) { m = Py_BuildValue("iiii", h->a1, h->a2, h->b1, h->b2); PyList_SetItem(rl, pos, m); pos++; } nomem: free(a); free(b); freehunks(l.next); return rl ? 
rl : PyErr_NoMemory(); } static PyObject *bdiff(PyObject *self, PyObject *args) { char *sa, *sb, *rb; PyObject *result = NULL; struct line *al, *bl; struct hunk l, *h; int an, bn, count; Py_ssize_t len = 0, la, lb; PyThreadState *_save; l.next = NULL; if (!PyArg_ParseTuple(args, "s#s#:bdiff", &sa, &la, &sb, &lb)) return NULL; if (la > UINT_MAX || lb > UINT_MAX) { PyErr_SetString(PyExc_ValueError, "bdiff inputs too large"); return NULL; } _save = PyEval_SaveThread(); an = splitlines(sa, la, &al); bn = splitlines(sb, lb, &bl); if (!al || !bl) goto nomem; count = diff(al, an, bl, bn, &l); if (count < 0) goto nomem; /* calculate length of output */ la = lb = 0; for (h = l.next; h; h = h->next) { if (h->a1 != la || h->b1 != lb) len += 12 + bl[h->b1].l - bl[lb].l; la = h->a2; lb = h->b2; } PyEval_RestoreThread(_save); _save = NULL; result = PyBytes_FromStringAndSize(NULL, len); if (!result) goto nomem; /* build binary patch */ rb = PyBytes_AsString(result); la = lb = 0; for (h = l.next; h; h = h->next) { if (h->a1 != la || h->b1 != lb) { len = bl[h->b1].l - bl[lb].l; putbe32((uint32_t)(al[la].l - al->l), rb); putbe32((uint32_t)(al[h->a1].l - al->l), rb + 4); putbe32((uint32_t)len, rb + 8); memcpy(rb + 12, bl[lb].l, len); rb += 12 + len; } la = h->a2; lb = h->b2; } nomem: if (_save) PyEval_RestoreThread(_save); free(al); free(bl); freehunks(l.next); return result ? result : PyErr_NoMemory(); } /* * If allws != 0, remove all whitespace (' ', \t and \r). Otherwise, * reduce whitespace sequences to a single space and trim remaining whitespace * from end of lines. */ static PyObject *fixws(PyObject *self, PyObject *args) { PyObject *s, *result = NULL; char allws, c; const char *r; Py_ssize_t i, rlen, wlen = 0; char *w; if (!PyArg_ParseTuple(args, "Sb:fixws", &s, &allws)) return NULL; r = PyBytes_AsString(s); rlen = PyBytes_Size(s); w = (char *)malloc(rlen ? 
rlen : 1); if (!w) goto nomem; for (i = 0; i != rlen; i++) { c = r[i]; if (c == ' ' || c == '\t' || c == '\r') { if (!allws && (wlen == 0 || w[wlen - 1] != ' ')) w[wlen++] = ' '; } else if (c == '\n' && !allws && wlen > 0 && w[wlen - 1] == ' ') { w[wlen - 1] = '\n'; } else { w[wlen++] = c; } } result = PyBytes_FromStringAndSize(w, wlen); nomem: free(w); return result ? result : PyErr_NoMemory(); } static char mdiff_doc[] = "Efficient binary diff."; static PyMethodDef methods[] = { {"bdiff", bdiff, METH_VARARGS, "calculate a binary diff\n"}, {"blocks", blocks, METH_VARARGS, "find a list of matching lines\n"}, {"fixws", fixws, METH_VARARGS, "normalize diff whitespaces\n"}, {NULL, NULL} }; #ifdef IS_PY3K static struct PyModuleDef bdiff_module = { PyModuleDef_HEAD_INIT, "bdiff", mdiff_doc, -1, methods }; PyMODINIT_FUNC PyInit_bdiff(void) { return PyModule_Create(&bdiff_module); } #else PyMODINIT_FUNC initbdiff(void) { Py_InitModule3("bdiff", methods, mdiff_doc); } #endif mercurial-3.7.3/mercurial/util.py0000644000175000017500000025002212676531525016442 0ustar mpmmpm00000000000000# util.py - Mercurial utility functions and platform specific implementations # # Copyright 2005 K. Thananchayan # Copyright 2005-2007 Matt Mackall # Copyright 2006 Vadim Gelfer # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. """Mercurial utility functions and platform specific implementations. This contains helper routines that are independent of the SCM core and hide platform-specific details from the core. """ from __future__ import absolute_import import bz2 import calendar import collections import datetime import errno import gc import hashlib import imp import os import re as remod import shutil import signal import socket import subprocess import sys import tempfile import textwrap import time import traceback import urllib import zlib from . 
import ( encoding, error, i18n, osutil, parsers, ) if os.name == 'nt': from . import windows as platform else: from . import posix as platform md5 = hashlib.md5 sha1 = hashlib.sha1 sha512 = hashlib.sha512 _ = i18n._ cachestat = platform.cachestat checkexec = platform.checkexec checklink = platform.checklink copymode = platform.copymode executablepath = platform.executablepath expandglobs = platform.expandglobs explainexit = platform.explainexit findexe = platform.findexe gethgcmd = platform.gethgcmd getuser = platform.getuser groupmembers = platform.groupmembers groupname = platform.groupname hidewindow = platform.hidewindow isexec = platform.isexec isowner = platform.isowner localpath = platform.localpath lookupreg = platform.lookupreg makedir = platform.makedir nlinks = platform.nlinks normpath = platform.normpath normcase = platform.normcase normcasespec = platform.normcasespec normcasefallback = platform.normcasefallback openhardlinks = platform.openhardlinks oslink = platform.oslink parsepatchoutput = platform.parsepatchoutput pconvert = platform.pconvert poll = platform.poll popen = platform.popen posixfile = platform.posixfile quotecommand = platform.quotecommand readpipe = platform.readpipe rename = platform.rename removedirs = platform.removedirs samedevice = platform.samedevice samefile = platform.samefile samestat = platform.samestat setbinary = platform.setbinary setflags = platform.setflags setsignalhandler = platform.setsignalhandler shellquote = platform.shellquote spawndetached = platform.spawndetached split = platform.split sshargs = platform.sshargs statfiles = getattr(osutil, 'statfiles', platform.statfiles) statisexec = platform.statisexec statislink = platform.statislink termwidth = platform.termwidth testpid = platform.testpid umask = platform.umask unlink = platform.unlink unlinkpath = platform.unlinkpath username = platform.username # Python compatibility _notset = object() # disable Python's problematic floating point timestamps (issue4836) 
# (Python hypocritically says you shouldn't change this behavior in # libraries, and sure enough Mercurial is not a library.) os.stat_float_times(False) def safehasattr(thing, attr): return getattr(thing, attr, _notset) is not _notset DIGESTS = { 'md5': md5, 'sha1': sha1, 'sha512': sha512, } # List of digest types from strongest to weakest DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5'] for k in DIGESTS_BY_STRENGTH: assert k in DIGESTS class digester(object): """helper to compute digests. This helper can be used to compute one or more digests given their name. >>> d = digester(['md5', 'sha1']) >>> d.update('foo') >>> [k for k in sorted(d)] ['md5', 'sha1'] >>> d['md5'] 'acbd18db4cc2f85cedef654fccc4a4d8' >>> d['sha1'] '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33' >>> digester.preferred(['md5', 'sha1']) 'sha1' """ def __init__(self, digests, s=''): self._hashes = {} for k in digests: if k not in DIGESTS: raise Abort(_('unknown digest type: %s') % k) self._hashes[k] = DIGESTS[k]() if s: self.update(s) def update(self, data): for h in self._hashes.values(): h.update(data) def __getitem__(self, key): if key not in DIGESTS: raise Abort(_('unknown digest type: %s') % k) return self._hashes[key].hexdigest() def __iter__(self): return iter(self._hashes) @staticmethod def preferred(supported): """returns the strongest digest type in both supported and DIGESTS.""" for k in DIGESTS_BY_STRENGTH: if k in supported: return k return None class digestchecker(object): """file handle wrapper that additionally checks content against a given size and digests. d = digestchecker(fh, size, {'md5': '...'}) When multiple digests are given, all of them are validated. 
""" def __init__(self, fh, size, digests): self._fh = fh self._size = size self._got = 0 self._digests = dict(digests) self._digester = digester(self._digests.keys()) def read(self, length=-1): content = self._fh.read(length) self._digester.update(content) self._got += len(content) return content def validate(self): if self._size != self._got: raise Abort(_('size mismatch: expected %d, got %d') % (self._size, self._got)) for k, v in self._digests.items(): if v != self._digester[k]: # i18n: first parameter is a digest name raise Abort(_('%s mismatch: expected %s, got %s') % (k, v, self._digester[k])) try: buffer = buffer except NameError: if sys.version_info[0] < 3: def buffer(sliceable, offset=0): return sliceable[offset:] else: def buffer(sliceable, offset=0): return memoryview(sliceable)[offset:] closefds = os.name == 'posix' _chunksize = 4096 class bufferedinputpipe(object): """a manually buffered input pipe Python will not let us use buffered IO and lazy reading with 'polling' at the same time. We cannot probe the buffer state and select will not detect that data are ready to read if they are already buffered. This class let us work around that by implementing its own buffering (allowing efficient readline) while offering a way to know if the buffer is empty from the output (allowing collaboration of the buffer with polling). This class lives in the 'util' module because it makes use of the 'os' module from the python stdlib. """ def __init__(self, input): self._input = input self._buffer = [] self._eof = False self._lenbuf = 0 @property def hasbuffer(self): """True is any data is currently buffered This will be used externally a pre-step for polling IO. 
If there is already data then no polling should be set in place.""" return bool(self._buffer) @property def closed(self): return self._input.closed def fileno(self): return self._input.fileno() def close(self): return self._input.close() def read(self, size): while (not self._eof) and (self._lenbuf < size): self._fillbuffer() return self._frombuffer(size) def readline(self, *args, **kwargs): if 1 < len(self._buffer): # this should not happen because both read and readline end with a # _frombuffer call that collapse it. self._buffer = [''.join(self._buffer)] self._lenbuf = len(self._buffer[0]) lfi = -1 if self._buffer: lfi = self._buffer[-1].find('\n') while (not self._eof) and lfi < 0: self._fillbuffer() if self._buffer: lfi = self._buffer[-1].find('\n') size = lfi + 1 if lfi < 0: # end of file size = self._lenbuf elif 1 < len(self._buffer): # we need to take previous chunks into account size += self._lenbuf - len(self._buffer[-1]) return self._frombuffer(size) def _frombuffer(self, size): """return at most 'size' data from the buffer The data are removed from the buffer.""" if size == 0 or not self._buffer: return '' buf = self._buffer[0] if 1 < len(self._buffer): buf = ''.join(self._buffer) data = buf[:size] buf = buf[len(data):] if buf: self._buffer = [buf] self._lenbuf = len(buf) else: self._buffer = [] self._lenbuf = 0 return data def _fillbuffer(self): """read data to the buffer""" data = os.read(self._input.fileno(), _chunksize) if not data: self._eof = True else: self._lenbuf += len(data) self._buffer.append(data) def popen2(cmd, env=None, newlines=False): # Setting bufsize to -1 lets the system decide the buffer size. # The default for bufsize is 0, meaning unbuffered. 
This leads to # poor performance on Mac OS X: http://bugs.python.org/issue4194 p = subprocess.Popen(cmd, shell=True, bufsize=-1, close_fds=closefds, stdin=subprocess.PIPE, stdout=subprocess.PIPE, universal_newlines=newlines, env=env) return p.stdin, p.stdout def popen3(cmd, env=None, newlines=False): stdin, stdout, stderr, p = popen4(cmd, env, newlines) return stdin, stdout, stderr def popen4(cmd, env=None, newlines=False, bufsize=-1): p = subprocess.Popen(cmd, shell=True, bufsize=bufsize, close_fds=closefds, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=newlines, env=env) return p.stdin, p.stdout, p.stderr, p def version(): """Return version information if available.""" try: from . import __version__ return __version__.version except ImportError: return 'unknown' def versiontuple(v=None, n=4): """Parses a Mercurial version string into an N-tuple. The version string to be parsed is specified with the ``v`` argument. If it isn't defined, the current Mercurial version string will be parsed. ``n`` can be 2, 3, or 4. 
Here is how some version strings map to returned values: >>> v = '3.6.1+190-df9b73d2d444' >>> versiontuple(v, 2) (3, 6) >>> versiontuple(v, 3) (3, 6, 1) >>> versiontuple(v, 4) (3, 6, 1, '190-df9b73d2d444') >>> versiontuple('3.6.1+190-df9b73d2d444+20151118') (3, 6, 1, '190-df9b73d2d444+20151118') >>> v = '3.6' >>> versiontuple(v, 2) (3, 6) >>> versiontuple(v, 3) (3, 6, None) >>> versiontuple(v, 4) (3, 6, None, None) """ if not v: v = version() parts = v.split('+', 1) if len(parts) == 1: vparts, extra = parts[0], None else: vparts, extra = parts vints = [] for i in vparts.split('.'): try: vints.append(int(i)) except ValueError: break # (3, 6) -> (3, 6, None) while len(vints) < 3: vints.append(None) if n == 2: return (vints[0], vints[1]) if n == 3: return (vints[0], vints[1], vints[2]) if n == 4: return (vints[0], vints[1], vints[2], extra) # used by parsedate defaultdateformats = ( '%Y-%m-%d %H:%M:%S', '%Y-%m-%d %I:%M:%S%p', '%Y-%m-%d %H:%M', '%Y-%m-%d %I:%M%p', '%Y-%m-%d', '%m-%d', '%m/%d', '%m/%d/%y', '%m/%d/%Y', '%a %b %d %H:%M:%S %Y', '%a %b %d %I:%M:%S%p %Y', '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822" '%b %d %H:%M:%S %Y', '%b %d %I:%M:%S%p %Y', '%b %d %H:%M:%S', '%b %d %I:%M:%S%p', '%b %d %H:%M', '%b %d %I:%M%p', '%b %d %Y', '%b %d', '%H:%M:%S', '%I:%M:%S%p', '%H:%M', '%I:%M%p', ) extendeddateformats = defaultdateformats + ( "%Y", "%Y-%m", "%b", "%b %Y", ) def cachefunc(func): '''cache the result of function calls''' # XXX doesn't handle keywords args if func.func_code.co_argcount == 0: cache = [] def f(): if len(cache) == 0: cache.append(func()) return cache[0] return f cache = {} if func.func_code.co_argcount == 1: # we gain a small amount of time because # we don't need to pack/unpack the list def f(arg): if arg not in cache: cache[arg] = func(arg) return cache[arg] else: def f(*args): if args not in cache: cache[args] = func(*args) return cache[args] return f class sortdict(dict): '''a simple sorted dictionary''' def __init__(self, 
data=None): self._list = [] if data: self.update(data) def copy(self): return sortdict(self) def __setitem__(self, key, val): if key in self: self._list.remove(key) self._list.append(key) dict.__setitem__(self, key, val) def __iter__(self): return self._list.__iter__() def update(self, src): if isinstance(src, dict): src = src.iteritems() for k, v in src: self[k] = v def clear(self): dict.clear(self) self._list = [] def items(self): return [(k, self[k]) for k in self._list] def __delitem__(self, key): dict.__delitem__(self, key) self._list.remove(key) def pop(self, key, *args, **kwargs): dict.pop(self, key, *args, **kwargs) try: self._list.remove(key) except ValueError: pass def keys(self): return self._list def iterkeys(self): return self._list.__iter__() def iteritems(self): for k in self._list: yield k, self[k] def insert(self, index, key, val): self._list.insert(index, key) dict.__setitem__(self, key, val) class _lrucachenode(object): """A node in a doubly linked list. Holds a reference to nodes on either side as well as a key-value pair for the dictionary entry. """ __slots__ = ('next', 'prev', 'key', 'value') def __init__(self): self.next = None self.prev = None self.key = _notset self.value = None def markempty(self): """Mark the node as emptied.""" self.key = _notset class lrucachedict(object): """Dict that caches most recent accesses and sets. The dict consists of an actual backing dict - indexed by original key - and a doubly linked circular list defining the order of entries in the cache. The head node is the newest entry in the cache. If the cache is full, we recycle head.prev and make it the new head. Cache accesses result in the node being moved to before the existing head and being marked as the new head node. 
""" def __init__(self, max): self._cache = {} self._head = head = _lrucachenode() head.prev = head head.next = head self._size = 1 self._capacity = max def __len__(self): return len(self._cache) def __contains__(self, k): return k in self._cache def __iter__(self): # We don't have to iterate in cache order, but why not. n = self._head for i in range(len(self._cache)): yield n.key n = n.next def __getitem__(self, k): node = self._cache[k] self._movetohead(node) return node.value def __setitem__(self, k, v): node = self._cache.get(k) # Replace existing value and mark as newest. if node is not None: node.value = v self._movetohead(node) return if self._size < self._capacity: node = self._addcapacity() else: # Grab the last/oldest item. node = self._head.prev # At capacity. Kill the old entry. if node.key is not _notset: del self._cache[node.key] node.key = k node.value = v self._cache[k] = node # And mark it as newest entry. No need to adjust order since it # is already self._head.prev. self._head = node def __delitem__(self, k): node = self._cache.pop(k) node.markempty() # Temporarily mark as newest item before re-adjusting head to make # this node the oldest item. self._movetohead(node) self._head = node.next # Additional dict methods. def get(self, k, default=None): try: return self._cache[k] except KeyError: return default def clear(self): n = self._head while n.key is not _notset: n.markempty() n = n.next self._cache.clear() def copy(self): result = lrucachedict(self._capacity) n = self._head.prev # Iterate in oldest-to-newest order, so the copy has the right ordering for i in range(len(self._cache)): result[n.key] = n.value n = n.prev return result def _movetohead(self, node): """Mark a node as the newest, making it the new head. When a node is accessed, it becomes the freshest entry in the LRU list, which is denoted by self._head. 
Visually, let's make ``N`` the new head node (* denotes head): previous/oldest <-> head <-> next/next newest ----<->--- A* ---<->----- | | E <-> D <-> N <-> C <-> B To: ----<->--- N* ---<->----- | | E <-> D <-> C <-> B <-> A This requires the following moves: C.next = D (node.prev.next = node.next) D.prev = C (node.next.prev = node.prev) E.next = N (head.prev.next = node) N.prev = E (node.prev = head.prev) N.next = A (node.next = head) A.prev = N (head.prev = node) """ head = self._head # C.next = D node.prev.next = node.next # D.prev = C node.next.prev = node.prev # N.prev = E node.prev = head.prev # N.next = A # It is tempting to do just "head" here, however if node is # adjacent to head, this will do bad things. node.next = head.prev.next # E.next = N node.next.prev = node # A.prev = N node.prev.next = node self._head = node def _addcapacity(self): """Add a node to the circular linked list. The new node is inserted before the head node. """ head = self._head node = _lrucachenode() head.prev.next = node node.prev = head.prev node.next = head head.prev = node self._size += 1 return node def lrucachefunc(func): '''cache most recent results of function calls''' cache = {} order = collections.deque() if func.func_code.co_argcount == 1: def f(arg): if arg not in cache: if len(cache) > 20: del cache[order.popleft()] cache[arg] = func(arg) else: order.remove(arg) order.append(arg) return cache[arg] else: def f(*args): if args not in cache: if len(cache) > 20: del cache[order.popleft()] cache[args] = func(*args) else: order.remove(args) order.append(args) return cache[args] return f class propertycache(object): def __init__(self, func): self.func = func self.name = func.__name__ def __get__(self, obj, type=None): result = self.func(obj) self.cachevalue(obj, result) return result def cachevalue(self, obj, value): # __dict__ assignment required to bypass __setattr__ (eg: repoview) obj.__dict__[self.name] = value def pipefilter(s, cmd): '''filter string S through command 
CMD, returning its output''' p = subprocess.Popen(cmd, shell=True, close_fds=closefds, stdin=subprocess.PIPE, stdout=subprocess.PIPE) pout, perr = p.communicate(s) return pout def tempfilter(s, cmd): '''filter string S through a pair of temporary files with CMD. CMD is used as a template to create the real command to be run, with the strings INFILE and OUTFILE replaced by the real names of the temporary files generated.''' inname, outname = None, None try: infd, inname = tempfile.mkstemp(prefix='hg-filter-in-') fp = os.fdopen(infd, 'wb') fp.write(s) fp.close() outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-') os.close(outfd) cmd = cmd.replace('INFILE', inname) cmd = cmd.replace('OUTFILE', outname) code = os.system(cmd) if sys.platform == 'OpenVMS' and code & 1: code = 0 if code: raise Abort(_("command '%s' failed: %s") % (cmd, explainexit(code))) return readfile(outname) finally: try: if inname: os.unlink(inname) except OSError: pass try: if outname: os.unlink(outname) except OSError: pass filtertable = { 'tempfile:': tempfilter, 'pipe:': pipefilter, } def filter(s, cmd): "filter a string through a command that transforms its input to its output" for name, fn in filtertable.iteritems(): if cmd.startswith(name): return fn(s, cmd[len(name):].lstrip()) return pipefilter(s, cmd) def binary(s): """return true if a string is binary data""" return bool(s and '\0' in s) def increasingchunks(source, min=1024, max=65536): '''return no less than min bytes per chunk while data remains, doubling min after each chunk until it reaches max''' def log2(x): if not x: return 0 i = 0 while x: x >>= 1 i += 1 return i - 1 buf = [] blen = 0 for chunk in source: buf.append(chunk) blen += len(chunk) if blen >= min: if min < max: min = min << 1 nmin = 1 << log2(blen) if nmin > min: min = nmin if min > max: min = max yield ''.join(buf) blen = 0 buf = [] if buf: yield ''.join(buf) Abort = error.Abort def always(fn): return True def never(fn): return False def nogc(func): """disable 
garbage collector Python's garbage collector triggers a GC each time a certain number of container objects (the number being defined by gc.get_threshold()) are allocated even when marked not to be tracked by the collector. Tracking has no effect on when GCs are triggered, only on what objects the GC looks into. As a workaround, disable GC while building complex (huge) containers. This garbage collector issue have been fixed in 2.7. """ def wrapper(*args, **kwargs): gcenabled = gc.isenabled() gc.disable() try: return func(*args, **kwargs) finally: if gcenabled: gc.enable() return wrapper def pathto(root, n1, n2): '''return the relative path from one place to another. root should use os.sep to separate directories n1 should use os.sep to separate directories n2 should use "/" to separate directories returns an os.sep-separated path. If n1 is a relative path, it's assumed it's relative to root. n2 should always be relative to root. ''' if not n1: return localpath(n2) if os.path.isabs(n1): if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]: return os.path.join(root, localpath(n2)) n2 = '/'.join((pconvert(root), n2)) a, b = splitpath(n1), n2.split('/') a.reverse() b.reverse() while a and b and a[-1] == b[-1]: a.pop() b.pop() b.reverse() return os.sep.join((['..'] * len(a)) + b) or '.' def mainfrozen(): """return True if we are a frozen executable. The code supports py2exe (most common, Windows only) and tools/freeze (portable, not much used). """ return (safehasattr(sys, "frozen") or # new py2exe safehasattr(sys, "importers") or # old py2exe imp.is_frozen("__main__")) # tools/freeze # the location of data files matching the source code if mainfrozen() and getattr(sys, 'frozen', None) != 'macosx_app': # executable version (py2exe) doesn't support __file__ datapath = os.path.dirname(sys.executable) else: datapath = os.path.dirname(__file__) i18n.setdatapath(datapath) _hgexecutable = None def hgexecutable(): """return location of the 'hg' executable. 
Defaults to $HG or 'hg' in the search path. """ if _hgexecutable is None: hg = os.environ.get('HG') mainmod = sys.modules['__main__'] if hg: _sethgexecutable(hg) elif mainfrozen(): if getattr(sys, 'frozen', None) == 'macosx_app': # Env variable set by py2app _sethgexecutable(os.environ['EXECUTABLEPATH']) else: _sethgexecutable(sys.executable) elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg': _sethgexecutable(mainmod.__file__) else: exe = findexe('hg') or os.path.basename(sys.argv[0]) _sethgexecutable(exe) return _hgexecutable def _sethgexecutable(path): """set location of the 'hg' executable""" global _hgexecutable _hgexecutable = path def _isstdout(f): fileno = getattr(f, 'fileno', None) return fileno and fileno() == sys.__stdout__.fileno() def system(cmd, environ=None, cwd=None, onerr=None, errprefix=None, out=None): '''enhanced shell command execution. run with environment maybe modified, maybe in different dir. if command fails and onerr is None, return status, else raise onerr object as exception. if out is specified, it is assumed to be a file-like object that has a write() method. 
stdout and stderr will be redirected to out.''' if environ is None: environ = {} try: sys.stdout.flush() except Exception: pass def py2shell(val): 'convert python object into string that is useful to shell' if val is None or val is False: return '0' if val is True: return '1' return str(val) origcmd = cmd cmd = quotecommand(cmd) if sys.platform == 'plan9' and (sys.version_info[0] == 2 and sys.version_info[1] < 7): # subprocess kludge to work around issues in half-baked Python # ports, notably bichued/python: if not cwd is None: os.chdir(cwd) rc = os.system(cmd) else: env = dict(os.environ) env.update((k, py2shell(v)) for k, v in environ.iteritems()) env['HG'] = hgexecutable() if out is None or _isstdout(out): rc = subprocess.call(cmd, shell=True, close_fds=closefds, env=env, cwd=cwd) else: proc = subprocess.Popen(cmd, shell=True, close_fds=closefds, env=env, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) while True: line = proc.stdout.readline() if not line: break out.write(line) proc.wait() rc = proc.returncode if sys.platform == 'OpenVMS' and rc & 1: rc = 0 if rc and onerr: errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]), explainexit(rc)[0]) if errprefix: errmsg = '%s: %s' % (errprefix, errmsg) raise onerr(errmsg) return rc def checksignature(func): '''wrap a function with code to check for calling errors''' def check(*args, **kwargs): try: return func(*args, **kwargs) except TypeError: if len(traceback.extract_tb(sys.exc_info()[2])) == 1: raise error.SignatureError raise return check def copyfile(src, dest, hardlink=False, copystat=False): '''copy a file, preserving mode and optionally other stat info like atime/mtime''' if os.path.lexists(dest): unlink(dest) # hardlinks are problematic on CIFS, quietly ignore this flag # until we find a way to work around it cleanly (issue4546) if False and hardlink: try: oslink(src, dest) return except (IOError, OSError): pass # fall back to normal copy if os.path.islink(src): 
os.symlink(os.readlink(src), dest) # copytime is ignored for symlinks, but in general copytime isn't needed # for them anyway else: try: shutil.copyfile(src, dest) if copystat: # copystat also copies mode shutil.copystat(src, dest) else: shutil.copymode(src, dest) except shutil.Error as inst: raise Abort(str(inst)) def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None): """Copy a directory tree using hardlinks if possible.""" num = 0 if hardlink is None: hardlink = (os.stat(src).st_dev == os.stat(os.path.dirname(dst)).st_dev) if hardlink: topic = _('linking') else: topic = _('copying') if os.path.isdir(src): os.mkdir(dst) for name, kind in osutil.listdir(src): srcname = os.path.join(src, name) dstname = os.path.join(dst, name) def nprog(t, pos): if pos is not None: return progress(t, pos + num) hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog) num += n else: if hardlink: try: oslink(src, dst) except (IOError, OSError): hardlink = False shutil.copy(src, dst) else: shutil.copy(src, dst) num += 1 progress(topic, num) progress(topic, None) return hardlink, num _winreservednames = '''con prn aux nul com1 com2 com3 com4 com5 com6 com7 com8 com9 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split() _winreservedchars = ':*?"<>|' def checkwinfilename(path): r'''Check that the base-relative path is a valid filename on Windows. Returns None if the path is ok, or a UI string describing the problem. 
>>> checkwinfilename("just/a/normal/path") >>> checkwinfilename("foo/bar/con.xml") "filename contains 'con', which is reserved on Windows" >>> checkwinfilename("foo/con.xml/bar") "filename contains 'con', which is reserved on Windows" >>> checkwinfilename("foo/bar/xml.con") >>> checkwinfilename("foo/bar/AUX/bla.txt") "filename contains 'AUX', which is reserved on Windows" >>> checkwinfilename("foo/bar/bla:.txt") "filename contains ':', which is reserved on Windows" >>> checkwinfilename("foo/bar/b\07la.txt") "filename contains '\\x07', which is invalid on Windows" >>> checkwinfilename("foo/bar/bla ") "filename ends with ' ', which is not allowed on Windows" >>> checkwinfilename("../bar") >>> checkwinfilename("foo\\") "filename ends with '\\', which is invalid on Windows" >>> checkwinfilename("foo\\/bar") "directory name ends with '\\', which is invalid on Windows" ''' if path.endswith('\\'): return _("filename ends with '\\', which is invalid on Windows") if '\\/' in path: return _("directory name ends with '\\', which is invalid on Windows") for n in path.replace('\\', '/').split('/'): if not n: continue for c in n: if c in _winreservedchars: return _("filename contains '%s', which is reserved " "on Windows") % c if ord(c) <= 31: return _("filename contains %r, which is invalid " "on Windows") % c base = n.split('.')[0] if base and base.lower() in _winreservednames: return _("filename contains '%s', which is reserved " "on Windows") % base t = n[-1] if t in '. 
' and n not in '..': return _("filename ends with '%s', which is not allowed " "on Windows") % t if os.name == 'nt': checkosfilename = checkwinfilename else: checkosfilename = platform.checkosfilename def makelock(info, pathname): try: return os.symlink(info, pathname) except OSError as why: if why.errno == errno.EEXIST: raise except AttributeError: # no symlink in os pass ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL) os.write(ld, info) os.close(ld) def readlock(pathname): try: return os.readlink(pathname) except OSError as why: if why.errno not in (errno.EINVAL, errno.ENOSYS): raise except AttributeError: # no symlink in os pass fp = posixfile(pathname) r = fp.read() fp.close() return r def fstat(fp): '''stat file object that may not have fileno method.''' try: return os.fstat(fp.fileno()) except AttributeError: return os.stat(fp.name) # File system features def checkcase(path): """ Return true if the given path is on a case-sensitive filesystem Requires a path (like /foo/.hg) ending with a foldable final directory component. """ s1 = os.lstat(path) d, b = os.path.split(path) b2 = b.upper() if b == b2: b2 = b.lower() if b == b2: return True # no evidence against case sensitivity p2 = os.path.join(d, b2) try: s2 = os.lstat(p2) if s2 == s1: return False return True except OSError: return True try: import re2 _re2 = None except ImportError: _re2 = False class _re(object): def _checkre2(self): global _re2 try: # check if match works, see issue3964 _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]')) except ImportError: _re2 = False def compile(self, pat, flags=0): '''Compile a regular expression, using re2 if possible For best performance, use only re2-compatible regexp features. 
The only flags from the re module that are re2-compatible are IGNORECASE and MULTILINE.''' if _re2 is None: self._checkre2() if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0: if flags & remod.IGNORECASE: pat = '(?i)' + pat if flags & remod.MULTILINE: pat = '(?m)' + pat try: return re2.compile(pat) except re2.error: pass return remod.compile(pat, flags) @propertycache def escape(self): '''Return the version of escape corresponding to self.compile. This is imperfect because whether re2 or re is used for a particular function depends on the flags, etc, but it's the best we can do. ''' global _re2 if _re2 is None: self._checkre2() if _re2: return re2.escape else: return remod.escape re = _re() _fspathcache = {} def fspath(name, root): '''Get name in the case stored in the filesystem The name should be relative to root, and be normcase-ed for efficiency. Note that this function is unnecessary, and should not be called, for case-sensitive filesystems (simply because it's expensive). The root should be normcase-ed, too. ''' def _makefspathcacheentry(dir): return dict((normcase(n), n) for n in os.listdir(dir)) seps = os.sep if os.altsep: seps = seps + os.altsep # Protect backslashes. This gets silly very quickly. 
seps.replace('\\','\\\\') pattern = remod.compile(r'([^%s]+)|([%s]+)' % (seps, seps)) dir = os.path.normpath(root) result = [] for part, sep in pattern.findall(name): if sep: result.append(sep) continue if dir not in _fspathcache: _fspathcache[dir] = _makefspathcacheentry(dir) contents = _fspathcache[dir] found = contents.get(part) if not found: # retry "once per directory" per "dirstate.walk" which # may take place for each patches of "hg qpush", for example _fspathcache[dir] = contents = _makefspathcacheentry(dir) found = contents.get(part) result.append(found or part) dir = os.path.join(dir, part) return ''.join(result) def checknlink(testfile): '''check whether hardlink count reporting works properly''' # testfile may be open, so we need a separate file for checking to # work around issue2543 (or testfile may get lost on Samba shares) f1 = testfile + ".hgtmp1" if os.path.lexists(f1): return False try: posixfile(f1, 'w').close() except IOError: return False f2 = testfile + ".hgtmp2" fd = None try: oslink(f1, f2) # nlinks() may behave differently for files on Windows shares if # the file is open. fd = posixfile(f2) return nlinks(f2) > 1 except OSError: return False finally: if fd is not None: fd.close() for f in (f1, f2): try: os.unlink(f) except OSError: pass def endswithsep(path): '''Check path ends with os.sep or os.altsep.''' return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep) def splitpath(path): '''Split path by os.sep. Note that this function does not use os.altsep because this is an alternative of simple "xxx.split(os.sep)". 
It is recommended to use os.path.normpath() before using this function if need.''' return path.split(os.sep) def gui(): '''Are we running in a GUI?''' if sys.platform == 'darwin': if 'SSH_CONNECTION' in os.environ: # handle SSH access to a box where the user is logged in return False elif getattr(osutil, 'isgui', None): # check if a CoreGraphics session is available return osutil.isgui() else: # pure build; use a safe default return True else: return os.name == "nt" or os.environ.get("DISPLAY") def mktempcopy(name, emptyok=False, createmode=None): """Create a temporary file with the same contents from name The permission bits are copied from the original file. If the temporary file is going to be truncated immediately, you can use emptyok=True as an optimization. Returns the name of the temporary file. """ d, fn = os.path.split(name) fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d) os.close(fd) # Temporary files are created with mode 0600, which is usually not # what we want. If the original file already exists, just copy # its mode. Otherwise, manually obey umask. copymode(name, temp, createmode) if emptyok: return temp try: try: ifp = posixfile(name, "rb") except IOError as inst: if inst.errno == errno.ENOENT: return temp if not getattr(inst, 'filename', None): inst.filename = name raise ofp = posixfile(temp, "wb") for chunk in filechunkiter(ifp): ofp.write(chunk) ifp.close() ofp.close() except: # re-raises try: os.unlink(temp) except OSError: pass raise return temp class atomictempfile(object): '''writable file object that atomically updates a file All writes will go to a temporary copy of the original file. Call close() when you are done writing, and atomictempfile will rename the temporary copy to the original name, making the changes visible. If the object is destroyed without being closed, all your writes are discarded. 
''' def __init__(self, name, mode='w+b', createmode=None): self.__name = name # permanent name self._tempname = mktempcopy(name, emptyok=('w' in mode), createmode=createmode) self._fp = posixfile(self._tempname, mode) # delegated methods self.write = self._fp.write self.seek = self._fp.seek self.tell = self._fp.tell self.fileno = self._fp.fileno def close(self): if not self._fp.closed: self._fp.close() rename(self._tempname, localpath(self.__name)) def discard(self): if not self._fp.closed: try: os.unlink(self._tempname) except OSError: pass self._fp.close() def __del__(self): if safehasattr(self, '_fp'): # constructor actually did something self.discard() def makedirs(name, mode=None, notindexed=False): """recursive directory creation with parent mode inheritance""" try: makedir(name, notindexed) except OSError as err: if err.errno == errno.EEXIST: return if err.errno != errno.ENOENT or not name: raise parent = os.path.dirname(os.path.abspath(name)) if parent == name: raise makedirs(parent, mode, notindexed) makedir(name, notindexed) if mode is not None: os.chmod(name, mode) def ensuredirs(name, mode=None, notindexed=False): """race-safe recursive directory creation Newly created directories are marked as "not to be indexed by the content indexing service", if ``notindexed`` is specified for "write" mode access. 
""" if os.path.isdir(name): return parent = os.path.dirname(os.path.abspath(name)) if parent != name: ensuredirs(parent, mode, notindexed) try: makedir(name, notindexed) except OSError as err: if err.errno == errno.EEXIST and os.path.isdir(name): # someone else seems to have won a directory creation race return raise if mode is not None: os.chmod(name, mode) def readfile(path): with open(path, 'rb') as fp: return fp.read() def writefile(path, text): with open(path, 'wb') as fp: fp.write(text) def appendfile(path, text): with open(path, 'ab') as fp: fp.write(text) class chunkbuffer(object): """Allow arbitrary sized chunks of data to be efficiently read from an iterator over chunks of arbitrary size.""" def __init__(self, in_iter): """in_iter is the iterator that's iterating over the input chunks. targetsize is how big a buffer to try to maintain.""" def splitbig(chunks): for chunk in chunks: if len(chunk) > 2**20: pos = 0 while pos < len(chunk): end = pos + 2 ** 18 yield chunk[pos:end] pos = end else: yield chunk self.iter = splitbig(in_iter) self._queue = collections.deque() self._chunkoffset = 0 def read(self, l=None): """Read L bytes of data from the iterator of chunks of data. Returns less than L bytes if the iterator runs dry. If size parameter is omitted, read everything""" if l is None: return ''.join(self.iter) left = l buf = [] queue = self._queue while left > 0: # refill the queue if not queue: target = 2**18 for chunk in self.iter: queue.append(chunk) target -= len(chunk) if target <= 0: break if not queue: break # The easy way to do this would be to queue.popleft(), modify the # chunk (if necessary), then queue.appendleft(). However, for cases # where we read partial chunk content, this incurs 2 dequeue # mutations and creates a new str for the remaining chunk in the # queue. Our code below avoids this overhead. chunk = queue[0] chunkl = len(chunk) offset = self._chunkoffset # Use full chunk. 
if offset == 0 and left >= chunkl: left -= chunkl queue.popleft() buf.append(chunk) # self._chunkoffset remains at 0. continue chunkremaining = chunkl - offset # Use all of unconsumed part of chunk. if left >= chunkremaining: left -= chunkremaining queue.popleft() # offset == 0 is enabled by block above, so this won't merely # copy via ``chunk[0:]``. buf.append(chunk[offset:]) self._chunkoffset = 0 # Partial chunk needed. else: buf.append(chunk[offset:offset + left]) self._chunkoffset += left left -= chunkremaining return ''.join(buf) def filechunkiter(f, size=65536, limit=None): """Create a generator that produces the data in the file size (default 65536) bytes at a time, up to optional limit (default is to read all data). Chunks may be less than size bytes if the chunk is the last chunk in the file, or the file is a socket or some other type of file that sometimes reads less data than is requested.""" assert size >= 0 assert limit is None or limit >= 0 while True: if limit is None: nbytes = size else: nbytes = min(limit, size) s = nbytes and f.read(nbytes) if not s: break if limit: limit -= len(s) yield s def makedate(timestamp=None): '''Return a unix timestamp (or the current time) as a (unixtime, offset) tuple based off the local timezone.''' if timestamp is None: timestamp = time.time() if timestamp < 0: hint = _("check your clock") raise Abort(_("negative timestamp: %d") % timestamp, hint=hint) delta = (datetime.datetime.utcfromtimestamp(timestamp) - datetime.datetime.fromtimestamp(timestamp)) tz = delta.days * 86400 + delta.seconds return timestamp, tz def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'): """represent a (unixtime, offset) tuple as a localized time. unixtime is seconds since the epoch, and offset is the time zone's number of seconds away from UTC. 
if timezone is false, do not append time zone to string.""" t, tz = date or makedate() if t < 0: t = 0 # time.gmtime(lt) fails on Windows for lt < -43200 tz = 0 if "%1" in format or "%2" in format or "%z" in format: sign = (tz > 0) and "-" or "+" minutes = abs(tz) // 60 q, r = divmod(minutes, 60) format = format.replace("%z", "%1%2") format = format.replace("%1", "%c%02d" % (sign, q)) format = format.replace("%2", "%02d" % r) try: t = time.gmtime(float(t) - tz) except ValueError: # time was out of range t = time.gmtime(sys.maxint) s = time.strftime(format, t) return s def shortdate(date=None): """turn (timestamp, tzoff) tuple into iso 8631 date.""" return datestr(date, format='%Y-%m-%d') def parsetimezone(tz): """parse a timezone string and return an offset integer""" if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit(): sign = (tz[0] == "+") and 1 or -1 hours = int(tz[1:3]) minutes = int(tz[3:5]) return -sign * (hours * 60 + minutes) * 60 if tz == "GMT" or tz == "UTC": return 0 return None def strdate(string, format, defaults=[]): """parse a localized time string and return a (unixtime, offset) tuple. 
if the string cannot be parsed, ValueError is raised.""" # NOTE: unixtime = localunixtime + offset offset, date = parsetimezone(string.split()[-1]), string if offset is not None: date = " ".join(string.split()[:-1]) # add missing elements from defaults usenow = False # default to using biased defaults for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity found = [True for p in part if ("%"+p) in format] if not found: date += "@" + defaults[part][usenow] format += "@%" + part[0] else: # We've found a specific time element, less specific time # elements are relative to today usenow = True timetuple = time.strptime(date, format) localunixtime = int(calendar.timegm(timetuple)) if offset is None: # local timezone unixtime = int(time.mktime(timetuple)) offset = unixtime - localunixtime else: unixtime = localunixtime + offset return unixtime, offset def parsedate(date, formats=None, bias=None): """parse a localized date/time and return a (unixtime, offset) tuple. The date may be a "unixtime offset" string or in one of the specified formats. If the date already is a (unixtime, offset) tuple, it is returned. 
>>> parsedate(' today ') == parsedate(\ datetime.date.today().strftime('%b %d')) True >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\ datetime.timedelta(days=1)\ ).strftime('%b %d')) True >>> now, tz = makedate() >>> strnow, strtz = parsedate('now') >>> (strnow - now) < 1 True >>> tz == strtz True """ if bias is None: bias = {} if not date: return 0, 0 if isinstance(date, tuple) and len(date) == 2: return date if not formats: formats = defaultdateformats date = date.strip() if date == 'now' or date == _('now'): return makedate() if date == 'today' or date == _('today'): date = datetime.date.today().strftime('%b %d') elif date == 'yesterday' or date == _('yesterday'): date = (datetime.date.today() - datetime.timedelta(days=1)).strftime('%b %d') try: when, offset = map(int, date.split(' ')) except ValueError: # fill out defaults now = makedate() defaults = {} for part in ("d", "mb", "yY", "HI", "M", "S"): # this piece is for rounding the specific end of unknowns b = bias.get(part) if b is None: if part[0] in "HMS": b = "00" else: b = "0" # this piece is for matching the generic end to today's date n = datestr(now, "%" + part[0]) defaults[part] = (b, n) for format in formats: try: when, offset = strdate(date, format, defaults) except (ValueError, OverflowError): pass else: break else: raise Abort(_('invalid date: %r') % date) # validate explicit (probably user-specified) date and # time zone offset. values must fit in signed 32 bits for # current 32-bit linux runtimes. 
timezones go from UTC-12 # to UTC+14 if abs(when) > 0x7fffffff: raise Abort(_('date exceeds 32 bits: %d') % when) if when < 0: raise Abort(_('negative date value: %d') % when) if offset < -50400 or offset > 43200: raise Abort(_('impossible time zone offset: %d') % offset) return when, offset def matchdate(date): """Return a function that matches a given date match specifier Formats include: '{date}' match a given date to the accuracy provided '<{date}' on or before a given date '>{date}' on or after a given date >>> p1 = parsedate("10:29:59") >>> p2 = parsedate("10:30:00") >>> p3 = parsedate("10:30:59") >>> p4 = parsedate("10:31:00") >>> p5 = parsedate("Sep 15 10:30:00 1999") >>> f = matchdate("10:30") >>> f(p1[0]) False >>> f(p2[0]) True >>> f(p3[0]) True >>> f(p4[0]) False >>> f(p5[0]) False """ def lower(date): d = {'mb': "1", 'd': "1"} return parsedate(date, extendeddateformats, d)[0] def upper(date): d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"} for days in ("31", "30", "29"): try: d["d"] = days return parsedate(date, extendeddateformats, d)[0] except Abort: pass d["d"] = "28" return parsedate(date, extendeddateformats, d)[0] date = date.strip() if not date: raise Abort(_("dates cannot consist entirely of whitespace")) elif date[0] == "<": if not date[1:]: raise Abort(_("invalid day spec, use '": if not date[1:]: raise Abort(_("invalid day spec, use '>DATE'")) when = lower(date[1:]) return lambda x: x >= when elif date[0] == "-": try: days = int(date[1:]) except ValueError: raise Abort(_("invalid day spec: %s") % date[1:]) if days < 0: raise Abort(_('%s must be nonnegative (see "hg help dates")') % date[1:]) when = makedate()[0] - days * 3600 * 24 return lambda x: x >= when elif " to " in date: a, b = date.split(" to ") start, stop = lower(a), upper(b) return lambda x: x >= start and x <= stop else: start, stop = lower(date), upper(date) return lambda x: x >= start and x <= stop def stringmatcher(pattern): """ accepts a string, possibly starting with 
're:' or 'literal:' prefix. returns the matcher name, pattern, and matcher function. missing or unknown prefixes are treated as literal matches. helper for tests: >>> def test(pattern, *tests): ... kind, pattern, matcher = stringmatcher(pattern) ... return (kind, pattern, [bool(matcher(t)) for t in tests]) exact matching (no prefix): >>> test('abcdefg', 'abc', 'def', 'abcdefg') ('literal', 'abcdefg', [False, False, True]) regex matching ('re:' prefix) >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar') ('re', 'a.+b', [False, False, True]) force exact matches ('literal:' prefix) >>> test('literal:re:foobar', 'foobar', 're:foobar') ('literal', 're:foobar', [False, True]) unknown prefixes are ignored and treated as literals >>> test('foo:bar', 'foo', 'bar', 'foo:bar') ('literal', 'foo:bar', [False, False, True]) """ if pattern.startswith('re:'): pattern = pattern[3:] try: regex = remod.compile(pattern) except remod.error as e: raise error.ParseError(_('invalid regular expression: %s') % e) return 're', pattern, regex.search elif pattern.startswith('literal:'): pattern = pattern[8:] return 'literal', pattern, pattern.__eq__ def shortuser(user): """Return a short representation of a user name or email address.""" f = user.find('@') if f >= 0: user = user[:f] f = user.find('<') if f >= 0: user = user[f + 1:] f = user.find(' ') if f >= 0: user = user[:f] f = user.find('.') if f >= 0: user = user[:f] return user def emailuser(user): """Return the user portion of an email address.""" f = user.find('@') if f >= 0: user = user[:f] f = user.find('<') if f >= 0: user = user[f + 1:] return user def email(author): '''get email of author.''' r = author.find('>') if r == -1: r = None return author[author.find('<') + 1:r] def ellipsis(text, maxlength=400): """Trim string to at most maxlength (default: 400) columns in display.""" return encoding.trim(text, maxlength, ellipsis='...') def unitcountfn(*unittable): '''return a function that renders a readable count of some 
quantity''' def go(count): for multiplier, divisor, format in unittable: if count >= divisor * multiplier: return format % (count / float(divisor)) return unittable[-1][2] % count return go bytecount = unitcountfn( (100, 1 << 30, _('%.0f GB')), (10, 1 << 30, _('%.1f GB')), (1, 1 << 30, _('%.2f GB')), (100, 1 << 20, _('%.0f MB')), (10, 1 << 20, _('%.1f MB')), (1, 1 << 20, _('%.2f MB')), (100, 1 << 10, _('%.0f KB')), (10, 1 << 10, _('%.1f KB')), (1, 1 << 10, _('%.2f KB')), (1, 1, _('%.0f bytes')), ) def uirepr(s): # Avoid double backslash in Windows path repr() return repr(s).replace('\\\\', '\\') # delay import of textwrap def MBTextWrapper(**kwargs): class tw(textwrap.TextWrapper): """ Extend TextWrapper for width-awareness. Neither number of 'bytes' in any encoding nor 'characters' is appropriate to calculate terminal columns for specified string. Original TextWrapper implementation uses built-in 'len()' directly, so overriding is needed to use width information of each characters. In addition, characters classified into 'ambiguous' width are treated as wide in East Asian area, but as narrow in other. This requires use decision to determine width of such characters. 
""" def _cutdown(self, ucstr, space_left): l = 0 colwidth = encoding.ucolwidth for i in xrange(len(ucstr)): l += colwidth(ucstr[i]) if space_left < l: return (ucstr[:i], ucstr[i:]) return ucstr, '' # overriding of base class def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width): space_left = max(width - cur_len, 1) if self.break_long_words: cut, res = self._cutdown(reversed_chunks[-1], space_left) cur_line.append(cut) reversed_chunks[-1] = res elif not cur_line: cur_line.append(reversed_chunks.pop()) # this overriding code is imported from TextWrapper of Python 2.6 # to calculate columns of string by 'encoding.ucolwidth()' def _wrap_chunks(self, chunks): colwidth = encoding.ucolwidth lines = [] if self.width <= 0: raise ValueError("invalid width %r (must be > 0)" % self.width) # Arrange in reverse order so items can be efficiently popped # from a stack of chucks. chunks.reverse() while chunks: # Start the list of chunks that will make up the current line. # cur_len is just the length of all the chunks in cur_line. cur_line = [] cur_len = 0 # Figure out which static string will prefix this line. if lines: indent = self.subsequent_indent else: indent = self.initial_indent # Maximum width for this line. width = self.width - len(indent) # First chunk on line is whitespace -- drop it, unless this # is the very beginning of the text (i.e. no lines started yet). if self.drop_whitespace and chunks[-1].strip() == '' and lines: del chunks[-1] while chunks: l = colwidth(chunks[-1]) # Can at least squeeze this chunk onto the current line. if cur_len + l <= width: cur_line.append(chunks.pop()) cur_len += l # Nope, this line is full. else: break # The current line is full, and the next chunk is too big to # fit on *any* line (not just this one). if chunks and colwidth(chunks[-1]) > width: self._handle_long_word(chunks, cur_line, cur_len, width) # If the last chunk on this line is all whitespace, drop it. 
if (self.drop_whitespace and cur_line and cur_line[-1].strip() == ''): del cur_line[-1] # Convert current line back to a string and store it in list # of all lines (return value). if cur_line: lines.append(indent + ''.join(cur_line)) return lines global MBTextWrapper MBTextWrapper = tw return tw(**kwargs) def wrap(line, width, initindent='', hangindent=''): maxindent = max(len(hangindent), len(initindent)) if width <= maxindent: # adjust for weird terminal size width = max(78, maxindent + 1) line = line.decode(encoding.encoding, encoding.encodingmode) initindent = initindent.decode(encoding.encoding, encoding.encodingmode) hangindent = hangindent.decode(encoding.encoding, encoding.encodingmode) wrapper = MBTextWrapper(width=width, initial_indent=initindent, subsequent_indent=hangindent) return wrapper.fill(line).encode(encoding.encoding) def iterlines(iterator): for chunk in iterator: for line in chunk.splitlines(): yield line def expandpath(path): return os.path.expanduser(os.path.expandvars(path)) def hgcmd(): """Return the command used to execute current hg This is different from hgexecutable() because on Windows we want to avoid things opening new shell windows like batch files, so we get either the python call or current executable. """ if mainfrozen(): if getattr(sys, 'frozen', None) == 'macosx_app': # Env variable set by py2app return [os.environ['EXECUTABLEPATH']] else: return [sys.executable] return gethgcmd() def rundetached(args, condfn): """Execute the argument list in a detached process. condfn is a callable which is called repeatedly and should return True once the child process is known to have started successfully. At this point, the child process PID is returned. If the child process fails to start or finishes before condfn() evaluates to True, return -1. """ # Windows case is easier because the child process is either # successfully starting and validating the condition or exiting # on failure. We just poll on its PID. 
On Unix, if the child # process fails to start, it will be left in a zombie state until # the parent wait on it, which we cannot do since we expect a long # running process on success. Instead we listen for SIGCHLD telling # us our child process terminated. terminated = set() def handler(signum, frame): terminated.add(os.wait()) prevhandler = None SIGCHLD = getattr(signal, 'SIGCHLD', None) if SIGCHLD is not None: prevhandler = signal.signal(SIGCHLD, handler) try: pid = spawndetached(args) while not condfn(): if ((pid in terminated or not testpid(pid)) and not condfn()): return -1 time.sleep(0.1) return pid finally: if prevhandler is not None: signal.signal(signal.SIGCHLD, prevhandler) def interpolate(prefix, mapping, s, fn=None, escape_prefix=False): """Return the result of interpolating items in the mapping into string s. prefix is a single character string, or a two character string with a backslash as the first character if the prefix needs to be escaped in a regular expression. fn is an optional function that will be applied to the replacement text just before replacement. escape_prefix is an optional flag that allows using doubled prefix for its escaping. """ fn = fn or (lambda s: s) patterns = '|'.join(mapping.keys()) if escape_prefix: patterns += '|' + prefix if len(prefix) > 1: prefix_char = prefix[1:] else: prefix_char = prefix mapping[prefix_char] = prefix_char r = remod.compile(r'%s(%s)' % (prefix, patterns)) return r.sub(lambda x: fn(mapping[x.group()[1:]]), s) def getport(port): """Return the port for a given network service. If port is an integer, it's returned as is. If it's a string, it's looked up using socket.getservbyname(). If there's no matching service, error.Abort is raised. 
""" try: return int(port) except ValueError: pass try: return socket.getservbyname(port) except socket.error: raise Abort(_("no port number associated with service '%s'") % port) _booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True, '0': False, 'no': False, 'false': False, 'off': False, 'never': False} def parsebool(s): """Parse s into a boolean. If s is not a valid boolean, returns None. """ return _booleans.get(s.lower(), None) _hexdig = '0123456789ABCDEFabcdef' _hextochr = dict((a + b, chr(int(a + b, 16))) for a in _hexdig for b in _hexdig) def _urlunquote(s): """Decode HTTP/HTML % encoding. >>> _urlunquote('abc%20def') 'abc def' """ res = s.split('%') # fastpath if len(res) == 1: return s s = res[0] for item in res[1:]: try: s += _hextochr[item[:2]] + item[2:] except KeyError: s += '%' + item except UnicodeDecodeError: s += unichr(int(item[:2], 16)) + item[2:] return s class url(object): r"""Reliable URL parser. This parses URLs and provides attributes for the following components: ://:@:/?# Missing components are set to None. The only exception is fragment, which is set to '' if present but empty. If parsefragment is False, fragment is included in query. If parsequery is False, query is included in path. If both are False, both fragment and query are included in path. See http://www.ietf.org/rfc/rfc2396.txt for more information. Note that for backward compatibility reasons, bundle URLs do not take host names. That means 'bundle://../' has a path of '../'. 
Examples: >>> url('http://www.ietf.org/rfc/rfc2396.txt') >>> url('ssh://[::1]:2200//home/joe/repo') >>> url('file:///home/joe/repo') >>> url('file:///c:/temp/foo/') >>> url('bundle:foo') >>> url('bundle://../foo') >>> url(r'c:\foo\bar') >>> url(r'\\blah\blah\blah') >>> url(r'\\blah\blah\blah#baz') >>> url(r'file:///C:\users\me') Authentication credentials: >>> url('ssh://joe:xyz@x/repo') >>> url('ssh://joe@x/repo') Query strings and fragments: >>> url('http://host/a?b#c') >>> url('http://host/a?b#c', parsequery=False, parsefragment=False) """ _safechars = "!~*'()+" _safepchars = "/!~*'()+:\\" _matchscheme = remod.compile(r'^[a-zA-Z0-9+.\-]+:').match def __init__(self, path, parsequery=True, parsefragment=True): # We slowly chomp away at path until we have only the path left self.scheme = self.user = self.passwd = self.host = None self.port = self.path = self.query = self.fragment = None self._localpath = True self._hostport = '' self._origpath = path if parsefragment and '#' in path: path, self.fragment = path.split('#', 1) if not path: path = None # special case for Windows drive letters and UNC paths if hasdriveletter(path) or path.startswith(r'\\'): self.path = path return # For compatibility reasons, we can't handle bundle paths as # normal URLS if path.startswith('bundle:'): self.scheme = 'bundle' path = path[7:] if path.startswith('//'): path = path[2:] self.path = path return if self._matchscheme(path): parts = path.split(':', 1) if parts[0]: self.scheme, path = parts self._localpath = False if not path: path = None if self._localpath: self.path = '' return else: if self._localpath: self.path = path return if parsequery and '?' 
in path: path, self.query = path.split('?', 1) if not path: path = None if not self.query: self.query = None # // is required to specify a host/authority if path and path.startswith('//'): parts = path[2:].split('/', 1) if len(parts) > 1: self.host, path = parts else: self.host = parts[0] path = None if not self.host: self.host = None # path of file:///d is /d # path of file:///d:/ is d:/, not /d:/ if path and not hasdriveletter(path): path = '/' + path if self.host and '@' in self.host: self.user, self.host = self.host.rsplit('@', 1) if ':' in self.user: self.user, self.passwd = self.user.split(':', 1) if not self.host: self.host = None # Don't split on colons in IPv6 addresses without ports if (self.host and ':' in self.host and not (self.host.startswith('[') and self.host.endswith(']'))): self._hostport = self.host self.host, self.port = self.host.rsplit(':', 1) if not self.host: self.host = None if (self.host and self.scheme == 'file' and self.host not in ('localhost', '127.0.0.1', '[::1]')): raise Abort(_('file:// URLs can only refer to localhost')) self.path = path # leave the query string escaped for a in ('user', 'passwd', 'host', 'port', 'path', 'fragment'): v = getattr(self, a) if v is not None: setattr(self, a, _urlunquote(v)) def __repr__(self): attrs = [] for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path', 'query', 'fragment'): v = getattr(self, a) if v is not None: attrs.append('%s: %r' % (a, v)) return '' % ', '.join(attrs) def __str__(self): r"""Join the URL's components back into a URL string. 
Examples: >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar')) 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar' >>> str(url('http://user:pw@host:80/?foo=bar&baz=42')) 'http://user:pw@host:80/?foo=bar&baz=42' >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz')) 'http://user:pw@host:80/?foo=bar%3dbaz' >>> str(url('ssh://user:pw@[::1]:2200//home/joe#')) 'ssh://user:pw@[::1]:2200//home/joe#' >>> str(url('http://localhost:80//')) 'http://localhost:80//' >>> str(url('http://localhost:80/')) 'http://localhost:80/' >>> str(url('http://localhost:80')) 'http://localhost:80/' >>> str(url('bundle:foo')) 'bundle:foo' >>> str(url('bundle://../foo')) 'bundle:../foo' >>> str(url('path')) 'path' >>> str(url('file:///tmp/foo/bar')) 'file:///tmp/foo/bar' >>> str(url('file:///c:/tmp/foo/bar')) 'file:///c:/tmp/foo/bar' >>> print url(r'bundle:foo\bar') bundle:foo\bar >>> print url(r'file:///D:\data\hg') file:///D:\data\hg """ if self._localpath: s = self.path if self.scheme == 'bundle': s = 'bundle:' + s if self.fragment: s += '#' + self.fragment return s s = self.scheme + ':' if self.user or self.passwd or self.host: s += '//' elif self.scheme and (not self.path or self.path.startswith('/') or hasdriveletter(self.path)): s += '//' if hasdriveletter(self.path): s += '/' if self.user: s += urllib.quote(self.user, safe=self._safechars) if self.passwd: s += ':' + urllib.quote(self.passwd, safe=self._safechars) if self.user or self.passwd: s += '@' if self.host: if not (self.host.startswith('[') and self.host.endswith(']')): s += urllib.quote(self.host) else: s += self.host if self.port: s += ':' + urllib.quote(self.port) if self.host: s += '/' if self.path: # TODO: similar to the query string, we should not unescape the # path when we store it, the path might contain '%2f' = '/', # which we should *not* escape. s += urllib.quote(self.path, safe=self._safepchars) if self.query: # we store the query in escaped form. s += '?' 
+ self.query if self.fragment is not None: s += '#' + urllib.quote(self.fragment, safe=self._safepchars) return s def authinfo(self): user, passwd = self.user, self.passwd try: self.user, self.passwd = None, None s = str(self) finally: self.user, self.passwd = user, passwd if not self.user: return (s, None) # authinfo[1] is passed to urllib2 password manager, and its # URIs must not contain credentials. The host is passed in the # URIs list because Python < 2.4.3 uses only that to search for # a password. return (s, (None, (s, self.host), self.user, self.passwd or '')) def isabs(self): if self.scheme and self.scheme != 'file': return True # remote URL if hasdriveletter(self.path): return True # absolute for our purposes - can't be joined() if self.path.startswith(r'\\'): return True # Windows UNC path if self.path.startswith('/'): return True # POSIX-style return False def localpath(self): if self.scheme == 'file' or self.scheme == 'bundle': path = self.path or '/' # For Windows, we need to promote hosts containing drive # letters to paths with drive letters. 
if hasdriveletter(self._hostport): path = self._hostport + '/' + self.path elif (self.host is not None and self.path and not hasdriveletter(path)): path = '/' + path return path return self._origpath def islocal(self): '''whether localpath will return something that posixfile can open''' return (not self.scheme or self.scheme == 'file' or self.scheme == 'bundle') def hasscheme(path): return bool(url(path).scheme) def hasdriveletter(path): return path and path[1:2] == ':' and path[0:1].isalpha() def urllocalpath(path): return url(path, parsequery=False, parsefragment=False).localpath() def hidepassword(u): '''hide user credential in a url string''' u = url(u) if u.passwd: u.passwd = '***' return str(u) def removeauth(u): '''remove all authentication information from a url string''' u = url(u) u.user = u.passwd = None return str(u) def isatty(fp): try: return fp.isatty() except AttributeError: return False timecount = unitcountfn( (1, 1e3, _('%.0f s')), (100, 1, _('%.1f s')), (10, 1, _('%.2f s')), (1, 1, _('%.3f s')), (100, 0.001, _('%.1f ms')), (10, 0.001, _('%.2f ms')), (1, 0.001, _('%.3f ms')), (100, 0.000001, _('%.1f us')), (10, 0.000001, _('%.2f us')), (1, 0.000001, _('%.3f us')), (100, 0.000000001, _('%.1f ns')), (10, 0.000000001, _('%.2f ns')), (1, 0.000000001, _('%.3f ns')), ) _timenesting = [0] def timed(func): '''Report the execution time of a function call to stderr. During development, use as a decorator when you need to measure the cost of a function, e.g. 
as follows: @util.timed def foo(a, b, c): pass ''' def wrapper(*args, **kwargs): start = time.time() indent = 2 _timenesting[0] += indent try: return func(*args, **kwargs) finally: elapsed = time.time() - start _timenesting[0] -= indent sys.stderr.write('%s%s: %s\n' % (' ' * _timenesting[0], func.__name__, timecount(elapsed))) return wrapper _sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30), ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1)) def sizetoint(s): '''Convert a space specifier to a byte count. >>> sizetoint('30') 30 >>> sizetoint('2.2kb') 2252 >>> sizetoint('6M') 6291456 ''' t = s.strip().lower() try: for k, u in _sizeunits: if t.endswith(k): return int(float(t[:-len(k)]) * u) return int(t) except ValueError: raise error.ParseError(_("couldn't parse size: %s") % s) class hooks(object): '''A collection of hook functions that can be used to extend a function's behavior. Hooks are called in lexicographic order, based on the names of their sources.''' def __init__(self): self._hooks = [] def add(self, source, hook): self._hooks.append((source, hook)) def __call__(self, *args): self._hooks.sort(key=lambda x: x[0]) results = [] for source, hook in self._hooks: results.append(hook(*args)) return results def debugstacktrace(msg='stacktrace', skip=0, f=sys.stderr, otherf=sys.stdout): '''Writes a message to f (stderr) with a nicely formatted stacktrace. Skips the 'skip' last entries. By default it will flush stdout first. It can be used everywhere and do intentionally not require an ui object. Not be used in production code but very convenient while developing. 
''' if otherf: otherf.flush() f.write('%s at:\n' % msg) entries = [('%s:%s' % (fn, ln), func) for fn, ln, func, _text in traceback.extract_stack()[:-skip - 1]] if entries: fnmax = max(len(entry[0]) for entry in entries) for fnln, func in entries: f.write(' %-*s in %s\n' % (fnmax, fnln, func)) f.flush() class dirs(object): '''a multiset of directory names from a dirstate or manifest''' def __init__(self, map, skip=None): self._dirs = {} addpath = self.addpath if safehasattr(map, 'iteritems') and skip is not None: for f, s in map.iteritems(): if s[0] != skip: addpath(f) else: for f in map: addpath(f) def addpath(self, path): dirs = self._dirs for base in finddirs(path): if base in dirs: dirs[base] += 1 return dirs[base] = 1 def delpath(self, path): dirs = self._dirs for base in finddirs(path): if dirs[base] > 1: dirs[base] -= 1 return del dirs[base] def __iter__(self): return self._dirs.iterkeys() def __contains__(self, d): return d in self._dirs if safehasattr(parsers, 'dirs'): dirs = parsers.dirs def finddirs(path): pos = path.rfind('/') while pos != -1: yield path[:pos] pos = path.rfind('/', 0, pos) # compression utility class nocompress(object): def compress(self, x): return x def flush(self): return "" compressors = { None: nocompress, # lambda to prevent early import 'BZ': lambda: bz2.BZ2Compressor(), 'GZ': lambda: zlib.compressobj(), } # also support the old form by courtesies compressors['UN'] = compressors[None] def _makedecompressor(decompcls): def generator(f): d = decompcls() for chunk in filechunkiter(f): yield d.decompress(chunk) def func(fh): return chunkbuffer(generator(fh)) return func class ctxmanager(object): '''A context manager for use in 'with' blocks to allow multiple contexts to be entered at once. This is both safer and more flexible than contextlib.nested. Once Mercurial supports Python 2.7+, this will become mostly unnecessary. ''' def __init__(self, *args): '''Accepts a list of no-argument functions that return context managers. 
        These will be invoked at __call__ time.'''
        self._pending = args
        self._atexit = []

    def __enter__(self):
        return self

    def enter(self):
        '''Create and enter context managers in the order in which they were
        passed to the constructor.'''
        values = []
        for func in self._pending:
            obj = func()
            values.append(obj.__enter__())
            self._atexit.append(obj.__exit__)
        # Entering twice is a programming error; deleting _pending makes a
        # second enter() fail loudly with AttributeError.
        del self._pending
        return values

    def atexit(self, func, *args, **kwargs):
        '''Add a function to call when this context manager exits. The
        ordering of multiple atexit calls is unspecified, save that
        they will happen before any __exit__ functions.'''
        # Wrapped so it matches the (exc_type, exc_val, exc_tb) signature of
        # the stored __exit__ callables; the exception info is ignored.
        def wrapper(exc_type, exc_val, exc_tb):
            func(*args, **kwargs)
        self._atexit.append(wrapper)
        return func

    def __exit__(self, exc_type, exc_val, exc_tb):
        '''Context managers are exited in the reverse order from which
        they were created.'''
        received = exc_type is not None
        suppressed = False
        pending = None
        # Exit handlers were appended in entry order; reverse for LIFO exit.
        self._atexit.reverse()
        for exitfunc in self._atexit:
            try:
                # A truthy return from an inner __exit__ suppresses the
                # current exception for the remaining handlers.
                if exitfunc(exc_type, exc_val, exc_tb):
                    suppressed = True
                    exc_type = None
                    exc_val = None
                    exc_tb = None
            except BaseException:
                # An exception raised by an exit handler replaces whatever
                # was being propagated and is re-raised after all handlers
                # have run.
                pending = sys.exc_info()
                exc_type, exc_val, exc_tb = pending = sys.exc_info()
        del self._atexit
        if pending:
            # NOTE(review): bare 'raise exc_val' loses the original
            # traceback on Python 2 — presumably accepted here; confirm
            # before changing propagation semantics.
            raise exc_val
        return received and suppressed

def _bz2():
    d = bz2.BZ2Decompressor()
    # Bzip2 stream start with BZ, but we stripped it.
    # we put it back for good measure.
    d.decompress('BZ')
    return d

decompressors = {None: lambda fh: fh,
                 '_truncatedBZ': _makedecompressor(_bz2),
                 'BZ': _makedecompressor(lambda: bz2.BZ2Decompressor()),
                 'GZ': _makedecompressor(lambda: zlib.decompressobj()),
                 }
# also support the old form by courtesies
decompressors['UN'] = decompressors[None]

# convenient shortcut
dst = debugstacktrace
mercurial-3.7.3/mercurial/sslutil.py0000644000175000017500000002057512676531525017174 0ustar mpmmpm00000000000000# sslutil.py - SSL handling for mercurial
#
# Copyright 2005, 2006, 2007, 2008 Matt Mackall
# Copyright 2006, 2007 Alexis S. L.
Carvalho # Copyright 2006 Vadim Gelfer # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import os import ssl import sys from .i18n import _ from . import error, util hassni = getattr(ssl, 'HAS_SNI', False) _canloaddefaultcerts = False try: ssl_context = ssl.SSLContext _canloaddefaultcerts = util.safehasattr(ssl_context, 'load_default_certs') def wrapsocket(sock, keyfile, certfile, ui, cert_reqs=ssl.CERT_NONE, ca_certs=None, serverhostname=None): # Allow any version of SSL starting with TLSv1 and # up. Note that specifying TLSv1 here prohibits use of # newer standards (like TLSv1_2), so this is the right way # to do this. Note that in the future it'd be better to # support using ssl.create_default_context(), which sets # up a bunch of things in smart ways (strong ciphers, # protocol versions, etc) and is upgraded by Python # maintainers for us, but that breaks too many things to # do it in a hurry. 
sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23) sslcontext.options |= ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 if certfile is not None: def password(): f = keyfile or certfile return ui.getpass(_('passphrase for %s: ') % f, '') sslcontext.load_cert_chain(certfile, keyfile, password) sslcontext.verify_mode = cert_reqs if ca_certs is not None: sslcontext.load_verify_locations(cafile=ca_certs) elif _canloaddefaultcerts: sslcontext.load_default_certs() sslsocket = sslcontext.wrap_socket(sock, server_hostname=serverhostname) # check if wrap_socket failed silently because socket had been # closed # - see http://bugs.python.org/issue13721 if not sslsocket.cipher(): raise error.Abort(_('ssl connection failed')) return sslsocket except AttributeError: def wrapsocket(sock, keyfile, certfile, ui, cert_reqs=ssl.CERT_NONE, ca_certs=None, serverhostname=None): sslsocket = ssl.wrap_socket(sock, keyfile, certfile, cert_reqs=cert_reqs, ca_certs=ca_certs, ssl_version=ssl.PROTOCOL_TLSv1) # check if wrap_socket failed silently because socket had been # closed # - see http://bugs.python.org/issue13721 if not sslsocket.cipher(): raise error.Abort(_('ssl connection failed')) return sslsocket def _verifycert(cert, hostname): '''Verify that cert (in socket.getpeercert() format) matches hostname. CRLs is not handled. Returns error message if any problems are found and None on success. ''' if not cert: return _('no certificate received') dnsname = hostname.lower() def matchdnsname(certname): return (certname == dnsname or '.' in dnsname and certname == '*.' 
                + dnsname.split('.', 1)[1])
    # NOTE(review): this closes matchdnsname(), defined on the previous
    # line — a leftmost '*.' wildcard matches exactly one label of the
    # hostname.

    san = cert.get('subjectAltName', [])
    if san:
        # Only DNS-type subjectAltName entries participate in matching.
        certnames = [value.lower() for key, value in san if key == 'DNS']
        for name in certnames:
            if matchdnsname(name):
                return None
        if certnames:
            return _('certificate is for %s') % ', '.join(certnames)

    # subject is only checked when subjectAltName is empty
    for s in cert.get('subject', []):
        key, value = s[0]
        if key == 'commonName':
            try:
                # 'subject' entries are unicode
                certname = value.lower().encode('ascii')
            except UnicodeEncodeError:
                # Non-ASCII (IDN) commonName cannot be compared here.
                return _('IDN in certificate not supported')
            if matchdnsname(certname):
                return None
            return _('certificate is for %s') % certname
    return _('no commonName or subjectAltName found in certificate')

# CERT_REQUIRED means fetch the cert from the server all the time AND
# validate it against the CA store provided in web.cacerts.

def _plainapplepython():
    """return true if this seems to be a pure Apple Python that
    * is unfrozen and presumably has the whole mercurial module in the file
      system
    * presumably is an Apple Python that uses Apple OpenSSL which has patches
      for using system certificate store CAs in addition to the provided
      cacerts file
    """
    if sys.platform != 'darwin' or util.mainfrozen() or not sys.executable:
        return False
    # Resolve symlinks so /usr/bin/python aliases are recognized.
    exe = os.path.realpath(sys.executable).lower()
    return (exe.startswith('/usr/bin/python') or
            exe.startswith('/system/library/frameworks/python.framework/'))

def _defaultcacerts():
    """return path to CA certificates; None for system's store; ! to disable"""
    if _plainapplepython():
        # A dummy cert file shipped next to this module lets Apple's patched
        # OpenSSL fall back to the system keychain CAs.
        dummycert = os.path.join(os.path.dirname(__file__), 'dummycert.pem')
        if os.path.exists(dummycert):
            return dummycert
    if _canloaddefaultcerts:
        # SSLContext.load_default_certs is available: use the OS store.
        return None
    return '!'
def sslkwargs(ui, host):
    # Build the keyword arguments for wrapsocket() for connecting to 'host',
    # based on the hostfingerprints and web.cacerts configuration.
    kws = {'ui': ui}
    hostfingerprint = ui.config('hostfingerprints', host)
    if hostfingerprint:
        # A pinned fingerprint replaces CA validation entirely.
        return kws
    cacerts = ui.config('web', 'cacerts')
    if cacerts == '!':
        # Explicitly disabled: no CA configuration at all.
        pass
    elif cacerts:
        cacerts = util.expandpath(cacerts)
        if not os.path.exists(cacerts):
            raise error.Abort(_('could not find web.cacerts: %s') % cacerts)
    else:
        # Nothing configured: pick a platform default and record it so later
        # consumers see where the CAs came from.
        cacerts = _defaultcacerts()
        if cacerts and cacerts != '!':
            ui.debug('using %s to enable OS X system CA\n' % cacerts)
        ui.setconfig('web', 'cacerts', cacerts, 'defaultcacerts')
    if cacerts != '!':
        # CERT_REQUIRED: always fetch and validate the peer certificate.
        kws.update({'ca_certs': cacerts,
                    'cert_reqs': ssl.CERT_REQUIRED,
                    })
    return kws

class validator(object):
    # Callable that validates an established SSL socket for a given host,
    # via pinned fingerprint or CA verification depending on configuration.
    def __init__(self, ui, host):
        self.ui = ui
        self.host = host

    def __call__(self, sock, strict=False):
        host = self.host
        cacerts = self.ui.config('web', 'cacerts')
        hostfingerprint = self.ui.config('hostfingerprints', host)
        # work around http://bugs.python.org/issue13721
        if not sock.cipher():
            raise error.Abort(_('%s ssl connection error') % host)
        try:
            # DER form for fingerprinting, parsed dict form for name checks.
            peercert = sock.getpeercert(True)
            peercert2 = sock.getpeercert()
        except AttributeError:
            raise error.Abort(_('%s ssl connection error') % host)

        if not peercert:
            raise error.Abort(_('%s certificate error: '
                               'no certificate received') % host)
        peerfingerprint = util.sha1(peercert).hexdigest()
        # Render as colon-separated hex pairs for display/hints.
        nicefingerprint = ":".join([peerfingerprint[x:x + 2]
                                    for x in xrange(0, len(peerfingerprint),
                                                    2)])
        if hostfingerprint:
            # Pinned fingerprint: compare ignoring case and colons.
            if peerfingerprint.lower() != \
                    hostfingerprint.replace(':', '').lower():
                raise error.Abort(_('certificate for %s has unexpected '
                                   'fingerprint %s') %
                                  (host, nicefingerprint),
                                  hint=_('check hostfingerprint '
                                         'configuration'))
            self.ui.debug('%s certificate matched fingerprint %s\n' %
                          (host, nicefingerprint))
        elif cacerts != '!':
            # CA validation path: check the certificate's names against host.
            msg = _verifycert(peercert2, host)
            if msg:
                raise error.Abort(_('%s certificate error: %s') % (host, msg),
                                  hint=_('configure hostfingerprint %s or use '
                                         '--insecure to connect insecurely') %
                                  nicefingerprint)
            self.ui.debug('%s certificate successfully verified\n' % host)
        elif
strict: raise error.Abort(_('%s certificate with fingerprint %s not ' 'verified') % (host, nicefingerprint), hint=_('check hostfingerprints or web.cacerts ' 'config setting')) else: self.ui.warn(_('warning: %s certificate with fingerprint %s not ' 'verified (check hostfingerprints or web.cacerts ' 'config setting)\n') % (host, nicefingerprint)) mercurial-3.7.3/mercurial/changegroup.py0000644000175000017500000013041112676531525017766 0ustar mpmmpm00000000000000# changegroup.py - Mercurial changegroup manipulation functions # # Copyright 2006 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import os import struct import tempfile import weakref from .i18n import _ from .node import ( hex, nullid, nullrev, short, ) from . import ( branchmap, dagutil, discovery, error, mdiff, phases, util, ) _CHANGEGROUPV1_DELTA_HEADER = "20s20s20s20s" _CHANGEGROUPV2_DELTA_HEADER = "20s20s20s20s20s" _CHANGEGROUPV3_DELTA_HEADER = ">20s20s20s20s20sH" def readexactly(stream, n): '''read n bytes from stream.read and abort if less was available''' s = stream.read(n) if len(s) < n: raise error.Abort(_("stream ended unexpectedly" " (got %d bytes, expected %d)") % (len(s), n)) return s def getchunk(stream): """return the next chunk from stream as a string""" d = readexactly(stream, 4) l = struct.unpack(">l", d)[0] if l <= 4: if l: raise error.Abort(_("invalid chunk length %d") % l) return "" return readexactly(stream, l - 4) def chunkheader(length): """return a changegroup chunk header (string)""" return struct.pack(">l", length + 4) def closechunk(): """return a changegroup chunk header (string) for a zero-length chunk""" return struct.pack(">l", 0) def combineresults(results): """logic to combine 0 or more addchangegroup results into one""" changedheads = 0 result = 1 for ret in results: # If any changegroup result is 0, return 0 if ret == 0: result = 0 
break if ret < -1: changedheads += ret + 1 elif ret > 1: changedheads += ret - 1 if changedheads > 0: result = 1 + changedheads elif changedheads < 0: result = -1 + changedheads return result bundletypes = { "": ("", None), # only when using unbundle on ssh and old http servers # since the unification ssh accepts a header but there # is no capability signaling it. "HG20": (), # special-cased below "HG10UN": ("HG10UN", None), "HG10BZ": ("HG10", 'BZ'), "HG10GZ": ("HG10GZ", 'GZ'), } # hgweb uses this list to communicate its preferred type bundlepriority = ['HG10GZ', 'HG10BZ', 'HG10UN'] def writechunks(ui, chunks, filename, vfs=None): """Write chunks to a file and return its filename. The stream is assumed to be a bundle file. Existing files will not be overwritten. If no filename is specified, a temporary file is created. """ fh = None cleanup = None try: if filename: if vfs: fh = vfs.open(filename, "wb") else: fh = open(filename, "wb") else: fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg") fh = os.fdopen(fd, "wb") cleanup = filename for c in chunks: fh.write(c) cleanup = None return filename finally: if fh is not None: fh.close() if cleanup is not None: if filename and vfs: vfs.unlink(cleanup) else: os.unlink(cleanup) def writebundle(ui, cg, filename, bundletype, vfs=None, compression=None): """Write a bundle file and return its filename. Existing files will not be overwritten. If no filename is specified, a temporary file is created. bz2 compression can be turned off. The bundle file will be deleted in case of errors. """ if bundletype == "HG20": from . 
import bundle2 bundle = bundle2.bundle20(ui) bundle.setcompression(compression) part = bundle.newpart('changegroup', data=cg.getchunks()) part.addparam('version', cg.version) chunkiter = bundle.getchunks() else: # compression argument is only for the bundle2 case assert compression is None if cg.version != '01': raise error.Abort(_('old bundle types only supports v1 ' 'changegroups')) header, comp = bundletypes[bundletype] if comp not in util.compressors: raise error.Abort(_('unknown stream compression type: %s') % comp) z = util.compressors[comp]() subchunkiter = cg.getchunks() def chunkiter(): yield header for chunk in subchunkiter: yield z.compress(chunk) yield z.flush() chunkiter = chunkiter() # parse the changegroup data, otherwise we will block # in case of sshrepo because we don't know the end of the stream # an empty chunkgroup is the end of the changegroup # a changegroup has at least 2 chunkgroups (changelog and manifest). # after that, an empty chunkgroup is the end of the changegroup return writechunks(ui, chunkiter, filename, vfs=vfs) class cg1unpacker(object): """Unpacker for cg1 changegroup streams. A changegroup unpacker handles the framing of the revision data in the wire format. Most consumers will want to use the apply() method to add the changes from the changegroup to a repository. If you're forwarding a changegroup unmodified to another consumer, use getchunks(), which returns an iterator of changegroup chunks. This is mostly useful for cases where you need to know the data stream has ended by observing the end of the changegroup. deltachunk() is useful only if you're applying delta data. Most consumers should prefer apply() instead. A few other public methods exist. Those are used only for bundlerepo and some debug commands - their use is discouraged. 
""" deltaheader = _CHANGEGROUPV1_DELTA_HEADER deltaheadersize = struct.calcsize(deltaheader) version = '01' _grouplistcount = 1 # One list of files after the manifests def __init__(self, fh, alg): if alg == 'UN': alg = None # get more modern without breaking too much if not alg in util.decompressors: raise error.Abort(_('unknown stream compression type: %s') % alg) if alg == 'BZ': alg = '_truncatedBZ' self._stream = util.decompressors[alg](fh) self._type = alg self.callback = None # These methods (compressed, read, seek, tell) all appear to only # be used by bundlerepo, but it's a little hard to tell. def compressed(self): return self._type is not None def read(self, l): return self._stream.read(l) def seek(self, pos): return self._stream.seek(pos) def tell(self): return self._stream.tell() def close(self): return self._stream.close() def _chunklength(self): d = readexactly(self._stream, 4) l = struct.unpack(">l", d)[0] if l <= 4: if l: raise error.Abort(_("invalid chunk length %d") % l) return 0 if self.callback: self.callback() return l - 4 def changelogheader(self): """v10 does not have a changelog header chunk""" return {} def manifestheader(self): """v10 does not have a manifest header chunk""" return {} def filelogheader(self): """return the header of the filelogs chunk, v10 only has the filename""" l = self._chunklength() if not l: return {} fname = readexactly(self._stream, l) return {'filename': fname} def _deltaheader(self, headertuple, prevnode): node, p1, p2, cs = headertuple if prevnode is None: deltabase = p1 else: deltabase = prevnode flags = 0 return node, p1, p2, deltabase, cs, flags def deltachunk(self, prevnode): l = self._chunklength() if not l: return {} headerdata = readexactly(self._stream, self.deltaheadersize) header = struct.unpack(self.deltaheader, headerdata) delta = readexactly(self._stream, l - self.deltaheadersize) node, p1, p2, deltabase, cs, flags = self._deltaheader(header, prevnode) return {'node': node, 'p1': p1, 'p2': p2, 'cs': 
cs, 'deltabase': deltabase, 'delta': delta, 'flags': flags} def getchunks(self): """returns all the chunks contains in the bundle Used when you need to forward the binary stream to a file or another network API. To do so, it parse the changegroup data, otherwise it will block in case of sshrepo because it don't know the end of the stream. """ # an empty chunkgroup is the end of the changegroup # a changegroup has at least 2 chunkgroups (changelog and manifest). # after that, changegroup versions 1 and 2 have a series of groups # with one group per file. changegroup 3 has a series of directory # manifests before the files. count = 0 emptycount = 0 while emptycount < self._grouplistcount: empty = True count += 1 while True: chunk = getchunk(self) if not chunk: if empty and count > 2: emptycount += 1 break empty = False yield chunkheader(len(chunk)) pos = 0 while pos < len(chunk): next = pos + 2**20 yield chunk[pos:next] pos = next yield closechunk() def _unpackmanifests(self, repo, revmap, trp, prog, numchanges): # We know that we'll never have more manifests than we had # changesets. self.callback = prog(_('manifests'), numchanges) # no need to check for empty manifest group here: # if the result of the merge of 1 and 2 is the same in 3 and 4, # no new manifest will be created and the manifest group will # be empty during the pull self.manifestheader() repo.manifest.addgroup(self, revmap, trp) repo.ui.progress(_('manifests'), None) def apply(self, repo, srctype, url, emptyok=False, targetphase=phases.draft, expectedtotal=None): """Add the changegroup returned by source.read() to this repo. srctype is a string like 'push', 'pull', or 'unbundle'. url is the URL of the repo where this changegroup is coming from. 
Return an integer summarizing the change to this repo: - nothing changed or no source: 0 - more heads than before: 1+added heads (2..n) - fewer heads than before: -1-removed heads (-2..-n) - number of heads stays the same: 1 """ repo = repo.unfiltered() def csmap(x): repo.ui.debug("add changeset %s\n" % short(x)) return len(cl) def revmap(x): return cl.rev(x) changesets = files = revisions = 0 try: with repo.transaction("\n".join([srctype, util.hidepassword(url)])) as tr: # The transaction could have been created before and already # carries source information. In this case we use the top # level data. We overwrite the argument because we need to use # the top level value (if they exist) in this function. srctype = tr.hookargs.setdefault('source', srctype) url = tr.hookargs.setdefault('url', url) repo.hook('prechangegroup', throw=True, **tr.hookargs) # write changelog data to temp files so concurrent readers # will not see an inconsistent view cl = repo.changelog cl.delayupdate(tr) oldheads = cl.heads() trp = weakref.proxy(tr) # pull off the changeset group repo.ui.status(_("adding changesets\n")) clstart = len(cl) class prog(object): def __init__(self, step, total): self._step = step self._total = total self._count = 1 def __call__(self): repo.ui.progress(self._step, self._count, unit=_('chunks'), total=self._total) self._count += 1 self.callback = prog(_('changesets'), expectedtotal) efiles = set() def onchangelog(cl, node): efiles.update(cl.read(node)[3]) self.changelogheader() srccontent = cl.addgroup(self, csmap, trp, addrevisioncb=onchangelog) efiles = len(efiles) if not (srccontent or emptyok): raise error.Abort(_("received changelog group is empty")) clend = len(cl) changesets = clend - clstart repo.ui.progress(_('changesets'), None) # pull off the manifest group repo.ui.status(_("adding manifests\n")) self._unpackmanifests(repo, revmap, trp, prog, changesets) needfiles = {} if repo.ui.configbool('server', 'validate', default=False): # validate incoming 
csets have their manifests for cset in xrange(clstart, clend): mfnode = repo.changelog.read( repo.changelog.node(cset))[0] mfest = repo.manifest.readdelta(mfnode) # store file nodes we must see for f, n in mfest.iteritems(): needfiles.setdefault(f, set()).add(n) # process the files repo.ui.status(_("adding file changes\n")) self.callback = None pr = prog(_('files'), efiles) newrevs, newfiles = _addchangegroupfiles( repo, self, revmap, trp, pr, needfiles) revisions += newrevs files += newfiles dh = 0 if oldheads: heads = cl.heads() dh = len(heads) - len(oldheads) for h in heads: if h not in oldheads and repo[h].closesbranch(): dh -= 1 htext = "" if dh: htext = _(" (%+d heads)") % dh repo.ui.status(_("added %d changesets" " with %d changes to %d files%s\n") % (changesets, revisions, files, htext)) repo.invalidatevolatilesets() if changesets > 0: if 'node' not in tr.hookargs: tr.hookargs['node'] = hex(cl.node(clstart)) tr.hookargs['node_last'] = hex(cl.node(clend - 1)) hookargs = dict(tr.hookargs) else: hookargs = dict(tr.hookargs) hookargs['node'] = hex(cl.node(clstart)) hookargs['node_last'] = hex(cl.node(clend - 1)) repo.hook('pretxnchangegroup', throw=True, **hookargs) added = [cl.node(r) for r in xrange(clstart, clend)] publishing = repo.publishing() if srctype in ('push', 'serve'): # Old servers can not push the boundary themselves. # New servers won't push the boundary if changeset already # exists locally as secret # # We should not use added here but the list of all change in # the bundle if publishing: phases.advanceboundary(repo, tr, phases.public, srccontent) else: # Those changesets have been pushed from the # outside, their phases are going to be pushed # alongside. Therefor `targetphase` is # ignored. 
phases.advanceboundary(repo, tr, phases.draft, srccontent) phases.retractboundary(repo, tr, phases.draft, added) elif srctype != 'strip': # publishing only alter behavior during push # # strip should not touch boundary at all phases.retractboundary(repo, tr, targetphase, added) if changesets > 0: if srctype != 'strip': # During strip, branchcache is invalid but # coming call to `destroyed` will repair it. # In other case we can safely update cache on # disk. branchmap.updatecache(repo.filtered('served')) def runhooks(): # These hooks run when the lock releases, not when the # transaction closes. So it's possible for the changelog # to have changed since we last saw it. if clstart >= len(repo): return # forcefully update the on-disk branch cache repo.ui.debug("updating the branch cache\n") repo.hook("changegroup", **hookargs) for n in added: args = hookargs.copy() args['node'] = hex(n) del args['node_last'] repo.hook("incoming", **args) newheads = [h for h in repo.heads() if h not in oldheads] repo.ui.log("incoming", "%s incoming changes - new heads: %s\n", len(added), ', '.join([hex(c[:6]) for c in newheads])) tr.addpostclose('changegroup-runhooks-%020i' % clstart, lambda tr: repo._afterlock(runhooks)) finally: repo.ui.flush() # never return 0 here: if dh < 0: return dh - 1 else: return dh + 1 class cg2unpacker(cg1unpacker): """Unpacker for cg2 streams. cg2 streams add support for generaldelta, so the delta header format is slightly different. All other features about the data remain the same. """ deltaheader = _CHANGEGROUPV2_DELTA_HEADER deltaheadersize = struct.calcsize(deltaheader) version = '02' def _deltaheader(self, headertuple, prevnode): node, p1, p2, deltabase, cs = headertuple flags = 0 return node, p1, p2, deltabase, cs, flags class cg3unpacker(cg2unpacker): """Unpacker for cg3 streams. cg3 streams add support for exchanging treemanifests and revlog flags. It adds the revlog flags to the delta header and an empty chunk separating manifests and files. 
""" deltaheader = _CHANGEGROUPV3_DELTA_HEADER deltaheadersize = struct.calcsize(deltaheader) version = '03' _grouplistcount = 2 # One list of manifests and one list of files def _deltaheader(self, headertuple, prevnode): node, p1, p2, deltabase, cs, flags = headertuple return node, p1, p2, deltabase, cs, flags def _unpackmanifests(self, repo, revmap, trp, prog, numchanges): super(cg3unpacker, self)._unpackmanifests(repo, revmap, trp, prog, numchanges) while True: chunkdata = self.filelogheader() if not chunkdata: break # If we get here, there are directory manifests in the changegroup d = chunkdata["filename"] repo.ui.debug("adding %s revisions\n" % d) dirlog = repo.manifest.dirlog(d) if not dirlog.addgroup(self, revmap, trp): raise error.Abort(_("received dir revlog group is empty")) class headerlessfixup(object): def __init__(self, fh, h): self._h = h self._fh = fh def read(self, n): if self._h: d, self._h = self._h[:n], self._h[n:] if len(d) < n: d += readexactly(self._fh, n - len(d)) return d return readexactly(self._fh, n) def _moddirs(files): """Given a set of modified files, find the list of modified directories. This returns a list of (path to changed dir, changed dir) tuples, as that's what the one client needs anyway. >>> _moddirs(['a/b/c.py', 'a/b/c.txt', 'a/d/e/f/g.txt', 'i.txt', ]) [('/', 'a/'), ('a/', 'b/'), ('a/', 'd/'), ('a/d/', 'e/'), ('a/d/e/', 'f/')] """ alldirs = set() for f in files: path = f.split('/')[:-1] for i in xrange(len(path) - 1, -1, -1): dn = '/'.join(path[:i]) current = dn + '/', path[i] + '/' if current in alldirs: break alldirs.add(current) return sorted(alldirs) class cg1packer(object): deltaheader = _CHANGEGROUPV1_DELTA_HEADER version = '01' def __init__(self, repo, bundlecaps=None): """Given a source repo, construct a bundler. bundlecaps is optional and can be used to specify the set of capabilities which can be used to build the bundle. """ # Set of capabilities we can use to build the bundle. 
if bundlecaps is None: bundlecaps = set() self._bundlecaps = bundlecaps # experimental config: bundle.reorder reorder = repo.ui.config('bundle', 'reorder', 'auto') if reorder == 'auto': reorder = None else: reorder = util.parsebool(reorder) self._repo = repo self._reorder = reorder self._progress = repo.ui.progress if self._repo.ui.verbose and not self._repo.ui.debugflag: self._verbosenote = self._repo.ui.note else: self._verbosenote = lambda s: None def close(self): return closechunk() def fileheader(self, fname): return chunkheader(len(fname)) + fname def group(self, nodelist, revlog, lookup, units=None): """Calculate a delta group, yielding a sequence of changegroup chunks (strings). Given a list of changeset revs, return a set of deltas and metadata corresponding to nodes. The first delta is first parent(nodelist[0]) -> nodelist[0], the receiver is guaranteed to have this parent as it has all history before these changesets. In the case firstparent is nullrev the changegroup starts with a full revision. If units is not None, progress detail will be generated, units specifies the type of revlog that is touched (changelog, manifest, etc.). 
""" # if we don't have any revisions touched by these changesets, bail if len(nodelist) == 0: yield self.close() return # for generaldelta revlogs, we linearize the revs; this will both be # much quicker and generate a much smaller bundle if (revlog._generaldelta and self._reorder is None) or self._reorder: dag = dagutil.revlogdag(revlog) revs = set(revlog.rev(n) for n in nodelist) revs = dag.linearize(revs) else: revs = sorted([revlog.rev(n) for n in nodelist]) # add the parent of the first rev p = revlog.parentrevs(revs[0])[0] revs.insert(0, p) # build deltas total = len(revs) - 1 msgbundling = _('bundling') for r in xrange(len(revs) - 1): if units is not None: self._progress(msgbundling, r + 1, unit=units, total=total) prev, curr = revs[r], revs[r + 1] linknode = lookup(revlog.node(curr)) for c in self.revchunk(revlog, curr, prev, linknode): yield c if units is not None: self._progress(msgbundling, None) yield self.close() # filter any nodes that claim to be part of the known set def prune(self, revlog, missing, commonrevs): rr, rl = revlog.rev, revlog.linkrev return [n for n in missing if rl(rr(n)) not in commonrevs] def _packmanifests(self, mfnodes, tmfnodes, lookuplinknode): """Pack flat manifests into a changegroup stream.""" ml = self._repo.manifest size = 0 for chunk in self.group( mfnodes, ml, lookuplinknode, units=_('manifests')): size += len(chunk) yield chunk self._verbosenote(_('%8.i (manifests)\n') % size) # It looks odd to assert this here, but tmfnodes doesn't get # filled in until after we've called lookuplinknode for # sending root manifests, so the only way to tell the streams # got crossed is to check after we've done all the work. 
assert not tmfnodes def generate(self, commonrevs, clnodes, fastpathlinkrev, source): '''yield a sequence of changegroup chunks (strings)''' repo = self._repo cl = repo.changelog ml = repo.manifest clrevorder = {} mfs = {} # needed manifests tmfnodes = {} fnodes = {} # needed file nodes # maps manifest node id -> set(changed files) mfchangedfiles = {} # Callback for the changelog, used to collect changed files and manifest # nodes. # Returns the linkrev node (identity in the changelog case). def lookupcl(x): c = cl.read(x) clrevorder[x] = len(clrevorder) n = c[0] # record the first changeset introducing this manifest version mfs.setdefault(n, x) # Record a complete list of potentially-changed files in # this manifest. mfchangedfiles.setdefault(n, set()).update(c[3]) return x self._verbosenote(_('uncompressed size of bundle content:\n')) size = 0 for chunk in self.group(clnodes, cl, lookupcl, units=_('changesets')): size += len(chunk) yield chunk self._verbosenote(_('%8.i (changelog)\n') % size) # We need to make sure that the linkrev in the changegroup refers to # the first changeset that introduced the manifest or file revision. # The fastpath is usually safer than the slowpath, because the filelogs # are walked in revlog order. # # When taking the slowpath with reorder=None and the manifest revlog # uses generaldelta, the manifest may be walked in the "wrong" order. # Without 'clrevorder', we would get an incorrect linkrev (see fix in # cc0ff93d0c0c). # # When taking the fastpath, we are only vulnerable to reordering # of the changelog itself. The changelog never uses generaldelta, so # it is only reordered when reorder=True. To handle this case, we # simply take the slowpath, which already has the 'clrevorder' logic. # This was also fixed in cc0ff93d0c0c. fastpathlinkrev = fastpathlinkrev and not self._reorder # Treemanifests don't work correctly with fastpathlinkrev # either, because we don't discover which directory nodes to # send along with files. 
This could probably be fixed. fastpathlinkrev = fastpathlinkrev and ( 'treemanifest' not in repo.requirements) # Callback for the manifest, used to collect linkrevs for filelog # revisions. # Returns the linkrev node (collected in lookupcl). if fastpathlinkrev: lookupmflinknode = mfs.__getitem__ else: def lookupmflinknode(x): """Callback for looking up the linknode for manifests. Returns the linkrev node for the specified manifest. SIDE EFFECT: 1) fclnodes gets populated with the list of relevant file nodes if we're not using fastpathlinkrev 2) When treemanifests are in use, collects treemanifest nodes to send Note that this means manifests must be completely sent to the client before you can trust the list of files and treemanifests to send. """ clnode = mfs[x] # We no longer actually care about reading deltas of # the manifest here, because we already know the list # of changed files, so for treemanifests (which # lazily-load anyway to *generate* a readdelta) we can # just load them with read() and then we'll actually # be able to correctly load node IDs from the # submanifest entries. 
if 'treemanifest' in repo.requirements: mdata = ml.read(x) else: mdata = ml.readfast(x) for f in mfchangedfiles[x]: try: n = mdata[f] except KeyError: continue # record the first changeset introducing this filelog # version fclnodes = fnodes.setdefault(f, {}) fclnode = fclnodes.setdefault(n, clnode) if clrevorder[clnode] < clrevorder[fclnode]: fclnodes[n] = clnode # gather list of changed treemanifest nodes if 'treemanifest' in repo.requirements: submfs = {'/': mdata} for dn, bn in _moddirs(mfchangedfiles[x]): try: submf = submfs[dn] submf = submf._dirs[bn] except KeyError: continue # deleted directory, so nothing to send submfs[submf.dir()] = submf tmfclnodes = tmfnodes.setdefault(submf.dir(), {}) tmfclnode = tmfclnodes.setdefault(submf._node, clnode) if clrevorder[clnode] < clrevorder[tmfclnode]: tmfclnodes[n] = clnode return clnode mfnodes = self.prune(ml, mfs, commonrevs) for x in self._packmanifests( mfnodes, tmfnodes, lookupmflinknode): yield x mfs.clear() clrevs = set(cl.rev(x) for x in clnodes) if not fastpathlinkrev: def linknodes(unused, fname): return fnodes.get(fname, {}) else: cln = cl.node def linknodes(filerevlog, fname): llr = filerevlog.linkrev fln = filerevlog.node revs = ((r, llr(r)) for r in filerevlog) return dict((fln(r), cln(lr)) for r, lr in revs if lr in clrevs) changedfiles = set() for x in mfchangedfiles.itervalues(): changedfiles.update(x) for chunk in self.generatefiles(changedfiles, linknodes, commonrevs, source): yield chunk yield self.close() if clnodes: repo.hook('outgoing', node=hex(clnodes[0]), source=source) # The 'source' parameter is useful for extensions def generatefiles(self, changedfiles, linknodes, commonrevs, source): repo = self._repo progress = self._progress msgbundling = _('bundling') total = len(changedfiles) # for progress output msgfiles = _('files') for i, fname in enumerate(sorted(changedfiles)): filerevlog = repo.file(fname) if not filerevlog: raise error.Abort(_("empty or missing revlog for %s") % fname) 
linkrevnodes = linknodes(filerevlog, fname) # Lookup for filenodes, we collected the linkrev nodes above in the # fastpath case and with lookupmf in the slowpath case. def lookupfilelog(x): return linkrevnodes[x] filenodes = self.prune(filerevlog, linkrevnodes, commonrevs) if filenodes: progress(msgbundling, i + 1, item=fname, unit=msgfiles, total=total) h = self.fileheader(fname) size = len(h) yield h for chunk in self.group(filenodes, filerevlog, lookupfilelog): size += len(chunk) yield chunk self._verbosenote(_('%8.i %s\n') % (size, fname)) progress(msgbundling, None) def deltaparent(self, revlog, rev, p1, p2, prev): return prev def revchunk(self, revlog, rev, prev, linknode): node = revlog.node(rev) p1, p2 = revlog.parentrevs(rev) base = self.deltaparent(revlog, rev, p1, p2, prev) prefix = '' if revlog.iscensored(base) or revlog.iscensored(rev): try: delta = revlog.revision(node) except error.CensoredNodeError as e: delta = e.tombstone if base == nullrev: prefix = mdiff.trivialdiffheader(len(delta)) else: baselen = revlog.rawsize(base) prefix = mdiff.replacediffheader(baselen, len(delta)) elif base == nullrev: delta = revlog.revision(node) prefix = mdiff.trivialdiffheader(len(delta)) else: delta = revlog.revdiff(base, rev) p1n, p2n = revlog.parents(node) basenode = revlog.node(base) flags = revlog.flags(rev) meta = self.builddeltaheader(node, p1n, p2n, basenode, linknode, flags) meta += prefix l = len(meta) + len(delta) yield chunkheader(l) yield meta yield delta def builddeltaheader(self, node, p1n, p2n, basenode, linknode, flags): # do nothing with basenode, it is implicitly the previous one in HG10 # do nothing with flags, it is implicitly 0 for cg1 and cg2 return struct.pack(self.deltaheader, node, p1n, p2n, linknode) class cg2packer(cg1packer): version = '02' deltaheader = _CHANGEGROUPV2_DELTA_HEADER def __init__(self, repo, bundlecaps=None): super(cg2packer, self).__init__(repo, bundlecaps) if self._reorder is None: # Since generaldelta is directly 
supported by cg2, reordering # generally doesn't help, so we disable it by default (treating # bundle.reorder=auto just like bundle.reorder=False). self._reorder = False def deltaparent(self, revlog, rev, p1, p2, prev): dp = revlog.deltaparent(rev) # avoid storing full revisions; pick prev in those cases # also pick prev when we can't be sure remote has dp if dp == nullrev or (dp != p1 and dp != p2 and dp != prev): return prev return dp def builddeltaheader(self, node, p1n, p2n, basenode, linknode, flags): # Do nothing with flags, it is implicitly 0 in cg1 and cg2 return struct.pack(self.deltaheader, node, p1n, p2n, basenode, linknode) class cg3packer(cg2packer): version = '03' deltaheader = _CHANGEGROUPV3_DELTA_HEADER def _packmanifests(self, mfnodes, tmfnodes, lookuplinknode): # Note that debug prints are super confusing in this code, as # tmfnodes gets populated by the calls to lookuplinknode in # the superclass's manifest packer. In the future we should # probably see if we can refactor this somehow to be less # confusing. for x in super(cg3packer, self)._packmanifests( mfnodes, {}, lookuplinknode): yield x dirlog = self._repo.manifest.dirlog for name, nodes in tmfnodes.iteritems(): # For now, directory headers are simply file headers with # a trailing '/' on the path (already in the name). 
yield self.fileheader(name) for chunk in self.group(nodes, dirlog(name), nodes.get): yield chunk yield self.close() def builddeltaheader(self, node, p1n, p2n, basenode, linknode, flags): return struct.pack( self.deltaheader, node, p1n, p2n, basenode, linknode, flags) _packermap = {'01': (cg1packer, cg1unpacker), # cg2 adds support for exchanging generaldelta '02': (cg2packer, cg2unpacker), # cg3 adds support for exchanging revlog flags and treemanifests '03': (cg3packer, cg3unpacker), } def allsupportedversions(ui): versions = set(_packermap.keys()) versions.discard('03') if (ui.configbool('experimental', 'changegroup3') or ui.configbool('experimental', 'treemanifest')): versions.add('03') return versions # Changegroup versions that can be applied to the repo def supportedincomingversions(repo): versions = allsupportedversions(repo.ui) if 'treemanifest' in repo.requirements: versions.add('03') return versions # Changegroup versions that can be created from the repo def supportedoutgoingversions(repo): versions = allsupportedversions(repo.ui) if 'treemanifest' in repo.requirements: # Versions 01 and 02 support only flat manifests and it's just too # expensive to convert between the flat manifest and tree manifest on # the fly. Since tree manifests are hashed differently, all of history # would have to be converted. Instead, we simply don't even pretend to # support versions 01 and 02. versions.discard('01') versions.discard('02') versions.add('03') return versions def safeversion(repo): # Finds the smallest version that it's safe to assume clients of the repo # will support. For example, all hg versions that support generaldelta also # support changegroup 02. 
versions = supportedoutgoingversions(repo) if 'generaldelta' in repo.requirements: versions.discard('01') assert versions return min(versions) def getbundler(version, repo, bundlecaps=None): assert version in supportedoutgoingversions(repo) return _packermap[version][0](repo, bundlecaps) def getunbundler(version, fh, alg): return _packermap[version][1](fh, alg) def _changegroupinfo(repo, nodes, source): if repo.ui.verbose or source == 'bundle': repo.ui.status(_("%d changesets found\n") % len(nodes)) if repo.ui.debugflag: repo.ui.debug("list of changesets:\n") for node in nodes: repo.ui.debug("%s\n" % hex(node)) def getsubsetraw(repo, outgoing, bundler, source, fastpath=False): repo = repo.unfiltered() commonrevs = outgoing.common csets = outgoing.missing heads = outgoing.missingheads # We go through the fast path if we get told to, or if all (unfiltered # heads have been requested (since we then know there all linkrevs will # be pulled by the client). heads.sort() fastpathlinkrev = fastpath or ( repo.filtername is None and heads == sorted(repo.heads())) repo.hook('preoutgoing', throw=True, source=source) _changegroupinfo(repo, csets, source) return bundler.generate(commonrevs, csets, fastpathlinkrev, source) def getsubset(repo, outgoing, bundler, source, fastpath=False): gengroup = getsubsetraw(repo, outgoing, bundler, source, fastpath) return getunbundler(bundler.version, util.chunkbuffer(gengroup), None) def changegroupsubset(repo, roots, heads, source, version='01'): """Compute a changegroup consisting of all the nodes that are descendants of any of the roots and ancestors of any of the heads. Return a chunkbuffer object whose read() method will return successive changegroup chunks. It is fairly complex as determining which filenodes and which manifest nodes need to be included for the changeset to be complete is non-trivial. Another wrinkle is doing the reverse, figuring out which changeset in the changegroup a particular filenode or manifestnode belongs to. 
""" cl = repo.changelog if not roots: roots = [nullid] discbases = [] for n in roots: discbases.extend([p for p in cl.parents(n) if p != nullid]) # TODO: remove call to nodesbetween. csets, roots, heads = cl.nodesbetween(roots, heads) included = set(csets) discbases = [n for n in discbases if n not in included] outgoing = discovery.outgoing(cl, discbases, heads) bundler = getbundler(version, repo) return getsubset(repo, outgoing, bundler, source) def getlocalchangegroupraw(repo, source, outgoing, bundlecaps=None, version='01'): """Like getbundle, but taking a discovery.outgoing as an argument. This is only implemented for local repos and reuses potentially precomputed sets in outgoing. Returns a raw changegroup generator.""" if not outgoing.missing: return None bundler = getbundler(version, repo, bundlecaps) return getsubsetraw(repo, outgoing, bundler, source) def getlocalchangegroup(repo, source, outgoing, bundlecaps=None, version='01'): """Like getbundle, but taking a discovery.outgoing as an argument. This is only implemented for local repos and reuses potentially precomputed sets in outgoing.""" if not outgoing.missing: return None bundler = getbundler(version, repo, bundlecaps) return getsubset(repo, outgoing, bundler, source) def computeoutgoing(repo, heads, common): """Computes which revs are outgoing given a set of common and a set of heads. This is a separate function so extensions can have access to the logic. Returns a discovery.outgoing object. """ cl = repo.changelog if common: hasnode = cl.hasnode common = [n for n in common if hasnode(n)] else: common = [nullid] if not heads: heads = cl.heads() return discovery.outgoing(cl, common, heads) def getchangegroup(repo, source, heads=None, common=None, bundlecaps=None, version='01'): """Like changegroupsubset, but returns the set difference between the ancestors of heads and the ancestors common. If heads is None, use the local heads. If common is None, use [nullid]. 
The nodes in common might not all be known locally due to the way the current discovery protocol works. """ outgoing = computeoutgoing(repo, heads, common) return getlocalchangegroup(repo, source, outgoing, bundlecaps=bundlecaps, version=version) def changegroup(repo, basenodes, source): # to avoid a race we use changegroupsubset() (issue1320) return changegroupsubset(repo, basenodes, repo.heads(), source) def _addchangegroupfiles(repo, source, revmap, trp, pr, needfiles): revisions = 0 files = 0 while True: chunkdata = source.filelogheader() if not chunkdata: break f = chunkdata["filename"] repo.ui.debug("adding %s revisions\n" % f) pr() fl = repo.file(f) o = len(fl) try: if not fl.addgroup(source, revmap, trp): raise error.Abort(_("received file revlog group is empty")) except error.CensoredBaseError as e: raise error.Abort(_("received delta base is censored: %s") % e) revisions += len(fl) - o files += 1 if f in needfiles: needs = needfiles[f] for new in xrange(o, len(fl)): n = fl.node(new) if n in needs: needs.remove(n) else: raise error.Abort( _("received spurious file revlog entry")) if not needs: del needfiles[f] repo.ui.progress(_('files'), None) for f, needs in needfiles.iteritems(): fl = repo.file(f) for n in needs: try: fl.rev(n) except error.LookupError: raise error.Abort( _('missing file data for %s:%s - run hg verify') % (f, hex(n))) return revisions, files mercurial-3.7.3/mercurial/base85.c0000644000175000017500000000657712676531524016363 0ustar mpmmpm00000000000000/* base85 codec Copyright 2006 Brendan Cully This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. 
Largely based on git's implementation */ #define PY_SSIZE_T_CLEAN #include #include "util.h" static const char b85chars[] = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ" "abcdefghijklmnopqrstuvwxyz!#$%&()*+-;<=>?@^_`{|}~"; static char b85dec[256]; static void b85prep(void) { unsigned i; memset(b85dec, 0, sizeof(b85dec)); for (i = 0; i < sizeof(b85chars); i++) b85dec[(int)(b85chars[i])] = i + 1; } static PyObject *b85encode(PyObject *self, PyObject *args) { const unsigned char *text; PyObject *out; char *dst; Py_ssize_t len, olen, i; unsigned int acc, val, ch; int pad = 0; if (!PyArg_ParseTuple(args, "s#|i", &text, &len, &pad)) return NULL; if (pad) olen = ((len + 3) / 4 * 5) - 3; else { olen = len % 4; if (olen) olen++; olen += len / 4 * 5; } if (!(out = PyBytes_FromStringAndSize(NULL, olen + 3))) return NULL; dst = PyBytes_AsString(out); while (len) { acc = 0; for (i = 24; i >= 0; i -= 8) { ch = *text++; acc |= ch << i; if (--len == 0) break; } for (i = 4; i >= 0; i--) { val = acc % 85; acc /= 85; dst[i] = b85chars[val]; } dst += 5; } if (!pad) _PyBytes_Resize(&out, olen); return out; } static PyObject *b85decode(PyObject *self, PyObject *args) { PyObject *out; const char *text; char *dst; Py_ssize_t len, i, j, olen, cap; int c; unsigned int acc; if (!PyArg_ParseTuple(args, "s#", &text, &len)) return NULL; olen = len / 5 * 4; i = len % 5; if (i) olen += i - 1; if (!(out = PyBytes_FromStringAndSize(NULL, olen))) return NULL; dst = PyBytes_AsString(out); i = 0; while (i < len) { acc = 0; cap = len - i - 1; if (cap > 4) cap = 4; for (j = 0; j < cap; i++, j++) { c = b85dec[(int)*text++] - 1; if (c < 0) return PyErr_Format( PyExc_ValueError, "bad base85 character at position %d", (int)i); acc = acc * 85 + c; } if (i++ < len) { c = b85dec[(int)*text++] - 1; if (c < 0) return PyErr_Format( PyExc_ValueError, "bad base85 character at position %d", (int)i); /* overflow detection: 0xffffffff == "|NsC0", * "|NsC" == 0x03030303 */ if (acc > 0x03030303 || (acc *= 85) > 0xffffffff - c) 
return PyErr_Format( PyExc_ValueError, "bad base85 sequence at position %d", (int)i); acc += c; } cap = olen < 4 ? olen : 4; olen -= cap; for (j = 0; j < 4 - cap; j++) acc *= 85; if (cap && cap < 4) acc += 0xffffff >> (cap - 1) * 8; for (j = 0; j < cap; j++) { acc = (acc << 8) | (acc >> 24); *dst++ = acc; } } return out; } static char base85_doc[] = "Base85 Data Encoding"; static PyMethodDef methods[] = { {"b85encode", b85encode, METH_VARARGS, "Encode text in base85.\n\n" "If the second parameter is true, pad the result to a multiple of " "five characters.\n"}, {"b85decode", b85decode, METH_VARARGS, "Decode base85 text.\n"}, {NULL, NULL} }; #ifdef IS_PY3K static struct PyModuleDef base85_module = { PyModuleDef_HEAD_INIT, "base85", base85_doc, -1, methods }; PyMODINIT_FUNC PyInit_base85(void) { b85prep(); return PyModule_Create(&base85_module); } #else PyMODINIT_FUNC initbase85(void) { Py_InitModule3("base85", methods, base85_doc); b85prep(); } #endif mercurial-3.7.3/mercurial/osutil.c0000644000175000017500000005154112676531524016602 0ustar mpmmpm00000000000000/* osutil.c - native operating system services Copyright 2007 Matt Mackall and others This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. */ #define _ATFILE_SOURCE #include #include #include #include #include #include #ifdef _WIN32 #include #include #else #include #include #include #include #include #endif #ifdef __APPLE__ #include #include #endif #include "util.h" /* some platforms lack the PATH_MAX definition (eg. 
GNU/Hurd) */ #ifndef PATH_MAX #define PATH_MAX 4096 #endif #ifdef _WIN32 /* stat struct compatible with hg expectations Mercurial only uses st_mode, st_size and st_mtime the rest is kept to minimize changes between implementations */ struct hg_stat { int st_dev; int st_mode; int st_nlink; __int64 st_size; int st_mtime; int st_ctime; }; struct listdir_stat { PyObject_HEAD struct hg_stat st; }; #else struct listdir_stat { PyObject_HEAD struct stat st; }; #endif #define listdir_slot(name) \ static PyObject *listdir_stat_##name(PyObject *self, void *x) \ { \ return PyInt_FromLong(((struct listdir_stat *)self)->st.name); \ } listdir_slot(st_dev) listdir_slot(st_mode) listdir_slot(st_nlink) #ifdef _WIN32 static PyObject *listdir_stat_st_size(PyObject *self, void *x) { return PyLong_FromLongLong( (PY_LONG_LONG)((struct listdir_stat *)self)->st.st_size); } #else listdir_slot(st_size) #endif listdir_slot(st_mtime) listdir_slot(st_ctime) static struct PyGetSetDef listdir_stat_getsets[] = { {"st_dev", listdir_stat_st_dev, 0, 0, 0}, {"st_mode", listdir_stat_st_mode, 0, 0, 0}, {"st_nlink", listdir_stat_st_nlink, 0, 0, 0}, {"st_size", listdir_stat_st_size, 0, 0, 0}, {"st_mtime", listdir_stat_st_mtime, 0, 0, 0}, {"st_ctime", listdir_stat_st_ctime, 0, 0, 0}, {0, 0, 0, 0, 0} }; static PyObject *listdir_stat_new(PyTypeObject *t, PyObject *a, PyObject *k) { return t->tp_alloc(t, 0); } static void listdir_stat_dealloc(PyObject *o) { o->ob_type->tp_free(o); } static PyTypeObject listdir_stat_type = { PyVarObject_HEAD_INIT(NULL, 0) "osutil.stat", /*tp_name*/ sizeof(struct listdir_stat), /*tp_basicsize*/ 0, /*tp_itemsize*/ (destructor)listdir_stat_dealloc, /*tp_dealloc*/ 0, /*tp_print*/ 0, /*tp_getattr*/ 0, /*tp_setattr*/ 0, /*tp_compare*/ 0, /*tp_repr*/ 0, /*tp_as_number*/ 0, /*tp_as_sequence*/ 0, /*tp_as_mapping*/ 0, /*tp_hash */ 0, /*tp_call*/ 0, /*tp_str*/ 0, /*tp_getattro*/ 0, /*tp_setattro*/ 0, /*tp_as_buffer*/ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /*tp_flags*/ "stat objects", 
/* tp_doc */ 0, /* tp_traverse */ 0, /* tp_clear */ 0, /* tp_richcompare */ 0, /* tp_weaklistoffset */ 0, /* tp_iter */ 0, /* tp_iternext */ 0, /* tp_methods */ 0, /* tp_members */ listdir_stat_getsets, /* tp_getset */ 0, /* tp_base */ 0, /* tp_dict */ 0, /* tp_descr_get */ 0, /* tp_descr_set */ 0, /* tp_dictoffset */ 0, /* tp_init */ 0, /* tp_alloc */ listdir_stat_new, /* tp_new */ }; #ifdef _WIN32 static int to_python_time(const FILETIME *tm) { /* number of seconds between epoch and January 1 1601 */ const __int64 a0 = (__int64)134774L * (__int64)24L * (__int64)3600L; /* conversion factor from 100ns to 1s */ const __int64 a1 = 10000000; /* explicit (int) cast to suspend compiler warnings */ return (int)((((__int64)tm->dwHighDateTime << 32) + tm->dwLowDateTime) / a1 - a0); } static PyObject *make_item(const WIN32_FIND_DATAA *fd, int wantstat) { PyObject *py_st; struct hg_stat *stp; int kind = (fd->dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) ? _S_IFDIR : _S_IFREG; if (!wantstat) return Py_BuildValue("si", fd->cFileName, kind); py_st = PyObject_CallObject((PyObject *)&listdir_stat_type, NULL); if (!py_st) return NULL; stp = &((struct listdir_stat *)py_st)->st; /* use kind as st_mode rwx bits on Win32 are meaningless and Hg does not use them anyway */ stp->st_mode = kind; stp->st_mtime = to_python_time(&fd->ftLastWriteTime); stp->st_ctime = to_python_time(&fd->ftCreationTime); if (kind == _S_IFREG) stp->st_size = ((__int64)fd->nFileSizeHigh << 32) + fd->nFileSizeLow; return Py_BuildValue("siN", fd->cFileName, kind, py_st); } static PyObject *_listdir(char *path, int plen, int wantstat, char *skip) { PyObject *rval = NULL; /* initialize - return value */ PyObject *list; HANDLE fh; WIN32_FIND_DATAA fd; char *pattern; /* build the path + \* pattern string */ pattern = malloc(plen + 3); /* path + \* + \0 */ if (!pattern) { PyErr_NoMemory(); goto error_nomem; } strcpy(pattern, path); if (plen > 0) { char c = path[plen-1]; if (c != ':' && c != '/' && c != '\\') 
pattern[plen++] = '\\'; } strcpy(pattern + plen, "*"); fh = FindFirstFileA(pattern, &fd); if (fh == INVALID_HANDLE_VALUE) { PyErr_SetFromWindowsErrWithFilename(GetLastError(), path); goto error_file; } list = PyList_New(0); if (!list) goto error_list; do { PyObject *item; if (fd.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) { if (!strcmp(fd.cFileName, ".") || !strcmp(fd.cFileName, "..")) continue; if (skip && !strcmp(fd.cFileName, skip)) { rval = PyList_New(0); goto error; } } item = make_item(&fd, wantstat); if (!item) goto error; if (PyList_Append(list, item)) { Py_XDECREF(item); goto error; } Py_XDECREF(item); } while (FindNextFileA(fh, &fd)); if (GetLastError() != ERROR_NO_MORE_FILES) { PyErr_SetFromWindowsErrWithFilename(GetLastError(), path); goto error; } rval = list; Py_XINCREF(rval); error: Py_XDECREF(list); error_list: FindClose(fh); error_file: free(pattern); error_nomem: return rval; } #else int entkind(struct dirent *ent) { #ifdef DT_REG switch (ent->d_type) { case DT_REG: return S_IFREG; case DT_DIR: return S_IFDIR; case DT_LNK: return S_IFLNK; case DT_BLK: return S_IFBLK; case DT_CHR: return S_IFCHR; case DT_FIFO: return S_IFIFO; case DT_SOCK: return S_IFSOCK; } #endif return -1; } static PyObject *makestat(const struct stat *st) { PyObject *stat; stat = PyObject_CallObject((PyObject *)&listdir_stat_type, NULL); if (stat) memcpy(&((struct listdir_stat *)stat)->st, st, sizeof(*st)); return stat; } static PyObject *_listdir_stat(char *path, int pathlen, int keepstat, char *skip) { PyObject *list, *elem, *stat = NULL, *ret = NULL; char fullpath[PATH_MAX + 10]; int kind, err; struct stat st; struct dirent *ent; DIR *dir; #ifdef AT_SYMLINK_NOFOLLOW int dfd = -1; #endif if (pathlen >= PATH_MAX) { errno = ENAMETOOLONG; PyErr_SetFromErrnoWithFilename(PyExc_OSError, path); goto error_value; } strncpy(fullpath, path, PATH_MAX); fullpath[pathlen] = '/'; #ifdef AT_SYMLINK_NOFOLLOW dfd = open(path, O_RDONLY); if (dfd == -1) { 
PyErr_SetFromErrnoWithFilename(PyExc_OSError, path); goto error_value; } dir = fdopendir(dfd); #else dir = opendir(path); #endif if (!dir) { PyErr_SetFromErrnoWithFilename(PyExc_OSError, path); goto error_dir; } list = PyList_New(0); if (!list) goto error_list; while ((ent = readdir(dir))) { if (!strcmp(ent->d_name, ".") || !strcmp(ent->d_name, "..")) continue; kind = entkind(ent); if (kind == -1 || keepstat) { #ifdef AT_SYMLINK_NOFOLLOW err = fstatat(dfd, ent->d_name, &st, AT_SYMLINK_NOFOLLOW); #else strncpy(fullpath + pathlen + 1, ent->d_name, PATH_MAX - pathlen); fullpath[PATH_MAX] = '\0'; err = lstat(fullpath, &st); #endif if (err == -1) { /* race with file deletion? */ if (errno == ENOENT) continue; strncpy(fullpath + pathlen + 1, ent->d_name, PATH_MAX - pathlen); fullpath[PATH_MAX] = 0; PyErr_SetFromErrnoWithFilename(PyExc_OSError, fullpath); goto error; } kind = st.st_mode & S_IFMT; } /* quit early? */ if (skip && kind == S_IFDIR && !strcmp(ent->d_name, skip)) { ret = PyList_New(0); goto error; } if (keepstat) { stat = makestat(&st); if (!stat) goto error; elem = Py_BuildValue("siN", ent->d_name, kind, stat); } else elem = Py_BuildValue("si", ent->d_name, kind); if (!elem) goto error; stat = NULL; PyList_Append(list, elem); Py_DECREF(elem); } ret = list; Py_INCREF(ret); error: Py_DECREF(list); Py_XDECREF(stat); error_list: closedir(dir); error_dir: #ifdef AT_SYMLINK_NOFOLLOW close(dfd); #endif error_value: return ret; } #ifdef __APPLE__ typedef struct { u_int32_t length; attrreference_t name; fsobj_type_t obj_type; struct timespec mtime; #if __LITTLE_ENDIAN__ mode_t access_mask; uint16_t padding; #else uint16_t padding; mode_t access_mask; #endif off_t size; } __attribute__((packed)) attrbuf_entry; int attrkind(attrbuf_entry *entry) { switch (entry->obj_type) { case VREG: return S_IFREG; case VDIR: return S_IFDIR; case VLNK: return S_IFLNK; case VBLK: return S_IFBLK; case VCHR: return S_IFCHR; case VFIFO: return S_IFIFO; case VSOCK: return S_IFSOCK; } return 
-1; } /* get these many entries at a time */ #define LISTDIR_BATCH_SIZE 50 static PyObject *_listdir_batch(char *path, int pathlen, int keepstat, char *skip, bool *fallback) { PyObject *list, *elem, *stat = NULL, *ret = NULL; int kind, err; unsigned long index; unsigned int count, old_state, new_state; bool state_seen = false; attrbuf_entry *entry; /* from the getattrlist(2) man page: a path can be no longer than (NAME_MAX * 3 + 1) bytes. Also, "The getattrlist() function will silently truncate attribute data if attrBufSize is too small." So pass in a buffer big enough for the worst case. */ char attrbuf[LISTDIR_BATCH_SIZE * (sizeof(attrbuf_entry) + NAME_MAX * 3 + 1)]; unsigned int basep_unused; struct stat st; int dfd = -1; /* these must match the attrbuf_entry struct, otherwise you'll end up with garbage */ struct attrlist requested_attr = {0}; requested_attr.bitmapcount = ATTR_BIT_MAP_COUNT; requested_attr.commonattr = (ATTR_CMN_NAME | ATTR_CMN_OBJTYPE | ATTR_CMN_MODTIME | ATTR_CMN_ACCESSMASK); requested_attr.fileattr = ATTR_FILE_DATALENGTH; *fallback = false; if (pathlen >= PATH_MAX) { errno = ENAMETOOLONG; PyErr_SetFromErrnoWithFilename(PyExc_OSError, path); goto error_value; } dfd = open(path, O_RDONLY); if (dfd == -1) { PyErr_SetFromErrnoWithFilename(PyExc_OSError, path); goto error_value; } list = PyList_New(0); if (!list) goto error_dir; do { count = LISTDIR_BATCH_SIZE; err = getdirentriesattr(dfd, &requested_attr, &attrbuf, sizeof(attrbuf), &count, &basep_unused, &new_state, 0); if (err < 0) { if (errno == ENOTSUP) { /* We're on a filesystem that doesn't support getdirentriesattr. Fall back to the stat-based implementation. */ *fallback = true; } else PyErr_SetFromErrnoWithFilename(PyExc_OSError, path); goto error; } if (!state_seen) { old_state = new_state; state_seen = true; } else if (old_state != new_state) { /* There's an edge case with getdirentriesattr. 
Consider the following initial list of files: a b <-- c d If the iteration is paused at the arrow, and b is deleted before it is resumed, getdirentriesattr will not return d at all! Ordinarily we're expected to restart the iteration from the beginning. To avoid getting stuck in a retry loop here, fall back to stat. */ *fallback = true; goto error; } entry = (attrbuf_entry *)attrbuf; for (index = 0; index < count; index++) { char *filename = ((char *)&entry->name) + entry->name.attr_dataoffset; if (!strcmp(filename, ".") || !strcmp(filename, "..")) continue; kind = attrkind(entry); if (kind == -1) { PyErr_Format(PyExc_OSError, "unknown object type %u for file " "%s%s!", entry->obj_type, path, filename); goto error; } /* quit early? */ if (skip && kind == S_IFDIR && !strcmp(filename, skip)) { ret = PyList_New(0); goto error; } if (keepstat) { /* from the getattrlist(2) man page: "Only the permission bits ... are valid". */ st.st_mode = (entry->access_mask & ~S_IFMT) | kind; st.st_mtime = entry->mtime.tv_sec; st.st_size = entry->size; stat = makestat(&st); if (!stat) goto error; elem = Py_BuildValue("siN", filename, kind, stat); } else elem = Py_BuildValue("si", filename, kind); if (!elem) goto error; stat = NULL; PyList_Append(list, elem); Py_DECREF(elem); entry = (attrbuf_entry *)((char *)entry + entry->length); } } while (err == 0); ret = list; Py_INCREF(ret); error: Py_DECREF(list); Py_XDECREF(stat); error_dir: close(dfd); error_value: return ret; } #endif /* __APPLE__ */ static PyObject *_listdir(char *path, int pathlen, int keepstat, char *skip) { #ifdef __APPLE__ PyObject *ret; bool fallback = false; ret = _listdir_batch(path, pathlen, keepstat, skip, &fallback); if (ret != NULL || !fallback) return ret; #endif return _listdir_stat(path, pathlen, keepstat, skip); } static PyObject *statfiles(PyObject *self, PyObject *args) { PyObject *names, *stats; Py_ssize_t i, count; if (!PyArg_ParseTuple(args, "O:statfiles", &names)) return NULL; count = 
PySequence_Length(names); if (count == -1) { PyErr_SetString(PyExc_TypeError, "not a sequence"); return NULL; } stats = PyList_New(count); if (stats == NULL) return NULL; for (i = 0; i < count; i++) { PyObject *stat, *pypath; struct stat st; int ret, kind; char *path; /* With a large file count or on a slow filesystem, don't block signals for long (issue4878). */ if ((i % 1000) == 999 && PyErr_CheckSignals() == -1) goto bail; pypath = PySequence_GetItem(names, i); if (!pypath) goto bail; path = PyString_AsString(pypath); if (path == NULL) { Py_DECREF(pypath); PyErr_SetString(PyExc_TypeError, "not a string"); goto bail; } ret = lstat(path, &st); Py_DECREF(pypath); kind = st.st_mode & S_IFMT; if (ret != -1 && (kind == S_IFREG || kind == S_IFLNK)) { stat = makestat(&st); if (stat == NULL) goto bail; PyList_SET_ITEM(stats, i, stat); } else { Py_INCREF(Py_None); PyList_SET_ITEM(stats, i, Py_None); } } return stats; bail: Py_DECREF(stats); return NULL; } /* * recvfds() simply does not release GIL during blocking io operation because * command server is known to be single-threaded. * * Old systems such as Solaris don't provide CMSG_LEN, msg_control, etc. * Currently, recvfds() is not supported on these platforms. 
*/ #ifdef CMSG_LEN static ssize_t recvfdstobuf(int sockfd, int **rfds, void *cbuf, size_t cbufsize) { char dummy[1]; struct iovec iov = {dummy, sizeof(dummy)}; struct msghdr msgh = {0}; struct cmsghdr *cmsg; msgh.msg_iov = &iov; msgh.msg_iovlen = 1; msgh.msg_control = cbuf; msgh.msg_controllen = (socklen_t)cbufsize; if (recvmsg(sockfd, &msgh, 0) < 0) return -1; for (cmsg = CMSG_FIRSTHDR(&msgh); cmsg; cmsg = CMSG_NXTHDR(&msgh, cmsg)) { if (cmsg->cmsg_level != SOL_SOCKET || cmsg->cmsg_type != SCM_RIGHTS) continue; *rfds = (int *)CMSG_DATA(cmsg); return (cmsg->cmsg_len - CMSG_LEN(0)) / sizeof(int); } *rfds = cbuf; return 0; } static PyObject *recvfds(PyObject *self, PyObject *args) { int sockfd; int *rfds = NULL; ssize_t rfdscount, i; char cbuf[256]; PyObject *rfdslist = NULL; if (!PyArg_ParseTuple(args, "i", &sockfd)) return NULL; rfdscount = recvfdstobuf(sockfd, &rfds, cbuf, sizeof(cbuf)); if (rfdscount < 0) return PyErr_SetFromErrno(PyExc_OSError); rfdslist = PyList_New(rfdscount); if (!rfdslist) goto bail; for (i = 0; i < rfdscount; i++) { PyObject *obj = PyInt_FromLong(rfds[i]); if (!obj) goto bail; PyList_SET_ITEM(rfdslist, i, obj); } return rfdslist; bail: Py_XDECREF(rfdslist); return NULL; } #endif /* CMSG_LEN */ #endif /* ndef _WIN32 */ static PyObject *listdir(PyObject *self, PyObject *args, PyObject *kwargs) { PyObject *statobj = NULL; /* initialize - optional arg */ PyObject *skipobj = NULL; /* initialize - optional arg */ char *path, *skip = NULL; int wantstat, plen; static char *kwlist[] = {"path", "stat", "skip", NULL}; if (!PyArg_ParseTupleAndKeywords(args, kwargs, "s#|OO:listdir", kwlist, &path, &plen, &statobj, &skipobj)) return NULL; wantstat = statobj && PyObject_IsTrue(statobj); if (skipobj && skipobj != Py_None) { skip = PyBytes_AsString(skipobj); if (!skip) return NULL; } return _listdir(path, plen, wantstat, skip); } #ifdef _WIN32 static PyObject *posixfile(PyObject *self, PyObject *args, PyObject *kwds) { static char *kwlist[] = {"name", 
"mode", "buffering", NULL}; PyObject *file_obj = NULL; char *name = NULL; char *mode = "rb"; DWORD access = 0; DWORD creation; HANDLE handle; int fd, flags = 0; int bufsize = -1; char m0, m1, m2; char fpmode[4]; int fppos = 0; int plus; FILE *fp; if (!PyArg_ParseTupleAndKeywords(args, kwds, "et|si:posixfile", kwlist, Py_FileSystemDefaultEncoding, &name, &mode, &bufsize)) return NULL; m0 = mode[0]; m1 = m0 ? mode[1] : '\0'; m2 = m1 ? mode[2] : '\0'; plus = m1 == '+' || m2 == '+'; fpmode[fppos++] = m0; if (m1 == 'b' || m2 == 'b') { flags = _O_BINARY; fpmode[fppos++] = 'b'; } else flags = _O_TEXT; if (m0 == 'r' && !plus) { flags |= _O_RDONLY; access = GENERIC_READ; } else { /* work around http://support.microsoft.com/kb/899149 and set _O_RDWR for 'w' and 'a', even if mode has no '+' */ flags |= _O_RDWR; access = GENERIC_READ | GENERIC_WRITE; fpmode[fppos++] = '+'; } fpmode[fppos++] = '\0'; switch (m0) { case 'r': creation = OPEN_EXISTING; break; case 'w': creation = CREATE_ALWAYS; break; case 'a': creation = OPEN_ALWAYS; flags |= _O_APPEND; break; default: PyErr_Format(PyExc_ValueError, "mode string must begin with one of 'r', 'w', " "or 'a', not '%c'", m0); goto bail; } handle = CreateFile(name, access, FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE, NULL, creation, FILE_ATTRIBUTE_NORMAL, 0); if (handle == INVALID_HANDLE_VALUE) { PyErr_SetFromWindowsErrWithFilename(GetLastError(), name); goto bail; } fd = _open_osfhandle((intptr_t)handle, flags); if (fd == -1) { CloseHandle(handle); PyErr_SetFromErrnoWithFilename(PyExc_IOError, name); goto bail; } #ifndef IS_PY3K fp = _fdopen(fd, fpmode); if (fp == NULL) { _close(fd); PyErr_SetFromErrnoWithFilename(PyExc_IOError, name); goto bail; } file_obj = PyFile_FromFile(fp, name, mode, fclose); if (file_obj == NULL) { fclose(fp); goto bail; } PyFile_SetBufSize(file_obj, bufsize); #else file_obj = PyFile_FromFd(fd, name, mode, bufsize, NULL, NULL, NULL, 1); if (file_obj == NULL) goto bail; #endif bail: PyMem_Free(name); 
return file_obj; } #endif #ifdef __APPLE__ #include static PyObject *isgui(PyObject *self) { CFDictionaryRef dict = CGSessionCopyCurrentDictionary(); if (dict != NULL) { CFRelease(dict); Py_RETURN_TRUE; } else { Py_RETURN_FALSE; } } #endif static char osutil_doc[] = "Native operating system services."; static PyMethodDef methods[] = { {"listdir", (PyCFunction)listdir, METH_VARARGS | METH_KEYWORDS, "list a directory\n"}, #ifdef _WIN32 {"posixfile", (PyCFunction)posixfile, METH_VARARGS | METH_KEYWORDS, "Open a file with POSIX-like semantics.\n" "On error, this function may raise either a WindowsError or an IOError."}, #else {"statfiles", (PyCFunction)statfiles, METH_VARARGS | METH_KEYWORDS, "stat a series of files or symlinks\n" "Returns None for non-existent entries and entries of other types.\n"}, #ifdef CMSG_LEN {"recvfds", (PyCFunction)recvfds, METH_VARARGS, "receive list of file descriptors via socket\n"}, #endif #endif #ifdef __APPLE__ { "isgui", (PyCFunction)isgui, METH_NOARGS, "Is a CoreGraphics session available?" }, #endif {NULL, NULL} }; #ifdef IS_PY3K static struct PyModuleDef osutil_module = { PyModuleDef_HEAD_INIT, "osutil", osutil_doc, -1, methods }; PyMODINIT_FUNC PyInit_osutil(void) { if (PyType_Ready(&listdir_stat_type) < 0) return NULL; return PyModule_Create(&osutil_module); } #else PyMODINIT_FUNC initosutil(void) { if (PyType_Ready(&listdir_stat_type) == -1) return; Py_InitModule3("osutil", methods, osutil_doc); } #endif mercurial-3.7.3/mercurial/pvec.py0000644000175000017500000001364312676531525016430 0ustar mpmmpm00000000000000# pvec.py - probabilistic vector clocks for Mercurial # # Copyright 2012 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. ''' A "pvec" is a changeset property based on the theory of vector clocks that can be compared to discover relatedness without consulting a graph. 
This can be useful for tasks like determining how a disconnected patch relates to a repository. Currently a pvec consist of 448 bits, of which 24 are 'depth' and the remainder are a bit vector. It is represented as a 70-character base85 string. Construction: - a root changeset has a depth of 0 and a bit vector based on its hash - a normal commit has a changeset where depth is increased by one and one bit vector bit is flipped based on its hash - a merge changeset pvec is constructed by copying changes from one pvec into the other to balance its depth Properties: - for linear changes, difference in depth is always <= hamming distance - otherwise, changes are probably divergent - when hamming distance is < 200, we can reliably detect when pvecs are near Issues: - hamming distance ceases to work over distances of ~ 200 - detecting divergence is less accurate when the common ancestor is very close to either revision or total distance is high - this could probably be improved by modeling the relation between delta and hdist Uses: - a patch pvec can be used to locate the nearest available common ancestor for resolving conflicts - ordering of patches can be established without a DAG - two head pvecs can be compared to determine whether push/pull/merge is needed and approximately how many changesets are involved - can be used to find a heuristic divergence measure between changesets on different branches ''' from __future__ import absolute_import from .node import nullrev from . 
import ( base85, util, ) _size = 448 # 70 chars b85-encoded _bytes = _size / 8 _depthbits = 24 _depthbytes = _depthbits / 8 _vecbytes = _bytes - _depthbytes _vecbits = _vecbytes * 8 _radius = (_vecbits - 30) / 2 # high probability vectors are related def _bin(bs): '''convert a bytestring to a long''' v = 0 for b in bs: v = v * 256 + ord(b) return v def _str(v, l): bs = "" for p in xrange(l): bs = chr(v & 255) + bs v >>= 8 return bs def _split(b): '''depth and bitvec''' return _bin(b[:_depthbytes]), _bin(b[_depthbytes:]) def _join(depth, bitvec): return _str(depth, _depthbytes) + _str(bitvec, _vecbytes) def _hweight(x): c = 0 while x: if x & 1: c += 1 x >>= 1 return c _htab = [_hweight(x) for x in xrange(256)] def _hamming(a, b): '''find the hamming distance between two longs''' d = a ^ b c = 0 while d: c += _htab[d & 0xff] d >>= 8 return c def _mergevec(x, y, c): # Ideally, this function would be x ^ y ^ ancestor, but finding # ancestors is a nuisance. So instead we find the minimal number # of changes to balance the depth and hamming distance d1, v1 = x d2, v2 = y if d1 < d2: d1, d2, v1, v2 = d2, d1, v2, v1 hdist = _hamming(v1, v2) ddist = d1 - d2 v = v1 m = v1 ^ v2 # mask of different bits i = 1 if hdist > ddist: # if delta = 10 and hdist = 100, then we need to go up 55 steps # to the ancestor and down 45 changes = (hdist - ddist + 1) / 2 else: # must make at least one change changes = 1 depth = d1 + changes # copy changes from v2 if m: while changes: if m & i: v ^= i changes -= 1 i <<= 1 else: v = _flipbit(v, c) return depth, v def _flipbit(v, node): # converting bit strings to longs is slow bit = (hash(node) & 0xffffffff) % _vecbits return v ^ (1< delta: return False return True def __gt__(self, b): return b < self def __or__(self, b): delta = abs(b._depth - self._depth) if _hamming(self._vec, b._vec) <= delta: return False return True def __sub__(self, b): if self | b: raise ValueError("concurrent pvecs") return self._depth - b._depth def distance(self, b): d 
= abs(b._depth - self._depth) h = _hamming(self._vec, b._vec) return max(d, h) def near(self, b): dist = abs(b.depth - self._depth) if dist > _radius or _hamming(self._vec, b._vec) > _radius: return False mercurial-3.7.3/mercurial/pure/0000755000175000017500000000000012676531544016066 5ustar mpmmpm00000000000000mercurial-3.7.3/mercurial/pure/diffhelpers.py0000644000175000017500000000310612676531525020732 0ustar mpmmpm00000000000000# diffhelpers.py - pure Python implementation of diffhelpers.c # # Copyright 2009 Matt Mackall and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import def addlines(fp, hunk, lena, lenb, a, b): while True: todoa = lena - len(a) todob = lenb - len(b) num = max(todoa, todob) if num == 0: break for i in xrange(num): s = fp.readline() c = s[0] if s == "\\ No newline at end of file\n": fix_newline(hunk, a, b) continue if c == "\n": # Some patches may be missing the control char # on empty lines. Supply a leading space. s = " \n" hunk.append(s) if c == "+": b.append(s[1:]) elif c == "-": a.append(s) else: b.append(s[1:]) a.append(s) return 0 def fix_newline(hunk, a, b): l = hunk[-1] # tolerate CRLF in last line if l.endswith('\r\n'): hline = l[:-2] else: hline = l[:-1] c = hline[0] if c in " +": b[-1] = hline[1:] if c in " -": a[-1] = hline hunk[-1] = hline return 0 def testhunk(a, b, bstart): alen = len(a) blen = len(b) if alen > blen - bstart: return -1 for i in xrange(alen): if a[i][1:] != b[i + bstart]: return -1 return 0 mercurial-3.7.3/mercurial/pure/mpatch.py0000644000175000017500000000624212676531525017717 0ustar mpmmpm00000000000000# mpatch.py - Python implementation of mpatch.c # # Copyright 2009 Matt Mackall and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
from __future__ import absolute_import import cStringIO import struct StringIO = cStringIO.StringIO # This attempts to apply a series of patches in time proportional to # the total size of the patches, rather than patches * len(text). This # means rather than shuffling strings around, we shuffle around # pointers to fragments with fragment lists. # # When the fragment lists get too long, we collapse them. To do this # efficiently, we do all our operations inside a buffer created by # mmap and simply use memmove. This avoids creating a bunch of large # temporary string buffers. def patches(a, bins): if not bins: return a plens = [len(x) for x in bins] pl = sum(plens) bl = len(a) + pl tl = bl + bl + pl # enough for the patches and two working texts b1, b2 = 0, bl if not tl: return a m = StringIO() def move(dest, src, count): """move count bytes from src to dest The file pointer is left at the end of dest. """ m.seek(src) buf = m.read(count) m.seek(dest) m.write(buf) # load our original text m.write(a) frags = [(len(a), b1)] # copy all the patches into our segment so we can memmove from them pos = b2 + bl m.seek(pos) for p in bins: m.write(p) def pull(dst, src, l): # pull l bytes from src while l: f = src.pop() if f[0] > l: # do we need to split? 
src.append((f[0] - l, f[1] + l)) dst.append((l, f[1])) return dst.append(f) l -= f[0] def collect(buf, list): start = buf for l, p in reversed(list): move(buf, p, l) buf += l return (buf - start, start) for plen in plens: # if our list gets too long, execute it if len(frags) > 128: b2, b1 = b1, b2 frags = [collect(b1, frags)] new = [] end = pos + plen last = 0 while pos < end: m.seek(pos) p1, p2, l = struct.unpack(">lll", m.read(12)) pull(new, frags, p1 - last) # what didn't change pull([], frags, p2 - p1) # what got deleted new.append((l, pos + 12)) # what got added pos += l + 12 last = p2 frags.extend(reversed(new)) # what was left at the end t = collect(b2, frags) m.seek(t[1]) return m.read(t[0]) def patchedsize(orig, delta): outlen, last, bin = 0, 0, 0 binend = len(delta) data = 12 while data <= binend: decode = delta[bin:bin + 12] start, end, length = struct.unpack(">lll", decode) if start > end: break bin = data + length data = bin + 12 outlen += start - last last = end outlen += length if bin != binend: raise ValueError("patch cannot be decoded") outlen += orig - last return outlen mercurial-3.7.3/mercurial/pure/parsers.py0000644000175000017500000000635112676531524020122 0ustar mpmmpm00000000000000# parsers.py - Python implementation of parsers.c # # Copyright 2009 Matt Mackall and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import cStringIO import struct import zlib from .node import nullid _pack = struct.pack _unpack = struct.unpack _compress = zlib.compress _decompress = zlib.decompress # Some code below makes tuples directly because it's more convenient. However, # code outside this module should always use dirstatetuple. 
def dirstatetuple(*x): # x is a tuple return x def parse_index2(data, inline): def gettype(q): return int(q & 0xFFFF) def offset_type(offset, type): return long(long(offset) << 16 | type) indexformatng = ">Qiiiiii20s12x" s = struct.calcsize(indexformatng) index = [] cache = None off = 0 l = len(data) - s append = index.append if inline: cache = (0, data) while off <= l: e = _unpack(indexformatng, data[off:off + s]) append(e) if e[1] < 0: break off += e[1] + s else: while off <= l: e = _unpack(indexformatng, data[off:off + s]) append(e) off += s if off != len(data): raise ValueError('corrupt index file') if index: e = list(index[0]) type = gettype(e[0]) e[0] = offset_type(0, type) index[0] = tuple(e) # add the magic null revision at -1 index.append((0, 0, 0, -1, -1, -1, -1, nullid)) return index, cache def parse_dirstate(dmap, copymap, st): parents = [st[:20], st[20: 40]] # dereference fields so they will be local in loop format = ">cllll" e_size = struct.calcsize(format) pos1 = 40 l = len(st) # the inner loop while pos1 < l: pos2 = pos1 + e_size e = _unpack(">cllll", st[pos1:pos2]) # a literal here is faster pos1 = pos2 + e[4] f = st[pos2:pos1] if '\0' in f: f, c = f.split('\0') copymap[f] = c dmap[f] = e[:4] return parents def pack_dirstate(dmap, copymap, pl, now): now = int(now) cs = cStringIO.StringIO() write = cs.write write("".join(pl)) for f, e in dmap.iteritems(): if e[0] == 'n' and e[3] == now: # The file was last modified "simultaneously" with the current # write to dirstate (i.e. within the same second for file- # systems with a granularity of 1 sec). This commonly happens # for at least a couple of files on 'update'. # The user could change the file without changing its size # within the same second. Invalidate the file's mtime in # dirstate, forcing future 'status' calls to compare the # contents of the file if the size is the same. This prevents # mistakenly treating such files as clean. 
e = dirstatetuple(e[0], e[1], e[2], -1) dmap[f] = e if f in copymap: f = "%s\0%s" % (f, copymap[f]) e = _pack(">cllll", e[0], e[1], e[2], e[3], len(f)) write(e) write(f) return cs.getvalue() mercurial-3.7.3/mercurial/pure/base85.py0000644000175000017500000000370512676531525017533 0ustar mpmmpm00000000000000# base85.py: pure python base85 codec # # Copyright (C) 2009 Brendan Cully # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import struct _b85chars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ" \ "abcdefghijklmnopqrstuvwxyz!#$%&()*+-;<=>?@^_`{|}~" _b85chars2 = [(a + b) for a in _b85chars for b in _b85chars] _b85dec = {} def _mkb85dec(): for i, c in enumerate(_b85chars): _b85dec[c] = i def b85encode(text, pad=False): """encode text in base85 format""" l = len(text) r = l % 4 if r: text += '\0' * (4 - r) longs = len(text) >> 2 words = struct.unpack('>%dL' % (longs), text) out = ''.join(_b85chars[(word // 52200625) % 85] + _b85chars2[(word // 7225) % 7225] + _b85chars2[word % 7225] for word in words) if pad: return out # Trim padding olen = l % 4 if olen: olen += 1 olen += l // 4 * 5 return out[:olen] def b85decode(text): """decode base85-encoded text""" if not _b85dec: _mkb85dec() l = len(text) out = [] for i in range(0, len(text), 5): chunk = text[i:i + 5] acc = 0 for j, c in enumerate(chunk): try: acc = acc * 85 + _b85dec[c] except KeyError: raise ValueError('bad base85 character at position %d' % (i + j)) if acc > 4294967295: raise ValueError('Base85 overflow in hunk starting at byte %d' % i) out.append(acc) # Pad final chunk if necessary cl = l % 5 if cl: acc *= 85 ** (5 - cl) if cl > 1: acc += 0xffffff >> (cl - 2) * 8 out[-1] = acc out = struct.pack('>%dL' % (len(out)), *out) if cl: out = out[:-(5 - cl)] return out mercurial-3.7.3/mercurial/pure/osutil.py0000644000175000017500000002141312676531525017757 0ustar mpmmpm00000000000000# 
osutil.py - pure Python version of osutil.c # # Copyright 2009 Matt Mackall and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import ctypes import ctypes.util import os import socket import stat as statmod import sys def _mode_to_kind(mode): if statmod.S_ISREG(mode): return statmod.S_IFREG if statmod.S_ISDIR(mode): return statmod.S_IFDIR if statmod.S_ISLNK(mode): return statmod.S_IFLNK if statmod.S_ISBLK(mode): return statmod.S_IFBLK if statmod.S_ISCHR(mode): return statmod.S_IFCHR if statmod.S_ISFIFO(mode): return statmod.S_IFIFO if statmod.S_ISSOCK(mode): return statmod.S_IFSOCK return mode def listdir(path, stat=False, skip=None): '''listdir(path, stat=False) -> list_of_tuples Return a sorted list containing information about the entries in the directory. If stat is True, each element is a 3-tuple: (name, type, stat object) Otherwise, each element is a 2-tuple: (name, type) ''' result = [] prefix = path if not prefix.endswith(os.sep): prefix += os.sep names = os.listdir(path) names.sort() for fn in names: st = os.lstat(prefix + fn) if fn == skip and statmod.S_ISDIR(st.st_mode): return [] if stat: result.append((fn, _mode_to_kind(st.st_mode), st)) else: result.append((fn, _mode_to_kind(st.st_mode))) return result if os.name != 'nt': posixfile = open _SCM_RIGHTS = 0x01 _socklen_t = ctypes.c_uint if sys.platform == 'linux2': # socket.h says "the type should be socklen_t but the definition of # the kernel is incompatible with this." 
_cmsg_len_t = ctypes.c_size_t _msg_controllen_t = ctypes.c_size_t _msg_iovlen_t = ctypes.c_size_t else: _cmsg_len_t = _socklen_t _msg_controllen_t = _socklen_t _msg_iovlen_t = ctypes.c_int class _iovec(ctypes.Structure): _fields_ = [ ('iov_base', ctypes.c_void_p), ('iov_len', ctypes.c_size_t), ] class _msghdr(ctypes.Structure): _fields_ = [ ('msg_name', ctypes.c_void_p), ('msg_namelen', _socklen_t), ('msg_iov', ctypes.POINTER(_iovec)), ('msg_iovlen', _msg_iovlen_t), ('msg_control', ctypes.c_void_p), ('msg_controllen', _msg_controllen_t), ('msg_flags', ctypes.c_int), ] class _cmsghdr(ctypes.Structure): _fields_ = [ ('cmsg_len', _cmsg_len_t), ('cmsg_level', ctypes.c_int), ('cmsg_type', ctypes.c_int), ('cmsg_data', ctypes.c_ubyte * 0), ] _libc = ctypes.CDLL(ctypes.util.find_library('c'), use_errno=True) _recvmsg = getattr(_libc, 'recvmsg', None) if _recvmsg: _recvmsg.restype = getattr(ctypes, 'c_ssize_t', ctypes.c_long) _recvmsg.argtypes = (ctypes.c_int, ctypes.POINTER(_msghdr), ctypes.c_int) else: # recvmsg isn't always provided by libc; such systems are unsupported def _recvmsg(sockfd, msg, flags): raise NotImplementedError('unsupported platform') def _CMSG_FIRSTHDR(msgh): if msgh.msg_controllen < ctypes.sizeof(_cmsghdr): return cmsgptr = ctypes.cast(msgh.msg_control, ctypes.POINTER(_cmsghdr)) return cmsgptr.contents # The pure version is less portable than the native version because the # handling of socket ancillary data heavily depends on C preprocessor. # Also, some length fields are wrongly typed in Linux kernel. 
def recvfds(sockfd): """receive list of file descriptors via socket""" dummy = (ctypes.c_ubyte * 1)() iov = _iovec(ctypes.cast(dummy, ctypes.c_void_p), ctypes.sizeof(dummy)) cbuf = ctypes.create_string_buffer(256) msgh = _msghdr(None, 0, ctypes.pointer(iov), 1, ctypes.cast(cbuf, ctypes.c_void_p), ctypes.sizeof(cbuf), 0) r = _recvmsg(sockfd, ctypes.byref(msgh), 0) if r < 0: e = ctypes.get_errno() raise OSError(e, os.strerror(e)) # assumes that the first cmsg has fds because it isn't easy to write # portable CMSG_NXTHDR() with ctypes. cmsg = _CMSG_FIRSTHDR(msgh) if not cmsg: return [] if (cmsg.cmsg_level != socket.SOL_SOCKET or cmsg.cmsg_type != _SCM_RIGHTS): return [] rfds = ctypes.cast(cmsg.cmsg_data, ctypes.POINTER(ctypes.c_int)) rfdscount = ((cmsg.cmsg_len - _cmsghdr.cmsg_data.offset) / ctypes.sizeof(ctypes.c_int)) return [rfds[i] for i in xrange(rfdscount)] else: import msvcrt _kernel32 = ctypes.windll.kernel32 _DWORD = ctypes.c_ulong _LPCSTR = _LPSTR = ctypes.c_char_p _HANDLE = ctypes.c_void_p _INVALID_HANDLE_VALUE = _HANDLE(-1).value # CreateFile _FILE_SHARE_READ = 0x00000001 _FILE_SHARE_WRITE = 0x00000002 _FILE_SHARE_DELETE = 0x00000004 _CREATE_ALWAYS = 2 _OPEN_EXISTING = 3 _OPEN_ALWAYS = 4 _GENERIC_READ = 0x80000000 _GENERIC_WRITE = 0x40000000 _FILE_ATTRIBUTE_NORMAL = 0x80 # open_osfhandle flags _O_RDONLY = 0x0000 _O_RDWR = 0x0002 _O_APPEND = 0x0008 _O_TEXT = 0x4000 _O_BINARY = 0x8000 # types of parameters of C functions used (required by pypy) _kernel32.CreateFileA.argtypes = [_LPCSTR, _DWORD, _DWORD, ctypes.c_void_p, _DWORD, _DWORD, _HANDLE] _kernel32.CreateFileA.restype = _HANDLE def _raiseioerror(name): err = ctypes.WinError() raise IOError(err.errno, '%s: %s' % (name, err.strerror)) class posixfile(object): '''a file object aiming for POSIX-like semantics CPython's open() returns a file that was opened *without* setting the _FILE_SHARE_DELETE flag, which causes rename and unlink to abort. 
This even happens if any hardlinked copy of the file is in open state. We set _FILE_SHARE_DELETE here, so files opened with posixfile can be renamed and deleted while they are held open. Note that if a file opened with posixfile is unlinked, the file remains but cannot be opened again or be recreated under the same name, until all reading processes have closed the file.''' def __init__(self, name, mode='r', bufsize=-1): if 'b' in mode: flags = _O_BINARY else: flags = _O_TEXT m0 = mode[0] if m0 == 'r' and '+' not in mode: flags |= _O_RDONLY access = _GENERIC_READ else: # work around http://support.microsoft.com/kb/899149 and # set _O_RDWR for 'w' and 'a', even if mode has no '+' flags |= _O_RDWR access = _GENERIC_READ | _GENERIC_WRITE if m0 == 'r': creation = _OPEN_EXISTING elif m0 == 'w': creation = _CREATE_ALWAYS elif m0 == 'a': creation = _OPEN_ALWAYS flags |= _O_APPEND else: raise ValueError("invalid mode: %s" % mode) fh = _kernel32.CreateFileA(name, access, _FILE_SHARE_READ | _FILE_SHARE_WRITE | _FILE_SHARE_DELETE, None, creation, _FILE_ATTRIBUTE_NORMAL, None) if fh == _INVALID_HANDLE_VALUE: _raiseioerror(name) fd = msvcrt.open_osfhandle(fh, flags) if fd == -1: _kernel32.CloseHandle(fh) _raiseioerror(name) f = os.fdopen(fd, mode, bufsize) # unfortunately, f.name is '' at this point -- so we store # the name on this wrapper. We cannot just assign to f.name, # because that attribute is read-only. 
object.__setattr__(self, 'name', name) object.__setattr__(self, '_file', f) def __iter__(self): return self._file def __getattr__(self, name): return getattr(self._file, name) def __setattr__(self, name, value): '''mimics the read-only attributes of Python file objects by raising 'TypeError: readonly attribute' if someone tries: f = posixfile('foo.txt') f.name = 'bla' ''' return self._file.__setattr__(name, value) def __enter__(self): return self._file.__enter__() def __exit__(self, exc_type, exc_value, exc_tb): return self._file.__exit__(exc_type, exc_value, exc_tb) mercurial-3.7.3/mercurial/pure/bdiff.py0000644000175000017500000000474412676531524017521 0ustar mpmmpm00000000000000# bdiff.py - Python implementation of bdiff.c # # Copyright 2009 Matt Mackall and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import array import difflib import re import struct def splitnewlines(text): '''like str.splitlines, but only split on newlines.''' lines = [l + '\n' for l in text.split('\n')] if lines: if lines[-1] == '\n': lines.pop() else: lines[-1] = lines[-1][:-1] return lines def _normalizeblocks(a, b, blocks): prev = None r = [] for curr in blocks: if prev is None: prev = curr continue shift = 0 a1, b1, l1 = prev a1end = a1 + l1 b1end = b1 + l1 a2, b2, l2 = curr a2end = a2 + l2 b2end = b2 + l2 if a1end == a2: while (a1end + shift < a2end and a[a1end + shift] == b[b1end + shift]): shift += 1 elif b1end == b2: while (b1end + shift < b2end and a[a1end + shift] == b[b1end + shift]): shift += 1 r.append((a1, b1, l1 + shift)) prev = a2 + shift, b2 + shift, l2 - shift r.append(prev) return r def _tostring(c): if type(c) is array.array: # this copy overhead isn't ideal return c.tostring() return str(c) def bdiff(a, b): a = _tostring(a).splitlines(True) b = _tostring(b).splitlines(True) if not a: s = "".join(b) return s and (struct.pack(">lll", 
0, 0, len(s)) + s) bin = [] p = [0] for i in a: p.append(p[-1] + len(i)) d = difflib.SequenceMatcher(None, a, b).get_matching_blocks() d = _normalizeblocks(a, b, d) la = 0 lb = 0 for am, bm, size in d: s = "".join(b[lb:bm]) if am > la or s: bin.append(struct.pack(">lll", p[la], p[am], len(s)) + s) la = am + size lb = bm + size return "".join(bin) def blocks(a, b): an = splitnewlines(a) bn = splitnewlines(b) d = difflib.SequenceMatcher(None, an, bn).get_matching_blocks() d = _normalizeblocks(an, bn, d) return [(i, i + n, j, j + n) for (i, j, n) in d] def fixws(text, allws): if allws: text = re.sub('[ \t\r]+', '', text) else: text = re.sub('[ \t\r]+', ' ', text) text = text.replace(' \n', '\n') return text mercurial-3.7.3/mercurial/pure/__init__.py0000644000175000017500000000000012676531525020164 0ustar mpmmpm00000000000000mercurial-3.7.3/mercurial/diffhelpers.c0000644000175000017500000001133712676531525017556 0ustar mpmmpm00000000000000/* * diffhelpers.c - helper routines for mpatch * * Copyright 2007 Chris Mason * * This software may be used and distributed according to the terms * of the GNU General Public License v2, incorporated herein by reference. 
*/ #include #include #include #include "util.h" static char diffhelpers_doc[] = "Efficient diff parsing"; static PyObject *diffhelpers_Error; /* fixup the last lines of a and b when the patch has no newline at eof */ static void _fix_newline(PyObject *hunk, PyObject *a, PyObject *b) { Py_ssize_t hunksz = PyList_Size(hunk); PyObject *s = PyList_GET_ITEM(hunk, hunksz-1); char *l = PyBytes_AsString(s); Py_ssize_t alen = PyList_Size(a); Py_ssize_t blen = PyList_Size(b); char c = l[0]; PyObject *hline; Py_ssize_t sz = PyBytes_GET_SIZE(s); if (sz > 1 && l[sz-2] == '\r') /* tolerate CRLF in last line */ sz -= 1; hline = PyBytes_FromStringAndSize(l, sz-1); if (!hline) { return; } if (c == ' ' || c == '+') { PyObject *rline = PyBytes_FromStringAndSize(l + 1, sz - 2); PyList_SetItem(b, blen-1, rline); } if (c == ' ' || c == '-') { Py_INCREF(hline); PyList_SetItem(a, alen-1, hline); } PyList_SetItem(hunk, hunksz-1, hline); } /* python callable form of _fix_newline */ static PyObject * fix_newline(PyObject *self, PyObject *args) { PyObject *hunk, *a, *b; if (!PyArg_ParseTuple(args, "OOO", &hunk, &a, &b)) return NULL; _fix_newline(hunk, a, b); return Py_BuildValue("l", 0); } #if (PY_VERSION_HEX < 0x02050000) static const char *addlines_format = "OOiiOO"; #else static const char *addlines_format = "OOnnOO"; #endif /* * read lines from fp into the hunk. The hunk is parsed into two arrays * a and b. a gets the old state of the text, b gets the new state * The control char from the hunk is saved when inserting into a, but not b * (for performance while deleting files) */ static PyObject * addlines(PyObject *self, PyObject *args) { PyObject *fp, *hunk, *a, *b, *x; Py_ssize_t i; Py_ssize_t lena, lenb; Py_ssize_t num; Py_ssize_t todoa, todob; char *s, c; PyObject *l; if (!PyArg_ParseTuple(args, addlines_format, &fp, &hunk, &lena, &lenb, &a, &b)) return NULL; while (1) { todoa = lena - PyList_Size(a); todob = lenb - PyList_Size(b); num = todoa > todob ? 
todoa : todob; if (num == 0) break; for (i = 0; i < num; i++) { x = PyFile_GetLine(fp, 0); s = PyBytes_AsString(x); c = *s; if (strcmp(s, "\\ No newline at end of file\n") == 0) { _fix_newline(hunk, a, b); continue; } if (c == '\n') { /* Some patches may be missing the control char * on empty lines. Supply a leading space. */ Py_DECREF(x); x = PyBytes_FromString(" \n"); } PyList_Append(hunk, x); if (c == '+') { l = PyBytes_FromString(s + 1); PyList_Append(b, l); Py_DECREF(l); } else if (c == '-') { PyList_Append(a, x); } else { l = PyBytes_FromString(s + 1); PyList_Append(b, l); Py_DECREF(l); PyList_Append(a, x); } Py_DECREF(x); } } return Py_BuildValue("l", 0); } /* * compare the lines in a with the lines in b. a is assumed to have * a control char at the start of each line, this char is ignored in the * compare */ static PyObject * testhunk(PyObject *self, PyObject *args) { PyObject *a, *b; long bstart; Py_ssize_t alen, blen; Py_ssize_t i; char *sa, *sb; if (!PyArg_ParseTuple(args, "OOl", &a, &b, &bstart)) return NULL; alen = PyList_Size(a); blen = PyList_Size(b); if (alen > blen - bstart || bstart < 0) { return Py_BuildValue("l", -1); } for (i = 0; i < alen; i++) { sa = PyBytes_AsString(PyList_GET_ITEM(a, i)); sb = PyBytes_AsString(PyList_GET_ITEM(b, i + bstart)); if (strcmp(sa + 1, sb) != 0) return Py_BuildValue("l", -1); } return Py_BuildValue("l", 0); } static PyMethodDef methods[] = { {"addlines", addlines, METH_VARARGS, "add lines to a hunk\n"}, {"fix_newline", fix_newline, METH_VARARGS, "fixup newline counters\n"}, {"testhunk", testhunk, METH_VARARGS, "test lines in a hunk\n"}, {NULL, NULL} }; #ifdef IS_PY3K static struct PyModuleDef diffhelpers_module = { PyModuleDef_HEAD_INIT, "diffhelpers", diffhelpers_doc, -1, methods }; PyMODINIT_FUNC PyInit_diffhelpers(void) { PyObject *m; m = PyModule_Create(&diffhelpers_module); if (m == NULL) return NULL; diffhelpers_Error = PyErr_NewException("diffhelpers.diffhelpersError", NULL, NULL); 
Py_INCREF(diffhelpers_Error); PyModule_AddObject(m, "diffhelpersError", diffhelpers_Error); return m; } #else PyMODINIT_FUNC initdiffhelpers(void) { Py_InitModule3("diffhelpers", methods, diffhelpers_doc); diffhelpers_Error = PyErr_NewException("diffhelpers.diffhelpersError", NULL, NULL); } #endif mercurial-3.7.3/mercurial/streamclone.py0000644000175000017500000003243012676531525020002 0ustar mpmmpm00000000000000# streamclone.py - producing and consuming streaming repository data # # Copyright 2015 Gregory Szorc # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import struct import time from .i18n import _ from . import ( branchmap, error, store, util, ) def canperformstreamclone(pullop, bailifbundle2supported=False): """Whether it is possible to perform a streaming clone as part of pull. ``bailifbundle2supported`` will cause the function to return False if bundle2 stream clones are supported. It should only be called by the legacy stream clone code path. Returns a tuple of (supported, requirements). ``supported`` is True if streaming clone is supported and False otherwise. ``requirements`` is a set of repo requirements from the remote, or ``None`` if stream clone isn't supported. """ repo = pullop.repo remote = pullop.remote bundle2supported = False if pullop.canusebundle2: if 'v1' in pullop.remotebundle2caps.get('stream', []): bundle2supported = True # else # Server doesn't support bundle2 stream clone or doesn't support # the versions we support. Fall back and possibly allow legacy. # Ensures legacy code path uses available bundle2. if bailifbundle2supported and bundle2supported: return False, None # Ensures bundle2 doesn't try to do a stream clone if it isn't supported. #elif not bailifbundle2supported and not bundle2supported: # return False, None # Streaming clone only works on empty repositories. 
if len(repo): return False, None # Streaming clone only works if all data is being requested. if pullop.heads: return False, None streamrequested = pullop.streamclonerequested # If we don't have a preference, let the server decide for us. This # likely only comes into play in LANs. if streamrequested is None: # The server can advertise whether to prefer streaming clone. streamrequested = remote.capable('stream-preferred') if not streamrequested: return False, None # In order for stream clone to work, the client has to support all the # requirements advertised by the server. # # The server advertises its requirements via the "stream" and "streamreqs" # capability. "stream" (a value-less capability) is advertised if and only # if the only requirement is "revlogv1." Else, the "streamreqs" capability # is advertised and contains a comma-delimited list of requirements. requirements = set() if remote.capable('stream'): requirements.add('revlogv1') else: streamreqs = remote.capable('streamreqs') # This is weird and shouldn't happen with modern servers. if not streamreqs: return False, None streamreqs = set(streamreqs.split(',')) # Server requires something we don't support. Bail. if streamreqs - repo.supportedformats: return False, None requirements = streamreqs return True, requirements def maybeperformlegacystreamclone(pullop): """Possibly perform a legacy stream clone operation. Legacy stream clones are performed as part of pull but before all other operations. A legacy stream clone will not be performed if a bundle2 stream clone is supported. """ supported, requirements = canperformstreamclone(pullop) if not supported: return repo = pullop.repo remote = pullop.remote # Save remote branchmap. We will use it later to speed up branchcache # creation. 
rbranchmap = None if remote.capable('branchmap'): rbranchmap = remote.branchmap() repo.ui.status(_('streaming all changes\n')) fp = remote.stream_out() l = fp.readline() try: resp = int(l) except ValueError: raise error.ResponseError( _('unexpected response from remote server:'), l) if resp == 1: raise error.Abort(_('operation forbidden by server')) elif resp == 2: raise error.Abort(_('locking the remote repository failed')) elif resp != 0: raise error.Abort(_('the server sent an unknown error code')) l = fp.readline() try: filecount, bytecount = map(int, l.split(' ', 1)) except (ValueError, TypeError): raise error.ResponseError( _('unexpected response from remote server:'), l) with repo.lock(): consumev1(repo, fp, filecount, bytecount) # new requirements = old non-format requirements + # new format-related remote requirements # requirements from the streamed-in repository repo.requirements = requirements | ( repo.requirements - repo.supportedformats) repo._applyopenerreqs() repo._writerequirements() if rbranchmap: branchmap.replacecache(repo, rbranchmap) repo.invalidate() def allowservergeneration(ui): """Whether streaming clones are allowed from the server.""" return ui.configbool('server', 'uncompressed', True, untrusted=True) # This is it's own function so extensions can override it. def _walkstreamfiles(repo): return repo.store.walk() def generatev1(repo): """Emit content for version 1 of a streaming clone. This returns a 3-tuple of (file count, byte size, data iterator). The data iterator consists of N entries for each file being transferred. Each file entry starts as a line with the file name and integer size delimited by a null byte. The raw file data follows. Following the raw file data is the next file entry, or EOF. When used on the wire protocol, an additional line indicating protocol success will be prepended to the stream. This function is not responsible for adding it. 
This function will obtain a repository lock to ensure a consistent view of the store is captured. It therefore may raise LockError. """ entries = [] total_bytes = 0 # Get consistent snapshot of repo, lock during scan. with repo.lock(): repo.ui.debug('scanning\n') for name, ename, size in _walkstreamfiles(repo): if size: entries.append((name, size)) total_bytes += size repo.ui.debug('%d files, %d bytes to transfer\n' % (len(entries), total_bytes)) svfs = repo.svfs oldaudit = svfs.mustaudit debugflag = repo.ui.debugflag svfs.mustaudit = False def emitrevlogdata(): try: for name, size in entries: if debugflag: repo.ui.debug('sending %s (%d bytes)\n' % (name, size)) # partially encode name over the wire for backwards compat yield '%s\0%d\n' % (store.encodedir(name), size) if size <= 65536: with svfs(name, 'rb') as fp: yield fp.read(size) else: for chunk in util.filechunkiter(svfs(name), limit=size): yield chunk finally: svfs.mustaudit = oldaudit return len(entries), total_bytes, emitrevlogdata() def generatev1wireproto(repo): """Emit content for version 1 of streaming clone suitable for the wire. This is the data output from ``generatev1()`` with a header line indicating file count and byte size. """ filecount, bytecount, it = generatev1(repo) yield '%d %d\n' % (filecount, bytecount) for chunk in it: yield chunk def generatebundlev1(repo, compression='UN'): """Emit content for version 1 of a stream clone bundle. The first 4 bytes of the output ("HGS1") denote this as stream clone bundle version 1. The next 2 bytes indicate the compression type. Only "UN" is currently supported. The next 16 bytes are two 64-bit big endian unsigned integers indicating file count and byte count, respectively. The next 2 bytes is a 16-bit big endian unsigned short declaring the length of the requirements string, including a trailing \0. 
The following N bytes are the requirements string, which is ASCII containing a comma-delimited list of repo requirements that are needed to support the data. The remaining content is the output of ``generatev1()`` (which may be compressed in the future). Returns a tuple of (requirements, data generator). """ if compression != 'UN': raise ValueError('we do not support the compression argument yet') requirements = repo.requirements & repo.supportedformats requires = ','.join(sorted(requirements)) def gen(): yield 'HGS1' yield compression filecount, bytecount, it = generatev1(repo) repo.ui.status(_('writing %d bytes for %d files\n') % (bytecount, filecount)) yield struct.pack('>QQ', filecount, bytecount) yield struct.pack('>H', len(requires) + 1) yield requires + '\0' # This is where we'll add compression in the future. assert compression == 'UN' seen = 0 repo.ui.progress(_('bundle'), 0, total=bytecount) for chunk in it: seen += len(chunk) repo.ui.progress(_('bundle'), seen, total=bytecount) yield chunk repo.ui.progress(_('bundle'), None) return requirements, gen() def consumev1(repo, fp, filecount, bytecount): """Apply the contents from version 1 of a streaming clone file handle. This takes the output from "streamout" and applies it to the specified repository. Like "streamout," the status line added by the wire protocol is not handled by this function. 
""" with repo.lock(): repo.ui.status(_('%d files to transfer, %s of data\n') % (filecount, util.bytecount(bytecount))) handled_bytes = 0 repo.ui.progress(_('clone'), 0, total=bytecount) start = time.time() with repo.transaction('clone'): with repo.svfs.backgroundclosing(repo.ui, expectedcount=filecount): for i in xrange(filecount): # XXX doesn't support '\n' or '\r' in filenames l = fp.readline() try: name, size = l.split('\0', 1) size = int(size) except (ValueError, TypeError): raise error.ResponseError( _('unexpected response from remote server:'), l) if repo.ui.debugflag: repo.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size))) # for backwards compat, name was partially encoded path = store.decodedir(name) with repo.svfs(path, 'w', backgroundclose=True) as ofp: for chunk in util.filechunkiter(fp, limit=size): handled_bytes += len(chunk) repo.ui.progress(_('clone'), handled_bytes, total=bytecount) ofp.write(chunk) # Writing straight to files circumvented the inmemory caches repo.invalidate() elapsed = time.time() - start if elapsed <= 0: elapsed = 0.001 repo.ui.progress(_('clone'), None) repo.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') % (util.bytecount(bytecount), elapsed, util.bytecount(bytecount / elapsed))) def readbundle1header(fp): compression = fp.read(2) if compression != 'UN': raise error.Abort(_('only uncompressed stream clone bundles are ' 'supported; got %s') % compression) filecount, bytecount = struct.unpack('>QQ', fp.read(16)) requireslen = struct.unpack('>H', fp.read(2))[0] requires = fp.read(requireslen) if not requires.endswith('\0'): raise error.Abort(_('malformed stream clone bundle: ' 'requirements not properly encoded')) requirements = set(requires.rstrip('\0').split(',')) return filecount, bytecount, requirements def applybundlev1(repo, fp): """Apply the content from a stream clone bundle version 1. We assume the 4 byte header has been read and validated and the file handle is at the 2 byte compression identifier. 
""" if len(repo): raise error.Abort(_('cannot apply stream clone bundle on non-empty ' 'repo')) filecount, bytecount, requirements = readbundle1header(fp) missingreqs = requirements - repo.supportedformats if missingreqs: raise error.Abort(_('unable to apply stream clone: ' 'unsupported format: %s') % ', '.join(sorted(missingreqs))) consumev1(repo, fp, filecount, bytecount) class streamcloneapplier(object): """Class to manage applying streaming clone bundles. We need to wrap ``applybundlev1()`` in a dedicated type to enable bundle readers to perform bundle type-specific functionality. """ def __init__(self, fh): self._fh = fh def apply(self, repo): return applybundlev1(repo, self._fh) mercurial-3.7.3/mercurial/manifest.c0000644000175000017500000006056612676531525017101 0ustar mpmmpm00000000000000/* * manifest.c - manifest type that does on-demand parsing. * * Copyright 2015, Google Inc. * * This software may be used and distributed according to the terms of * the GNU General Public License, incorporated herein by reference. 
*/ #include #include #include #include #include "util.h" #define DEFAULT_LINES 100000 typedef struct { char *start; Py_ssize_t len; /* length of line including terminal newline */ char hash_suffix; bool from_malloc; bool deleted; } line; typedef struct { PyObject_HEAD PyObject *pydata; line *lines; int numlines; /* number of line entries */ int livelines; /* number of non-deleted lines */ int maxlines; /* allocated number of lines */ bool dirty; } lazymanifest; #define MANIFEST_OOM -1 #define MANIFEST_NOT_SORTED -2 #define MANIFEST_MALFORMED -3 /* defined in parsers.c */ PyObject *unhexlify(const char *str, int len); /* get the length of the path for a line */ static size_t pathlen(line *l) { return strlen(l->start); } /* get the node value of a single line */ static PyObject *nodeof(line *l) { char *s = l->start; ssize_t llen = pathlen(l); PyObject *hash = unhexlify(s + llen + 1, 40); if (!hash) { return NULL; } if (l->hash_suffix != '\0') { char newhash[21]; memcpy(newhash, PyString_AsString(hash), 20); Py_DECREF(hash); newhash[20] = l->hash_suffix; hash = PyString_FromStringAndSize(newhash, 21); } return hash; } /* get the node hash and flags of a line as a tuple */ static PyObject *hashflags(line *l) { char *s = l->start; size_t plen = pathlen(l); PyObject *hash = nodeof(l); /* 40 for hash, 1 for null byte, 1 for newline */ size_t hplen = plen + 42; Py_ssize_t flen = l->len - hplen; PyObject *flags; PyObject *tup; if (!hash) return NULL; flags = PyString_FromStringAndSize(s + hplen - 1, flen); if (!flags) { Py_DECREF(hash); return NULL; } tup = PyTuple_Pack(2, hash, flags); Py_DECREF(flags); Py_DECREF(hash); return tup; } /* if we're about to run out of space in the line index, add more */ static bool realloc_if_full(lazymanifest *self) { if (self->numlines == self->maxlines) { self->maxlines *= 2; self->lines = realloc(self->lines, self->maxlines * sizeof(line)); } return !!self->lines; } /* * Find the line boundaries in the manifest that 'data' points to and 
store * information about each line in 'self'. */ static int find_lines(lazymanifest *self, char *data, Py_ssize_t len) { char *prev = NULL; while (len > 0) { line *l; char *next = memchr(data, '\n', len); if (!next) { return MANIFEST_MALFORMED; } next++; /* advance past newline */ if (!realloc_if_full(self)) { return MANIFEST_OOM; /* no memory */ } if (prev && strcmp(prev, data) > -1) { /* This data isn't sorted, so we have to abort. */ return MANIFEST_NOT_SORTED; } l = self->lines + ((self->numlines)++); l->start = data; l->len = next - data; l->hash_suffix = '\0'; l->from_malloc = false; l->deleted = false; len = len - l->len; prev = data; data = next; } self->livelines = self->numlines; return 0; } static int lazymanifest_init(lazymanifest *self, PyObject *args) { char *data; Py_ssize_t len; int err, ret; PyObject *pydata; if (!PyArg_ParseTuple(args, "S", &pydata)) { return -1; } err = PyString_AsStringAndSize(pydata, &data, &len); self->dirty = false; if (err == -1) return -1; self->pydata = pydata; Py_INCREF(self->pydata); Py_BEGIN_ALLOW_THREADS self->lines = malloc(DEFAULT_LINES * sizeof(line)); self->maxlines = DEFAULT_LINES; self->numlines = 0; if (!self->lines) ret = MANIFEST_OOM; else ret = find_lines(self, data, len); Py_END_ALLOW_THREADS switch (ret) { case 0: break; case MANIFEST_OOM: PyErr_NoMemory(); break; case MANIFEST_NOT_SORTED: PyErr_Format(PyExc_ValueError, "Manifest lines not in sorted order."); break; case MANIFEST_MALFORMED: PyErr_Format(PyExc_ValueError, "Manifest did not end in a newline."); break; default: PyErr_Format(PyExc_ValueError, "Unknown problem parsing manifest."); } return ret == 0 ? 
0 : -1; } static void lazymanifest_dealloc(lazymanifest *self) { /* free any extra lines we had to allocate */ int i; for (i = 0; i < self->numlines; i++) { if (self->lines[i].from_malloc) { free(self->lines[i].start); } } if (self->lines) { free(self->lines); self->lines = NULL; } if (self->pydata) { Py_DECREF(self->pydata); self->pydata = NULL; } PyObject_Del(self); } /* iteration support */ typedef struct { PyObject_HEAD lazymanifest *m; Py_ssize_t pos; } lmIter; static void lmiter_dealloc(PyObject *o) { lmIter *self = (lmIter *)o; Py_DECREF(self->m); PyObject_Del(self); } static line *lmiter_nextline(lmIter *self) { do { self->pos++; if (self->pos >= self->m->numlines) { return NULL; } /* skip over deleted manifest entries */ } while (self->m->lines[self->pos].deleted); return self->m->lines + self->pos; } static PyObject *lmiter_iterentriesnext(PyObject *o) { size_t pl; line *l; Py_ssize_t consumed; PyObject *ret = NULL, *path = NULL, *hash = NULL, *flags = NULL; l = lmiter_nextline((lmIter *)o); if (!l) { goto done; } pl = pathlen(l); path = PyString_FromStringAndSize(l->start, pl); hash = nodeof(l); consumed = pl + 41; flags = PyString_FromStringAndSize(l->start + consumed, l->len - consumed - 1); if (!path || !hash || !flags) { goto done; } ret = PyTuple_Pack(3, path, hash, flags); done: Py_XDECREF(path); Py_XDECREF(hash); Py_XDECREF(flags); return ret; } static PyTypeObject lazymanifestEntriesIterator = { PyObject_HEAD_INIT(NULL) 0, /*ob_size */ "parsers.lazymanifest.entriesiterator", /*tp_name */ sizeof(lmIter), /*tp_basicsize */ 0, /*tp_itemsize */ lmiter_dealloc, /*tp_dealloc */ 0, /*tp_print */ 0, /*tp_getattr */ 0, /*tp_setattr */ 0, /*tp_compare */ 0, /*tp_repr */ 0, /*tp_as_number */ 0, /*tp_as_sequence */ 0, /*tp_as_mapping */ 0, /*tp_hash */ 0, /*tp_call */ 0, /*tp_str */ 0, /*tp_getattro */ 0, /*tp_setattro */ 0, /*tp_as_buffer */ /* tp_flags: Py_TPFLAGS_HAVE_ITER tells python to use tp_iter and tp_iternext fields. 
*/ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_ITER, "Iterator for 3-tuples in a lazymanifest.", /* tp_doc */ 0, /* tp_traverse */ 0, /* tp_clear */ 0, /* tp_richcompare */ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter: __iter__() method */ lmiter_iterentriesnext, /* tp_iternext: next() method */ }; static PyObject *lmiter_iterkeysnext(PyObject *o) { size_t pl; line *l = lmiter_nextline((lmIter *)o); if (!l) { return NULL; } pl = pathlen(l); return PyString_FromStringAndSize(l->start, pl); } static PyTypeObject lazymanifestKeysIterator = { PyObject_HEAD_INIT(NULL) 0, /*ob_size */ "parsers.lazymanifest.keysiterator", /*tp_name */ sizeof(lmIter), /*tp_basicsize */ 0, /*tp_itemsize */ lmiter_dealloc, /*tp_dealloc */ 0, /*tp_print */ 0, /*tp_getattr */ 0, /*tp_setattr */ 0, /*tp_compare */ 0, /*tp_repr */ 0, /*tp_as_number */ 0, /*tp_as_sequence */ 0, /*tp_as_mapping */ 0, /*tp_hash */ 0, /*tp_call */ 0, /*tp_str */ 0, /*tp_getattro */ 0, /*tp_setattro */ 0, /*tp_as_buffer */ /* tp_flags: Py_TPFLAGS_HAVE_ITER tells python to use tp_iter and tp_iternext fields. 
*/ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_ITER, "Keys iterator for a lazymanifest.", /* tp_doc */ 0, /* tp_traverse */ 0, /* tp_clear */ 0, /* tp_richcompare */ 0, /* tp_weaklistoffset */ PyObject_SelfIter, /* tp_iter: __iter__() method */ lmiter_iterkeysnext, /* tp_iternext: next() method */ }; static lazymanifest *lazymanifest_copy(lazymanifest *self); static PyObject *lazymanifest_getentriesiter(lazymanifest *self) { lmIter *i = NULL; lazymanifest *t = lazymanifest_copy(self); if (!t) { PyErr_NoMemory(); return NULL; } i = PyObject_New(lmIter, &lazymanifestEntriesIterator); if (i) { i->m = t; i->pos = -1; } else { Py_DECREF(t); PyErr_NoMemory(); } return (PyObject *)i; } static PyObject *lazymanifest_getkeysiter(lazymanifest *self) { lmIter *i = NULL; lazymanifest *t = lazymanifest_copy(self); if (!t) { PyErr_NoMemory(); return NULL; } i = PyObject_New(lmIter, &lazymanifestKeysIterator); if (i) { i->m = t; i->pos = -1; } else { Py_DECREF(t); PyErr_NoMemory(); } return (PyObject *)i; } /* __getitem__ and __setitem__ support */ static Py_ssize_t lazymanifest_size(lazymanifest *self) { return self->livelines; } static int linecmp(const void *left, const void *right) { return strcmp(((const line *)left)->start, ((const line *)right)->start); } static PyObject *lazymanifest_getitem(lazymanifest *self, PyObject *key) { line needle; line *hit; if (!PyString_Check(key)) { PyErr_Format(PyExc_TypeError, "getitem: manifest keys must be a string."); return NULL; } needle.start = PyString_AsString(key); hit = bsearch(&needle, self->lines, self->numlines, sizeof(line), &linecmp); if (!hit || hit->deleted) { PyErr_Format(PyExc_KeyError, "No such manifest entry."); return NULL; } return hashflags(hit); } static int lazymanifest_delitem(lazymanifest *self, PyObject *key) { line needle; line *hit; if (!PyString_Check(key)) { PyErr_Format(PyExc_TypeError, "delitem: manifest keys must be a string."); return -1; } needle.start = PyString_AsString(key); hit = bsearch(&needle, 
self->lines, self->numlines, sizeof(line), &linecmp); if (!hit || hit->deleted) { PyErr_Format(PyExc_KeyError, "Tried to delete nonexistent manifest entry."); return -1; } self->dirty = true; hit->deleted = true; self->livelines--; return 0; } /* Do a binary search for the insertion point for new, creating the * new entry if needed. */ static int internalsetitem(lazymanifest *self, line *new) { int start = 0, end = self->numlines; while (start < end) { int pos = start + (end - start) / 2; int c = linecmp(new, self->lines + pos); if (c < 0) end = pos; else if (c > 0) start = pos + 1; else { if (self->lines[pos].deleted) self->livelines++; if (self->lines[pos].from_malloc) free(self->lines[pos].start); start = pos; goto finish; } } /* being here means we need to do an insert */ if (!realloc_if_full(self)) { PyErr_NoMemory(); return -1; } memmove(self->lines + start + 1, self->lines + start, (self->numlines - start) * sizeof(line)); self->numlines++; self->livelines++; finish: self->lines[start] = *new; self->dirty = true; return 0; } static int lazymanifest_setitem( lazymanifest *self, PyObject *key, PyObject *value) { char *path; Py_ssize_t plen; PyObject *pyhash; Py_ssize_t hlen; char *hash; PyObject *pyflags; char *flags; Py_ssize_t flen; size_t dlen; char *dest; int i; line new; if (!PyString_Check(key)) { PyErr_Format(PyExc_TypeError, "setitem: manifest keys must be a string."); return -1; } if (!value) { return lazymanifest_delitem(self, key); } if (!PyTuple_Check(value) || PyTuple_Size(value) != 2) { PyErr_Format(PyExc_TypeError, "Manifest values must be a tuple of (node, flags)."); return -1; } if (PyString_AsStringAndSize(key, &path, &plen) == -1) { return -1; } pyhash = PyTuple_GetItem(value, 0); if (!PyString_Check(pyhash)) { PyErr_Format(PyExc_TypeError, "node must be a 20-byte string"); return -1; } hlen = PyString_Size(pyhash); /* Some parts of the codebase try and set 21 or 22 * byte "hash" values in order to perturb things for * status. 
We have to preserve at least the 21st * byte. Sigh. If there's a 22nd byte, we drop it on * the floor, which works fine. */ if (hlen != 20 && hlen != 21 && hlen != 22) { PyErr_Format(PyExc_TypeError, "node must be a 20-byte string"); return -1; } hash = PyString_AsString(pyhash); pyflags = PyTuple_GetItem(value, 1); if (!PyString_Check(pyflags) || PyString_Size(pyflags) > 1) { PyErr_Format(PyExc_TypeError, "flags must a 0 or 1 byte string"); return -1; } if (PyString_AsStringAndSize(pyflags, &flags, &flen) == -1) { return -1; } /* one null byte and one newline */ dlen = plen + 41 + flen + 1; dest = malloc(dlen); if (!dest) { PyErr_NoMemory(); return -1; } memcpy(dest, path, plen + 1); for (i = 0; i < 20; i++) { /* Cast to unsigned, so it will not get sign-extended when promoted * to int (as is done when passing to a variadic function) */ sprintf(dest + plen + 1 + (i * 2), "%02x", (unsigned char)hash[i]); } memcpy(dest + plen + 41, flags, flen); dest[plen + 41 + flen] = '\n'; new.start = dest; new.len = dlen; new.hash_suffix = '\0'; if (hlen > 20) { new.hash_suffix = hash[20]; } new.from_malloc = true; /* is `start` a pointer we allocated? */ new.deleted = false; /* is this entry deleted? */ if (internalsetitem(self, &new)) { return -1; } return 0; } static PyMappingMethods lazymanifest_mapping_methods = { (lenfunc)lazymanifest_size, /* mp_length */ (binaryfunc)lazymanifest_getitem, /* mp_subscript */ (objobjargproc)lazymanifest_setitem, /* mp_ass_subscript */ }; /* sequence methods (important or __contains__ builds an iterator) */ static int lazymanifest_contains(lazymanifest *self, PyObject *key) { line needle; line *hit; if (!PyString_Check(key)) { /* Our keys are always strings, so if the contains * check is for a non-string, just return false. 
*/ return 0; } needle.start = PyString_AsString(key); hit = bsearch(&needle, self->lines, self->numlines, sizeof(line), &linecmp); if (!hit || hit->deleted) { return 0; } return 1; } static PySequenceMethods lazymanifest_seq_meths = { (lenfunc)lazymanifest_size, /* sq_length */ 0, /* sq_concat */ 0, /* sq_repeat */ 0, /* sq_item */ 0, /* sq_slice */ 0, /* sq_ass_item */ 0, /* sq_ass_slice */ (objobjproc)lazymanifest_contains, /* sq_contains */ 0, /* sq_inplace_concat */ 0, /* sq_inplace_repeat */ }; /* Other methods (copy, diff, etc) */ static PyTypeObject lazymanifestType; /* If the manifest has changes, build the new manifest text and reindex it. */ static int compact(lazymanifest *self) { int i; ssize_t need = 0; char *data; line *src, *dst; PyObject *pydata; if (!self->dirty) return 0; for (i = 0; i < self->numlines; i++) { if (!self->lines[i].deleted) { need += self->lines[i].len; } } pydata = PyString_FromStringAndSize(NULL, need); if (!pydata) return -1; data = PyString_AsString(pydata); if (!data) { return -1; } src = self->lines; dst = self->lines; for (i = 0; i < self->numlines; i++, src++) { char *tofree = NULL; if (src->from_malloc) { tofree = src->start; } if (!src->deleted) { memcpy(data, src->start, src->len); *dst = *src; dst->start = data; dst->from_malloc = false; data += dst->len; dst++; } free(tofree); } Py_DECREF(self->pydata); self->pydata = pydata; self->numlines = self->livelines; self->dirty = false; return 0; } static PyObject *lazymanifest_text(lazymanifest *self) { if (compact(self) != 0) { PyErr_NoMemory(); return NULL; } Py_INCREF(self->pydata); return self->pydata; } static lazymanifest *lazymanifest_copy(lazymanifest *self) { lazymanifest *copy = NULL; if (compact(self) != 0) { goto nomem; } copy = PyObject_New(lazymanifest, &lazymanifestType); if (!copy) { goto nomem; } copy->numlines = self->numlines; copy->livelines = self->livelines; copy->dirty = false; copy->lines = malloc(self->maxlines *sizeof(line)); if (!copy->lines) { goto 
nomem; } memcpy(copy->lines, self->lines, self->numlines * sizeof(line)); copy->maxlines = self->maxlines; copy->pydata = self->pydata; Py_INCREF(copy->pydata); return copy; nomem: PyErr_NoMemory(); Py_XDECREF(copy); return NULL; } static lazymanifest *lazymanifest_filtercopy( lazymanifest *self, PyObject *matchfn) { lazymanifest *copy = NULL; int i; if (!PyCallable_Check(matchfn)) { PyErr_SetString(PyExc_TypeError, "matchfn must be callable"); return NULL; } /* compact ourselves first to avoid double-frees later when we * compact tmp so that it doesn't have random pointers to our * underlying from_malloc-data (self->pydata is safe) */ if (compact(self) != 0) { goto nomem; } copy = PyObject_New(lazymanifest, &lazymanifestType); if (!copy) { goto nomem; } copy->dirty = true; copy->lines = malloc(self->maxlines * sizeof(line)); if (!copy->lines) { goto nomem; } copy->maxlines = self->maxlines; copy->numlines = 0; copy->pydata = self->pydata; Py_INCREF(self->pydata); for (i = 0; i < self->numlines; i++) { PyObject *arglist = NULL, *result = NULL; arglist = Py_BuildValue("(s)", self->lines[i].start); if (!arglist) { return NULL; } result = PyObject_CallObject(matchfn, arglist); Py_DECREF(arglist); /* if the callback raised an exception, just let it * through and give up */ if (!result) { free(copy->lines); Py_DECREF(self->pydata); return NULL; } if (PyObject_IsTrue(result)) { assert(!(self->lines[i].from_malloc)); copy->lines[copy->numlines++] = self->lines[i]; } Py_DECREF(result); } copy->livelines = copy->numlines; return copy; nomem: PyErr_NoMemory(); Py_XDECREF(copy); return NULL; } static PyObject *lazymanifest_diff(lazymanifest *self, PyObject *args) { lazymanifest *other; PyObject *pyclean = NULL; bool listclean; PyObject *emptyTup = NULL, *ret = NULL; PyObject *es; int sneedle = 0, oneedle = 0; if (!PyArg_ParseTuple(args, "O!|O", &lazymanifestType, &other, &pyclean)) { return NULL; } listclean = (!pyclean) ? 
false : PyObject_IsTrue(pyclean); es = PyString_FromString(""); if (!es) { goto nomem; } emptyTup = PyTuple_Pack(2, Py_None, es); Py_DECREF(es); if (!emptyTup) { goto nomem; } ret = PyDict_New(); if (!ret) { goto nomem; } while (sneedle != self->numlines || oneedle != other->numlines) { line *left = self->lines + sneedle; line *right = other->lines + oneedle; int result; PyObject *key; PyObject *outer; /* If we're looking at a deleted entry and it's not * the end of the manifest, just skip it. */ if (left->deleted && sneedle < self->numlines) { sneedle++; continue; } if (right->deleted && oneedle < other->numlines) { oneedle++; continue; } /* if we're at the end of either manifest, then we * know the remaining items are adds so we can skip * the strcmp. */ if (sneedle == self->numlines) { result = 1; } else if (oneedle == other->numlines) { result = -1; } else { result = linecmp(left, right); } key = result <= 0 ? PyString_FromString(left->start) : PyString_FromString(right->start); if (!key) goto nomem; if (result < 0) { PyObject *l = hashflags(left); if (!l) { goto nomem; } outer = PyTuple_Pack(2, l, emptyTup); Py_DECREF(l); if (!outer) { goto nomem; } PyDict_SetItem(ret, key, outer); Py_DECREF(outer); sneedle++; } else if (result > 0) { PyObject *r = hashflags(right); if (!r) { goto nomem; } outer = PyTuple_Pack(2, emptyTup, r); Py_DECREF(r); if (!outer) { goto nomem; } PyDict_SetItem(ret, key, outer); Py_DECREF(outer); oneedle++; } else { /* file exists in both manifests */ if (left->len != right->len || memcmp(left->start, right->start, left->len) || left->hash_suffix != right->hash_suffix) { PyObject *l = hashflags(left); PyObject *r; if (!l) { goto nomem; } r = hashflags(right); if (!r) { Py_DECREF(l); goto nomem; } outer = PyTuple_Pack(2, l, r); Py_DECREF(l); Py_DECREF(r); if (!outer) { goto nomem; } PyDict_SetItem(ret, key, outer); Py_DECREF(outer); } else if (listclean) { PyDict_SetItem(ret, key, Py_None); } sneedle++; oneedle++; } Py_DECREF(key); } 
Py_DECREF(emptyTup); return ret; nomem: PyErr_NoMemory(); Py_XDECREF(ret); Py_XDECREF(emptyTup); return NULL; } static PyMethodDef lazymanifest_methods[] = { {"iterkeys", (PyCFunction)lazymanifest_getkeysiter, METH_NOARGS, "Iterate over file names in this lazymanifest."}, {"iterentries", (PyCFunction)lazymanifest_getentriesiter, METH_NOARGS, "Iterate over (path, nodeid, flags) typles in this lazymanifest."}, {"copy", (PyCFunction)lazymanifest_copy, METH_NOARGS, "Make a copy of this lazymanifest."}, {"filtercopy", (PyCFunction)lazymanifest_filtercopy, METH_O, "Make a copy of this manifest filtered by matchfn."}, {"diff", (PyCFunction)lazymanifest_diff, METH_VARARGS, "Compare this lazymanifest to another one."}, {"text", (PyCFunction)lazymanifest_text, METH_NOARGS, "Encode this manifest to text."}, {NULL}, }; static PyTypeObject lazymanifestType = { PyObject_HEAD_INIT(NULL) 0, /* ob_size */ "parsers.lazymanifest", /* tp_name */ sizeof(lazymanifest), /* tp_basicsize */ 0, /* tp_itemsize */ (destructor)lazymanifest_dealloc, /* tp_dealloc */ 0, /* tp_print */ 0, /* tp_getattr */ 0, /* tp_setattr */ 0, /* tp_compare */ 0, /* tp_repr */ 0, /* tp_as_number */ &lazymanifest_seq_meths, /* tp_as_sequence */ &lazymanifest_mapping_methods, /* tp_as_mapping */ 0, /* tp_hash */ 0, /* tp_call */ 0, /* tp_str */ 0, /* tp_getattro */ 0, /* tp_setattro */ 0, /* tp_as_buffer */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_SEQUENCE_IN, /* tp_flags */ "TODO(augie)", /* tp_doc */ 0, /* tp_traverse */ 0, /* tp_clear */ 0, /* tp_richcompare */ 0, /* tp_weaklistoffset */ (getiterfunc)lazymanifest_getkeysiter, /* tp_iter */ 0, /* tp_iternext */ lazymanifest_methods, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ 0, /* tp_dict */ 0, /* tp_descr_get */ 0, /* tp_descr_set */ 0, /* tp_dictoffset */ (initproc)lazymanifest_init, /* tp_init */ 0, /* tp_alloc */ }; void manifest_module_init(PyObject * mod) { lazymanifestType.tp_new = PyType_GenericNew; if 
(PyType_Ready(&lazymanifestType) < 0) return; Py_INCREF(&lazymanifestType); PyModule_AddObject(mod, "lazymanifest", (PyObject *)&lazymanifestType); } mercurial-3.7.3/mercurial/templates/0000755000175000017500000000000012676531544017111 5ustar mpmmpm00000000000000mercurial-3.7.3/mercurial/templates/rss/0000755000175000017500000000000012676531544017720 5ustar mpmmpm00000000000000mercurial-3.7.3/mercurial/templates/rss/filelogentry.tmpl0000644000175000017500000000047212676531525023323 0ustar mpmmpm00000000000000 {desc|strip|firstline|strip|escape} {urlbase}{url|urlescape}log{node|short}/{file|urlescape} {author|obfuscate} {date|rfc822date} mercurial-3.7.3/mercurial/templates/rss/header.tmpl0000644000175000017500000000022512676531525022044 0ustar mpmmpm00000000000000 {urlbase}{url|urlescape} en-us mercurial-3.7.3/mercurial/templates/rss/changelogentry.tmpl0000644000175000017500000000244512676531525023633 0ustar mpmmpm00000000000000 {inbranch%"{if(name, '[{name|escape}] ')}"}{branches%"{if(name, '[{name|escape}] ')}"}{desc|strip|firstline|strip|escape} {urlbase}{url|urlescape}rev/{node|short} {urlbase}{url|urlescape}rev/{node|short} changeset {node|short} branch {inbranch%"{name|escape}"}{branches%"{name|escape}"} bookmark {bookmarks%"{name|escape}"} tag {tags%"{name|escape}"} user {author|obfuscate} description {desc|strip|escape|websub|addbreaks|nonempty} files {files} ]]> {author|obfuscate} {date|rfc822date} mercurial-3.7.3/mercurial/templates/rss/bookmarks.tmpl0000644000175000017500000000024412676531525022605 0ustar mpmmpm00000000000000{header} {repo|escape}: bookmarks {repo|escape} bookmark history {entries%bookmarkentry} mercurial-3.7.3/mercurial/templates/rss/map0000644000175000017500000000102212676531525020412 0ustar mpmmpm00000000000000default = 'changelog' mimetype = 'text/xml; charset={encoding}' header = header.tmpl changelog = changelog.tmpl changelogentry = changelogentry.tmpl filelog = filelog.tmpl filelogentry = filelogentry.tmpl tags = tags.tmpl 
tagentry = tagentry.tmpl bookmarks = bookmarks.tmpl bookmarkentry = bookmarkentry.tmpl branches = branches.tmpl branchentry = branchentry.tmpl error = error.tmpl filedifflink = '{file|escape}
    ' fileellipses = '{file|escape}
    ' filenodelink = '{file|escape}
    ' filenolink = '{file|escape}
    ' mercurial-3.7.3/mercurial/templates/rss/changelog.tmpl0000644000175000017500000000023312676531525022542 0ustar mpmmpm00000000000000{header} {repo|escape} Changelog {repo|escape} Changelog {entries%changelogentry} mercurial-3.7.3/mercurial/templates/rss/tags.tmpl0000644000175000017500000000023212676531525021550 0ustar mpmmpm00000000000000{header} {repo|escape}: tags {repo|escape} tag history {entriesnotip%tagentry} mercurial-3.7.3/mercurial/templates/rss/branchentry.tmpl0000644000175000017500000000034012676531525023131 0ustar mpmmpm00000000000000 {branch|escape} {urlbase}{url|urlescape}rev/{node|short} {date|rfc822date} mercurial-3.7.3/mercurial/templates/rss/error.tmpl0000644000175000017500000000036112676531525021746 0ustar mpmmpm00000000000000{header} Error Error Error {error|escape} https://mercurial-scm.org/#error mercurial-3.7.3/mercurial/templates/rss/filelog.tmpl0000644000175000017500000000025612676531525022241 0ustar mpmmpm00000000000000{header} {repo|escape}: {file|escape} history {file|escape} revision history {entries%filelogentry} mercurial-3.7.3/mercurial/templates/rss/branches.tmpl0000644000175000017500000000023612676531525022403 0ustar mpmmpm00000000000000{header} {repo|escape}: branches {repo|escape} branch history {entries%branchentry} mercurial-3.7.3/mercurial/templates/rss/bookmarkentry.tmpl0000644000175000017500000000034412676531525023505 0ustar mpmmpm00000000000000 {bookmark|escape} {urlbase}{url|urlescape}rev/{node|short} {date|rfc822date} mercurial-3.7.3/mercurial/templates/rss/tagentry.tmpl0000644000175000017500000000033212676531525022450 0ustar mpmmpm00000000000000 {tag|escape} {urlbase}{url|urlescape}rev/{node|short} {date|rfc822date} mercurial-3.7.3/mercurial/templates/map-cmdline.phases0000644000175000017500000000037012676531525022503 0ustar mpmmpm00000000000000%include map-cmdline.default changeset = '{cset}{branches}{bookmarks}{tags}{lphase}{parents}{user}{ldate}{summary}\n' changeset_verbose = 
'{cset}{branches}{bookmarks}{tags}{lphase}{parents}{user}{ldate}{lfiles}{lfile_copies_switch}{description}\n' mercurial-3.7.3/mercurial/templates/map-cmdline.compact0000644000175000017500000000071412676531525022650 0ustar mpmmpm00000000000000changeset = '{rev}{tags}{bookmarks}{parents} {node|short} {date|isodate} {author|user}\n {desc|firstline|strip}\n\n' changeset_quiet = '{rev}:{node|short}\n' changeset_verbose = '{rev}{tags}{parents} {node|short} {date|isodate} {author}\n {desc|strip}\n\n' start_tags = '[' tag = '{tag},' last_tag = '{tag}]' start_parents = ':' parent = '{rev},' last_parent = '{rev}' start_bookmarks = '[' bookmark = '{bookmark},' last_bookmark = '{bookmark}]' mercurial-3.7.3/mercurial/templates/map-cmdline.xml0000644000175000017500000000311412676531525022017 0ustar mpmmpm00000000000000docheader = '\n\n' docfooter = '\n' changeset = '\n{branches}{bookmarks}{tags}{parents}{author|person|xmlescape}\n{date|rfc3339date}\n{desc|xmlescape}\n\n' changeset_verbose = '\n{branches}{bookmarks}{tags}{parents}{author|person|xmlescape}\n{date|rfc3339date}\n{desc|xmlescape}\n\n{file_adds}{file_dels}{file_mods}\n{file_copies}\n' changeset_debug = '\n{branches}{bookmarks}{tags}{parents}{author|person|xmlescape}\n{date|rfc3339date}\n{desc|xmlescape}\n\n{file_adds}{file_dels}{file_mods}\n{file_copies}{extras}\n' file_add = '{file_add|xmlescape}\n' file_mod = '{file_mod|xmlescape}\n' file_del = '{file_del|xmlescape}\n' start_file_copies = '\n' file_copy = '{name|xmlescape}\n' end_file_copies = '\n' parent = '\n' branch = '{branch|xmlescape}\n' tag = '{tag|xmlescape}\n' bookmark = '{bookmark|xmlescape}\n' extra = '{value|xmlescape}\n' mercurial-3.7.3/mercurial/templates/atom/0000755000175000017500000000000012676531544020051 5ustar mpmmpm00000000000000mercurial-3.7.3/mercurial/templates/atom/header.tmpl0000644000175000017500000000012612676531525022175 0ustar mpmmpm00000000000000 
mercurial-3.7.3/mercurial/templates/atom/changelogentry.tmpl0000644000175000017500000000256612676531525023770 0ustar mpmmpm00000000000000 {inbranch%"{if(name, '[{name|escape}] ')}"}{branches%"{if(name, '[{name|escape}] ')}"}{desc|strip|firstline|strip|escape|nonempty} {urlbase}{url|urlescape}#changeset-{node} {author|person|escape} {author|email|obfuscate} {date|rfc3339date} {date|rfc3339date}
    changeset {node|short}
    branch {inbranch%"{name|escape}"}{branches%"{name|escape}"}
    bookmark {bookmarks%"{name|escape}"}
    tag {tags%"{name|escape}"}
    user {author|obfuscate}
    description {desc|strip|escape|websub|addbreaks|nonempty}
    files {files}
    mercurial-3.7.3/mercurial/templates/atom/bookmarks.tmpl0000644000175000017500000000056512676531525022744 0ustar mpmmpm00000000000000{header} {urlbase}{url|urlescape} {repo|escape}: bookmarks {repo|escape} bookmark history Mercurial SCM {latestentry%feedupdated} {entries%bookmarkentry}
    mercurial-3.7.3/mercurial/templates/atom/map0000644000175000017500000000112412676531525020546 0ustar mpmmpm00000000000000default = 'changelog' feedupdated = '{date|rfc3339date}' mimetype = 'application/atom+xml; charset={encoding}' header = header.tmpl changelog = changelog.tmpl changelogentry = changelogentry.tmpl filelog = filelog.tmpl filelogentry = filelogentry.tmpl tags = tags.tmpl tagentry = tagentry.tmpl bookmarks = bookmarks.tmpl bookmarkentry = bookmarkentry.tmpl branches = branches.tmpl branchentry = branchentry.tmpl error = error.tmpl filedifflink = '{file|escape}
    ' fileellipses = '{file|escape}
    ' filenodelink = '{file|escape}
    ' filenolink = '{file|escape}
    ' mercurial-3.7.3/mercurial/templates/atom/changelog.tmpl0000644000175000017500000000043212676531525022674 0ustar mpmmpm00000000000000{header} {urlbase}{url|urlescape} {repo|escape} Changelog {latestentry%feedupdated} {entries%changelogentry} mercurial-3.7.3/mercurial/templates/atom/tags.tmpl0000644000175000017500000000054112676531525021704 0ustar mpmmpm00000000000000{header} {urlbase}{url|urlescape} {repo|escape}: tags {repo|escape} tag history Mercurial SCM {latestentry%feedupdated} {entriesnotip%tagentry} mercurial-3.7.3/mercurial/templates/atom/branchentry.tmpl0000644000175000017500000000052112676531525023263 0ustar mpmmpm00000000000000 {branch|escape} {urlbase}{url|urlescape}#branch-{node} {date|rfc3339date} {date|rfc3339date} mercurial-3.7.3/mercurial/templates/atom/error.tmpl0000644000175000017500000000074412676531525022104 0ustar mpmmpm00000000000000{header} {urlbase}{url|urlescape} Error 1970-01-01T00:00:00+00:00 Error https://mercurial-scm.org/#error mercurial 1970-01-01T00:00:00+00:00 {error|escape} mercurial-3.7.3/mercurial/templates/atom/filelog.tmpl0000644000175000017500000000041112676531525022363 0ustar mpmmpm00000000000000{header} {urlbase}{url|urlescape}atom-log/tip/{file|escape} {repo|escape}: {file|escape} history {latestentry%feedupdated} {entries%changelogentry} mercurial-3.7.3/mercurial/templates/atom/branches.tmpl0000644000175000017500000000054712676531525022541 0ustar mpmmpm00000000000000{header} {urlbase}{url|urlescape} {repo|escape}: branches {repo|escape} branch history Mercurial SCM {latestentry%feedupdated} {entries%branchentry} mercurial-3.7.3/mercurial/templates/atom/bookmarkentry.tmpl0000644000175000017500000000050112676531525023631 0ustar mpmmpm00000000000000 {bookmark|escape} {urlbase}{url|urlescape}#bookmark-{node} {date|rfc3339date} {date|rfc3339date} {bookmark|strip|escape} mercurial-3.7.3/mercurial/templates/atom/tagentry.tmpl0000644000175000017500000000046212676531525022605 0ustar mpmmpm00000000000000 {tag|escape} 
{urlbase}{url|urlescape}#tag-{node} {date|rfc3339date} {date|rfc3339date} {tag|strip|escape} mercurial-3.7.3/mercurial/templates/raw/0000755000175000017500000000000012676531544017702 5ustar mpmmpm00000000000000mercurial-3.7.3/mercurial/templates/raw/search.tmpl0000644000175000017500000000015712676531525022047 0ustar mpmmpm00000000000000{header} # HG changesets search # Node ID {node} # Query "{query}" # Mode {modedesc} {entries%changelogentry} mercurial-3.7.3/mercurial/templates/raw/filediff.tmpl0000644000175000017500000000003312676531525022343 0ustar mpmmpm00000000000000{header} {diff} {footer} mercurial-3.7.3/mercurial/templates/raw/manifest.tmpl0000644000175000017500000000007212676531525022404 0ustar mpmmpm00000000000000{header} {dentries%direntry}{fentries%fileentry} {footer} mercurial-3.7.3/mercurial/templates/raw/notfound.tmpl0000644000175000017500000000005412676531525022432 0ustar mpmmpm00000000000000{header} error: repository {repo} not found mercurial-3.7.3/mercurial/templates/raw/fileannotate.tmpl0000644000175000017500000000005412676531525023247 0ustar mpmmpm00000000000000{header} {annotate%annotateline} {footer} mercurial-3.7.3/mercurial/templates/raw/logentry.tmpl0000644000175000017500000000025412676531525022443 0ustar mpmmpm00000000000000changeset: {node} revision: {rev} user: {author} date: {date|rfc822date} summary: {desc} {branches%branchname}{tags%tagname}{bookmarks%bookmarkname} mercurial-3.7.3/mercurial/templates/raw/map0000644000175000017500000000227312676531525020405 0ustar mpmmpm00000000000000default = 'shortlog' shortlog = "'raw' is not a browsable style" changelog = changelog.tmpl changelogentry = logentry.tmpl search = search.tmpl searchentry = logentry.tmpl mimetype = 'text/plain; charset={encoding}' header = '' footer = '' changeset = changeset.tmpl difflineplus = '{line}' difflineminus = '{line}' difflineat = '{line}' diffline = '{line}' changesetparent = '# Parent {node}' changesetchild = '# Child {node}' filenodelink = '' filenolink = 
'' fileline = '{line}' diffblock = '{lines}' filediff = filediff.tmpl fileannotate = fileannotate.tmpl annotateline = '{author|user}@{rev}: {line}' manifest = manifest.tmpl direntry = 'drwxr-xr-x {basename}\n' fileentry = '{permissions|permissions} {size} {basename}\n' index = index.tmpl notfound = notfound.tmpl error = error.tmpl indexentry = '{url}\n' tags = '{entries%tagentry}' tagentry = '{tag} {node}\n' bookmarks = '{entries%bookmarkentry}' bookmarkentry = '{bookmark} {node}\n' branches = '{entries%branchentry}' branchentry = '{branch} {node} {status}\n' graph = graph.tmpl graphnode = graphnode.tmpl graphedge = graphedge.tmpl bookmarkname = 'bookmark: {name}\n' branchname = 'branch: {name}\n' tagname = 'tag: {name}\n' mercurial-3.7.3/mercurial/templates/raw/changelog.tmpl0000644000175000017500000000010312676531525022520 0ustar mpmmpm00000000000000{header} # HG changelog # Node ID {node} {entries%changelogentry} mercurial-3.7.3/mercurial/templates/raw/graphedge.tmpl0000644000175000017500000000010612676531525022522 0ustar mpmmpm00000000000000edge: ({col}, {row}) -> ({nextcol}, {nextrow}) (color {color}) mercurial-3.7.3/mercurial/templates/raw/changeset.tmpl0000644000175000017500000000017412676531525022542 0ustar mpmmpm00000000000000{header} # HG changeset patch # User {author} # Date {date|hgdate} # Node ID {node} {parent%changesetparent} {desc} {diff} mercurial-3.7.3/mercurial/templates/raw/error.tmpl0000644000175000017500000000003012676531525021721 0ustar mpmmpm00000000000000{header} error: {error} mercurial-3.7.3/mercurial/templates/raw/index.tmpl0000644000175000017500000000003612676531525021705 0ustar mpmmpm00000000000000{header} {entries%indexentry} mercurial-3.7.3/mercurial/templates/raw/graphnode.tmpl0000644000175000017500000000031112676531525022541 0ustar mpmmpm00000000000000changeset: {node} user: {user} date: {age} summary: {desc} {branches%branchname}{tags%tagname}{bookmarks%bookmarkname} node: ({col}, {row}) (color {color}) {edges%graphedge} 
mercurial-3.7.3/mercurial/templates/raw/graph.tmpl0000644000175000017500000000011412676531525021674 0ustar mpmmpm00000000000000{header} # HG graph # Node ID {node} # Rows shown {rows} {nodes%graphnode} mercurial-3.7.3/mercurial/templates/map-cmdline.bisect0000644000175000017500000000152112676531525022470 0ustar mpmmpm00000000000000%include map-cmdline.default changeset = '{cset}{lbisect}{branches}{bookmarks}{tags}{parents}{user}{ldate}{summary}\n' changeset_quiet = '{lshortbisect} {rev}:{node|short}\n' changeset_verbose = '{cset}{lbisect}{branches}{bookmarks}{tags}{parents}{user}{ldate}{lfiles}{lfile_copies_switch}{description}\n' changeset_debug = '{fullcset}{lbisect}{branches}{bookmarks}{tags}{lphase}{parents}{manifest}{user}{ldate}{lfile_mods}{lfile_adds}{lfile_dels}{lfile_copies_switch}{extras}{description}\n' # We take the zeroth word in order to omit "(implicit)" in the label bisectlabel = ' bisect.{word('0', bisect)}' lbisect ='{label("log.bisect{if(bisect, bisectlabel)}", "bisect: {bisect}\n")}' lshortbisect ='{label("log.bisect{if(bisect, bisectlabel)}", "{bisect|shortbisect}")}' mercurial-3.7.3/mercurial/templates/monoblue/0000755000175000017500000000000012676531544020731 5ustar mpmmpm00000000000000mercurial-3.7.3/mercurial/templates/monoblue/footer.tmpl0000644000175000017500000000104212676531525023121 0ustar mpmmpm00000000000000

    mercurial

    mercurial-3.7.3/mercurial/templates/monoblue/search.tmpl0000644000175000017500000000330412676531525023073 0ustar mpmmpm00000000000000{header} {repo|escape}: Search
    {entries} {footer} mercurial-3.7.3/mercurial/templates/monoblue/shortlog.tmpl0000644000175000017500000000443312676531525023473 0ustar mpmmpm00000000000000{header} {repo|escape}: shortlog
    {entries%shortlogentry}
    {changenav%navshort}
    {footer} mercurial-3.7.3/mercurial/templates/monoblue/summary.tmpl0000644000175000017500000000575312676531525023335 0ustar mpmmpm00000000000000{header} {repo|escape}: Summary
    name
    {repo|escape}
    description
    {desc}
    owner
    {owner|obfuscate}
    last change
    {lastchange|rfc822date}

    Changes

    {shortlog}
    ...

    Tags

    {tags}
    ...

    Bookmarks

    {bookmarks%bookmarkentry}
    ...

    Branches

    {branches%branchentry}
    ...
    {footer} mercurial-3.7.3/mercurial/templates/monoblue/filediff.tmpl0000644000175000017500000000533612676531525023405 0ustar mpmmpm00000000000000{header} {repo|escape}: diff {file|escape}

    {file|escape}

    {branch%filerevbranch}
    changeset {rev}
    {node|short}
    {parent%filediffparent} {child%filediffchild}
    {diff}
    {footer} mercurial-3.7.3/mercurial/templates/monoblue/header.tmpl0000644000175000017500000000075712676531525023067 0ustar mpmmpm00000000000000 mercurial-3.7.3/mercurial/templates/monoblue/manifest.tmpl0000644000175000017500000000434012676531525023435 0ustar mpmmpm00000000000000{header} {repo|escape}: files

    {path|escape} {inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag}

    {dentries%direntry} {fentries%fileentry}
    drwxr-xr-x [up]
    {footer} mercurial-3.7.3/mercurial/templates/monoblue/help.tmpl0000644000175000017500000000315312676531525022560 0ustar mpmmpm00000000000000{header} Help: {topic}
    {rstdoc(doc, "html")}
    {footer} mercurial-3.7.3/mercurial/templates/monoblue/notfound.tmpl0000644000175000017500000000354612676531525023472 0ustar mpmmpm00000000000000{header} Mercurial repository not found

    The specified repository "{repo|escape}" is unknown, sorry.

    Please go back to the main repository list page.

    {footer} mercurial-3.7.3/mercurial/templates/monoblue/fileannotate.tmpl0000644000175000017500000000616212676531525024304 0ustar mpmmpm00000000000000{header} {repo|escape}: {file|escape}@{node|short} (annotated)

    {file|escape}

    {date|rfc822date}

    author
    {author|obfuscate}
    date
    {date|rfc822date}
    {branch%filerevbranch}
    changeset {rev}
    {node|short}
    {parent%fileannotateparent} {child%fileannotatechild}
    permissions
    {permissions|permissions}

    {desc|strip|escape|websub|addbreaks|nonempty}

    {annotate%annotateline}
    {footer} mercurial-3.7.3/mercurial/templates/monoblue/changelogentry.tmpl0000644000175000017500000000103112676531525024632 0ustar mpmmpm00000000000000

    {desc|strip|firstline|escape|nonempty} {inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag}

    • {date|rfc822date}
    • by {author|obfuscate} [{date|rfc822date}] rev {rev}
    • {desc|strip|escape|websub|addbreaks|nonempty}
    mercurial-3.7.3/mercurial/templates/monoblue/bookmarks.tmpl0000644000175000017500000000324612676531525023623 0ustar mpmmpm00000000000000{header} {repo|escape}: Bookmarks
    {entries%bookmarkentry}
    {footer} mercurial-3.7.3/mercurial/templates/monoblue/map0000644000175000017500000003034212676531525021432 0ustar mpmmpm00000000000000default = 'summary' mimetype = 'text/html; charset={encoding}' header = header.tmpl footer = footer.tmpl search = search.tmpl changelog = changelog.tmpl summary = summary.tmpl error = error.tmpl notfound = notfound.tmpl help = help.tmpl helptopics = helptopics.tmpl helpentry = ' {if(basename, '{basename|escape}', '{topic|escape}')} {summary|escape} ' naventry = '{label|escape} ' navshortentry = '{label|escape} ' navgraphentry = '{label|escape} ' filenaventry = '{label|escape} ' filedifflink = '{file|escape} ' filenodelink = ' {file|escape} file | annotate | diff | comparison | revisions ' filenolink = ' {file|escape} file | annotate | diff | comparison | revisions ' nav = '{before%naventry} {after%naventry}' navshort = '{before%navshortentry}{after%navshortentry}' navgraph = '{before%navgraphentry}{after%navgraphentry}' filenav = '{before%filenaventry}{after%filenaventry}' fileellipses = '...' 
changelogentry = changelogentry.tmpl searchentry = changelogentry.tmpl changeset = changeset.tmpl manifest = manifest.tmpl direntry = ' drwxr-xr-x {basename|escape} {emptydirs|escape} files ' fileentry = ' {permissions|permissions} {date|isodate} {size} {basename|escape} file | revisions | annotate ' filerevision = filerevision.tmpl fileannotate = fileannotate.tmpl filediff = filediff.tmpl filecomparison = filecomparison.tmpl filelog = filelog.tmpl fileline = ' {strip(line|escape, '\r\n')}' annotateline = ' {author|user}@{rev} {linenumber} {line|escape} ' difflineplus = ' {strip(line|escape, '\r\n')}' difflineminus = ' {strip(line|escape, '\r\n')}' difflineat = ' {strip(line|escape, '\r\n')}' diffline = ' {strip(line|escape, '\r\n')}' comparisonblock =' {lines} ' comparisonline = ' {leftlinenumber} {leftline|escape} {rightlinenumber} {rightline|escape} ' changesetlink = '{node|short}' changelogparent = ' parent {rev}: {changesetlink} ' changesetbranch = '
    branch
    {name|escape}
    ' changesetparent = '
    parent {rev}
    {changesetlink}
    ' changesetparentdiff = '
    parent {rev}
    {changesetlink} {ifeq(node, basenode, '(current diff)', '({difffrom})')}
    ' difffrom = 'diff' filerevbranch = '
    branch
    {name|escape}
    ' filerevparent = '
    parent {rev}
    {rename%filerename}{node|short}
    ' filerename = '{file|escape}@' filelogrename = '| base' fileannotateparent = '
    parent {rev}
    {rename%filerename}{node|short}
    ' changelogchild = '
    child {rev}:
    {node|short}
    ' changesetchild = '
    child {rev}
    {node|short}
    ' filerevchild = '
    child {rev}
    {node|short}
    ' fileannotatechild = '
    child {rev}
    {node|short}
    ' tags = tags.tmpl tagentry = ' {date|rfc822date} {tag|escape} changeset | changelog | files ' bookmarks = bookmarks.tmpl bookmarkentry = ' {date|rfc822date} {bookmark|escape} changeset | changelog | files ' branches = branches.tmpl branchentry = ' {date|rfc822date} {branch|escape} changeset | changelog | files ' diffblock = '
    {lines}
    ' filediffparent = '
    parent {rev}
    {node|short}
    ' filecompparent = '
    parent {rev}
    {node|short}
    ' filelogparent = ' parent {rev}:  {node|short} ' filediffchild = '
    child {rev}
    {node|short}
    ' filecompchild = '
    child {rev}
    {node|short}
    ' filelogchild = ' child {rev}:  {node|short} ' shortlog = shortlog.tmpl tagtag = '{name|escape} ' branchtag = '{name|escape} ' inbranchtag = '{name|escape} ' bookmarktag = '{name|escape} ' shortlogentry = ' {date|rfc822date} {author|person} {desc|strip|firstline|escape|nonempty} {inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag} changeset | files ' filelogentry = ' {date|rfc822date} {author|person} {desc|strip|firstline|escape|nonempty} {inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag} file | diff | annotate {rename%filelogrename} ' archiveentry = '
  • {type|escape}
  • ' indexentry = ' {name|escape} {description} {contact|obfuscate} {lastchange|rfc822date} {archives%indexarchiveentry} {if(isdirectory, '', '' )} \n' indexarchiveentry = '{type|escape} ' index = index.tmpl urlparameter = '{separator}{name}={value|urlescape}' hiddenformentry = '' graph = graph.tmpl breadcrumb = '> {name|escape} ' mercurial-3.7.3/mercurial/templates/monoblue/changelog.tmpl0000644000175000017500000000334312676531525023560 0ustar mpmmpm00000000000000{header} {repo|escape}: changelog
    {entries%changelogentry}
    {changenav%nav}
    {footer} mercurial-3.7.3/mercurial/templates/monoblue/tags.tmpl0000644000175000017500000000320312676531525022562 0ustar mpmmpm00000000000000{header} {repo|escape}: Tags
    {entries%tagentry}
    {footer} mercurial-3.7.3/mercurial/templates/monoblue/filecomparison.tmpl0000644000175000017500000000630712676531525024646 0ustar mpmmpm00000000000000{header} {repo|escape}: comparison {file|escape}

    {file|escape}

    {branch%filerevbranch}
    changeset {rev}
    {node|short}
    {parent%filecompparent} {child%filecompchild}
    equal deleted inserted replaced
    {comparison}
    {leftrev}:{leftnode|short} {rightrev}:{rightnode|short}
    {footer} mercurial-3.7.3/mercurial/templates/monoblue/changeset.tmpl0000644000175000017500000000533512676531525023575 0ustar mpmmpm00000000000000{header} {repo|escape}: changeset {rev}:{node|short}

    {desc|strip|escape|firstline|nonempty} {inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag}

    {date|rfc822date}

    author
    {author|obfuscate}
    date
    {date|rfc822date}
    {branch%changesetbranch}
    changeset {rev}
    {node|short}
    {ifeq(count(parent), '2', parent%changesetparentdiff, parent%changesetparent)} {child%changesetchild}

    {desc|strip|escape|websub|addbreaks|nonempty}

    {files}
    {diff}
    {footer} mercurial-3.7.3/mercurial/templates/monoblue/helptopics.tmpl0000644000175000017500000000421212676531525023777 0ustar mpmmpm00000000000000{header} Help: {title}
    {topics % helpentry} {if(earlycommands, ' {earlycommands % helpentry} ')} {if(othercommands, ' {othercommands % helpentry} ')}

    Topics

    Main Commands

    Other Commands

    {footer} mercurial-3.7.3/mercurial/templates/monoblue/error.tmpl0000644000175000017500000000331012676531525022754 0ustar mpmmpm00000000000000{header} {repo|escape}: Error

    {error|escape}

    {footer} mercurial-3.7.3/mercurial/templates/monoblue/index.tmpl0000644000175000017500000000175112676531525022741 0ustar mpmmpm00000000000000{header} Mercurial repositories index
    {entries%indexentry}
    Name Description Contact Last modified    

    mercurial

    mercurial-3.7.3/mercurial/templates/monoblue/filelog.tmpl0000644000175000017500000000455612676531525023261 0ustar mpmmpm00000000000000{header} {repo|escape}: File revisions
    {entries%filelogentry}
    {nav%filenav}
    {footer} mercurial-3.7.3/mercurial/templates/monoblue/filerevision.tmpl0000644000175000017500000000620412676531525024326 0ustar mpmmpm00000000000000{header} {repo|escape}: {file|escape}@{node|short}

    {file|escape}

    {date|rfc822date}

    author
    {author|obfuscate}
    date
    {date|rfc822date}
    {branch%filerevbranch}
    changeset {rev}
    {node|short}
    {parent%filerevparent} {child%filerevchild}
    permissions
    {permissions|permissions}

    {desc|strip|escape|websub|addbreaks|nonempty}

    {text%fileline}
    {footer} mercurial-3.7.3/mercurial/templates/monoblue/branches.tmpl0000644000175000017500000000323612676531525023417 0ustar mpmmpm00000000000000{header} {repo|escape}: Branches
    {entries%branchentry}
    {footer} mercurial-3.7.3/mercurial/templates/monoblue/graph.tmpl0000644000175000017500000001110212676531525022722 0ustar mpmmpm00000000000000{header} {repo|escape}: graph
    The revision graph only works with JavaScript-enabled browsers.
        less more | {changenav%navgraph}
        {footer} mercurial-3.7.3/mercurial/templates/spartan/0000755000175000017500000000000012676531544020561 5ustar mpmmpm00000000000000mercurial-3.7.3/mercurial/templates/spartan/footer.tmpl0000644000175000017500000000033212676531525022752 0ustar mpmmpm00000000000000 {motd} mercurial-3.7.3/mercurial/templates/spartan/shortlogentry.tmpl0000644000175000017500000000043112676531525024377 0ustar mpmmpm00000000000000
        {date|rfc822date} {author|person} {desc|strip|firstline|escape|nonempty}
        mercurial-3.7.3/mercurial/templates/spartan/search.tmpl0000644000175000017500000000164312676531525022727 0ustar mpmmpm00000000000000{header} {repo|escape}: searching for {query|escape}
        changelog shortlog graph tags branches files {archives%archiveentry} help

        searching for {query|escape}

        {sessionvars%hiddenformentry}

        search:

        {entries}
        {sessionvars%hiddenformentry}

        search:

        {footer} mercurial-3.7.3/mercurial/templates/spartan/shortlog.tmpl0000644000175000017500000000300412676531525023314 0ustar mpmmpm00000000000000{header} {repo|escape}: shortlog
        changelog graph tags branches files {archives%archiveentry} help rss atom

        Mercurial {pathdef%breadcrumb} / shortlog

        {sessionvars%hiddenformentry}

        navigate: {changenav%navshort}

        {entries%shortlogentry}
        {sessionvars%hiddenformentry}

        navigate: {changenav%navshort}

        {footer} mercurial-3.7.3/mercurial/templates/spartan/filelogentry.tmpl0000644000175000017500000000155512676531525024167 0ustar mpmmpm00000000000000 {rename%filelogrename}
        {date|rfc822date}: {desc|strip|firstline|escape|nonempty}
        revision {filerev}: {node|short} (diff) (annotate)
        author: {author|obfuscate}
        date: {date|rfc822date}
        mercurial-3.7.3/mercurial/templates/spartan/filediff.tmpl0000644000175000017500000000247612676531525023237 0ustar mpmmpm00000000000000{header} {repo|escape}: {file|escape} diff

        Mercurial {pathdef%breadcrumb} / {file|escape}

        {parent%filediffparent} {child%filediffchild}
        revision {rev}: {node|short}
        {diff}
        {footer} mercurial-3.7.3/mercurial/templates/spartan/header.tmpl0000644000175000017500000000060512676531525022707 0ustar mpmmpm00000000000000 mercurial-3.7.3/mercurial/templates/spartan/manifest.tmpl0000644000175000017500000000211312676531525023261 0ustar mpmmpm00000000000000{header} {repo|escape}: files for changeset {node|short}
        changelog shortlog graph tags branches changeset {archives%archiveentry} help

        Mercurial {pathdef%breadcrumb} / files for changeset {node|short}: {path|escape}

        {dentries%direntry} {fentries%fileentry}
        drwxr-xr-x      [up]
        {footer} mercurial-3.7.3/mercurial/templates/spartan/notfound.tmpl0000644000175000017500000000040612676531525023312 0ustar mpmmpm00000000000000{header} Mercurial repository not found

        Mercurial repository not found

        The specified repository "{repo|escape}" is unknown, sorry. Please go back to the main repository list page. {footer} mercurial-3.7.3/mercurial/templates/spartan/fileannotate.tmpl0000644000175000017500000000326412676531525024134 0ustar mpmmpm00000000000000{header} {repo|escape}: {file|escape} annotate

        Mercurial {pathdef%breadcrumb} / annotate {file|escape}

        {parent%fileannotateparent} {child%fileannotatechild}
        changeset {rev}: {node|short}
        author: {author|obfuscate}
        date: {date|rfc822date}
        permissions: {permissions|permissions}
        description: {desc|strip|escape|websub|addbreaks|nonempty}
        {annotate%annotateline}
        {footer} mercurial-3.7.3/mercurial/templates/spartan/changelogentry.tmpl0000644000175000017500000000137012676531525024470 0ustar mpmmpm00000000000000 {parent%changelogparent} {child%changelogchild} {changelogtag}
        {date|rfc822date}: {desc|strip|firstline|escape|nonempty}
        changeset {rev}: {node|short}
        author: {author|obfuscate}
        date: {date|rfc822date}
        files: {files}
        mercurial-3.7.3/mercurial/templates/spartan/map0000644000175000017500000001670612676531525021272 0ustar mpmmpm00000000000000default = 'shortlog' mimetype = 'text/html; charset={encoding}' header = header.tmpl footer = footer.tmpl search = search.tmpl changelog = changelog.tmpl shortlog = shortlog.tmpl shortlogentry = shortlogentry.tmpl graph = graph.tmpl naventry = '{label|escape} ' navshortentry = '{label|escape} ' navgraphentry = '{label|escape} ' filenaventry = '{label|escape} ' filedifflink = '{file|escape} ' filenodelink = '{file|escape} ' filenolink = '{file|escape} ' fileellipses = '...' changelogentry = changelogentry.tmpl searchentry = changelogentry.tmpl changeset = changeset.tmpl manifest = manifest.tmpl nav = '{before%naventry} {after%naventry}' navshort = '{before%navshortentry}{after%navshortentry}' navgraph = '{before%navgraphentry}{after%navgraphentry}' filenav = '{before%filenaventry}{after%filenaventry}' direntry = ' drwxr-xr-x      {basename|escape}/ {emptydirs|urlescape} ' fileentry = ' {permissions|permissions}  {date|isodate}  {size}  {basename|escape}' filerevision = filerevision.tmpl fileannotate = fileannotate.tmpl filediff = filediff.tmpl filelog = filelog.tmpl fileline = '
        {linenumber} {line|escape}
        ' filelogentry = filelogentry.tmpl # The   ensures that all table cells have content (even if there # is an empty line in the annotated file), which in turn ensures that # all table rows have equal height. annotateline = ' {author|user}@{rev} {linenumber}
         {line|escape}
        ' difflineplus = '{linenumber}{line|escape}' difflineminus = '{linenumber}{line|escape}' difflineat = '{linenumber}{line|escape}' diffline = '{linenumber}{line|escape}' changesetlink = '{node|short}' changelogparent = ' parent {rev}: {changesetlink} ' changesetparent = ' parent {rev}: {changesetlink} ' changesetparentdiff = ' parent {rev}: {changesetlink} {ifeq(node, basenode, '(current diff)', '({difffrom})')} ' difffrom = 'diff' filerevparent = ' parent: {rename%filerename}{node|short} ' filerename = '{file|escape}@' filelogrename = ' base: {file|escape}@{node|short} ' fileannotateparent = ' parent: {rename%filerename}{node|short} ' changesetchild = ' child {rev}: {node|short} ' changelogchild = ' child {rev}: {node|short} ' filerevchild = ' child: {node|short} ' fileannotatechild = ' child: {node|short} ' tags = tags.tmpl tagentry = '
      • {node} {tag|escape}
      • ' branches = branches.tmpl branchentry = '
      • {node} {branch|escape}
      • ' diffblock = '
        {lines}
        ' changelogtag = 'tag:{tag|escape}' changesettag = 'tag:{tag|escape}' filediffparent = ' parent {rev}: {node|short} ' filelogparent = ' parent {rev}: {node|short} ' filediffchild = ' child {rev}: {node|short} ' filelogchild = ' child {rev}: {node|short} ' indexentry = ' {name|escape} {description} {contact|obfuscate} {lastchange|rfc822date} RSS Atom {archives%archiveentry} ' index = index.tmpl archiveentry = '{type|escape} ' notfound = notfound.tmpl error = error.tmpl urlparameter = '{separator}{name}={value|urlescape}' hiddenformentry = '' breadcrumb = '> {name|escape} ' mercurial-3.7.3/mercurial/templates/spartan/changelog.tmpl0000644000175000017500000000300112676531525023377 0ustar mpmmpm00000000000000{header} {repo|escape}: changelog
        shortlog graph tags branches files {archives%archiveentry} help rss atom

        Mercurial {pathdef%breadcrumb} / changelog

        {sessionvars%hiddenformentry}

        navigate: {changenav%nav}

        {entries%changelogentry}
        {sessionvars%hiddenformentry}

        navigate: {changenav%nav}

        {footer} mercurial-3.7.3/mercurial/templates/spartan/tags.tmpl0000644000175000017500000000177112676531525022422 0ustar mpmmpm00000000000000{header} {repo|escape}: tags

        Mercurial {pathdef%breadcrumb} / tags

          {entries%tagentry}
        {footer} mercurial-3.7.3/mercurial/templates/spartan/changeset.tmpl0000644000175000017500000000302512676531525023417 0ustar mpmmpm00000000000000{header} {repo|escape}: changeset {node|short}
        changelog shortlog graph tags branches files raw {archives%archiveentry} help

        Mercurial {pathdef%breadcrumb} / changeset: {desc|strip|escape|firstline|nonempty}

        {ifeq(count(parent), '2', parent%changesetparentdiff, parent%changesetparent)} {child%changesetchild} {changesettag}
        changeset {rev}: {node|short}
        author: {author|obfuscate}
        date: {date|rfc822date}
        files: {files}
        description: {desc|strip|escape|websub|addbreaks|nonempty}
        {diff}
        {footer} mercurial-3.7.3/mercurial/templates/spartan/error.tmpl0000644000175000017500000000025612676531525022612 0ustar mpmmpm00000000000000{header} Mercurial Error

        Mercurial Error

        An error occurred while processing your request:

        {error|escape}

        {footer} mercurial-3.7.3/mercurial/templates/spartan/index.tmpl0000644000175000017500000000073012676531525022565 0ustar mpmmpm00000000000000{header} Mercurial repositories index

        Mercurial {pathdef%breadcrumb}

        {entries%indexentry}
        Name Description Contact Last modified  
        {footer} mercurial-3.7.3/mercurial/templates/spartan/filelog.tmpl0000644000175000017500000000260712676531525023104 0ustar mpmmpm00000000000000{header} {repo|escape}: {file|escape} history

        Mercurial {pathdef%breadcrumb} / {file|escape} revision history

        navigate: {nav%filenav}

        {entries%filelogentry} {footer} mercurial-3.7.3/mercurial/templates/spartan/filerevision.tmpl0000644000175000017500000000315712676531525024162 0ustar mpmmpm00000000000000{header} {repo|escape}:{file|escape}

        Mercurial {pathdef%breadcrumb} / {file|escape}

        {parent%filerevparent} {child%filerevchild}
        changeset {rev}: {node|short}
        author: {author|obfuscate}
        date: {date|rfc822date}
        permissions: {permissions|permissions}
        description: {desc|strip|escape|websub|addbreaks|nonempty}
        {text%fileline}
        
        {footer} mercurial-3.7.3/mercurial/templates/spartan/branches.tmpl0000644000175000017500000000202412676531525023241 0ustar mpmmpm00000000000000{header} {repo|escape}: branches

        Mercurial {pathdef%breadcrumb} / branches

          {entries%branchentry}
        {footer} mercurial-3.7.3/mercurial/templates/spartan/graph.tmpl0000644000175000017500000000475312676531525022570 0ustar mpmmpm00000000000000{header} {repo|escape}: graph

        Mercurial {pathdef%breadcrumb} / graph

        {sessionvars%hiddenformentry}

        navigate: {changenav%navgraph}

            {sessionvars%hiddenformentry}

            navigate: {changenav%navgraph}

            {footer} mercurial-3.7.3/mercurial/templates/map-cmdline.default0000644000175000017500000000541112676531525022645 0ustar mpmmpm00000000000000# Base templates. Due to name clashes with existing keywords, we have # to replace some keywords with 'lkeyword', for 'labelled keyword' changeset = '{cset}{branches}{bookmarks}{tags}{parents}{user}{ldate}{summary}\n' changeset_quiet = '{lnode}' changeset_verbose = '{cset}{branches}{bookmarks}{tags}{parents}{user}{ldate}{lfiles}{lfile_copies_switch}{description}\n' changeset_debug = '{fullcset}{branches}{bookmarks}{tags}{lphase}{parents}{manifest}{user}{ldate}{lfile_mods}{lfile_adds}{lfile_dels}{lfile_copies_switch}{extras}{description}\n' # File templates lfiles = '{if(files, label("ui.note log.files", "files: {files}\n"))}' lfile_mods = '{if(file_mods, label("ui.debug log.files", "files: {file_mods}\n"))}' lfile_adds = '{if(file_adds, label("ui.debug log.files", "files+: {file_adds}\n"))}' lfile_dels = '{if(file_dels, label("ui.debug log.files", "files-: {file_dels}\n"))}' lfile_copies_switch = '{if(file_copies_switch, label("ui.note log.copies", "copies: {file_copies_switch % ' {name} ({source})'}\n"))}' # General templates cset = '{label("log.changeset changeset.{phase}", "changeset: {rev}:{node|short}")}\n' lphase = '{label("log.phase", "phase: {phase}")}\n' fullcset = '{label("log.changeset changeset.{phase}", "changeset: {rev}:{node}")}\n' parent = '{label("log.parent changeset.{phase}", "parent: {rev}:{node|formatnode}")}\n' lnode = '{label("log.node", "{rev}:{node|short}")}\n' manifest = '{label("ui.debug log.manifest", "manifest: {rev}:{node}")}\n' branch = '{label("log.branch", "branch: {branch}")}\n' tag = '{label("log.tag", "tag: {tag}")}\n' bookmark = '{label("log.bookmark", "bookmark: {bookmark}")}\n' user = '{label("log.user", "user: {author}")}\n' summary = '{if(desc|strip, "{label('log.summary', 'summary: {desc|firstline}')}\n")}' ldate = '{label("log.date", "date: {date|date}")}\n' extra = 
'{label("ui.debug log.extra", "extra: {key}={value|stringescape}")}\n' description = '{if(desc|strip, "{label('ui.note log.description', 'description:')} {label('ui.note log.description', '{desc|strip}')}\n\n")}' status = '{status} {path}\n{if(copy, " {copy}\n")}' mercurial-3.7.3/mercurial/templates/map-cmdline.status0000644000175000017500000000226012676531525022543 0ustar mpmmpm00000000000000%include map-cmdline.default # Override base templates changeset = '{cset}{branches}{bookmarks}{tags}{parents}{user}{ldate}{summary}{lfiles}\n' changeset_verbose = '{cset}{branches}{bookmarks}{tags}{parents}{user}{ldate}{description}{lfiles}\n' changeset_debug = '{fullcset}{branches}{bookmarks}{tags}{lphase}{parents}{manifest}{user}{ldate}{extras}{description}{lfiles}\n' # Override the file templates lfiles = '{if(files, label('ui.note log.files', 'files:\n'))}{lfile_mods}{lfile_adds}{lfile_copies_switch}{lfile_dels}' # Exclude copied files, will display those in lfile_copies_switch lfile_adds = '{file_adds % "{ifcontains(file, file_copies_switch, '', '{lfile_add}')}"}' lfile_add = '{label("status.added", "A {file}\n")}' lfile_copies_switch = '{file_copies_switch % "{lfile_copy_orig}{lfile_copy_dest}"}' lfile_copy_orig = '{label("status.added", "A {name}\n")}' lfile_copy_dest = '{label("status.copied", " {source}\n")}' lfile_mods = '{file_mods % "{label('status.modified', 'M {file}\n')}"}' lfile_dels = '{file_dels % "{label('status.removed', 'R {file}\n')}"}' mercurial-3.7.3/mercurial/templates/gitweb/0000755000175000017500000000000012676531544020372 5ustar mpmmpm00000000000000mercurial-3.7.3/mercurial/templates/gitweb/footer.tmpl0000644000175000017500000000043312676531525022565 0ustar mpmmpm00000000000000 mercurial-3.7.3/mercurial/templates/gitweb/search.tmpl0000644000175000017500000000257212676531525022542 0ustar mpmmpm00000000000000{header} {repo|escape}: Search
            searching for {query|escape}
            {entries} {footer} mercurial-3.7.3/mercurial/templates/gitweb/shortlog.tmpl0000644000175000017500000000372412676531525023136 0ustar mpmmpm00000000000000{header} {repo|escape}: Shortlog
            {sessionvars%hiddenformentry}
             
            {entries%shortlogentry}
            {footer} mercurial-3.7.3/mercurial/templates/gitweb/summary.tmpl0000644000175000017500000000500312676531525022762 0ustar mpmmpm00000000000000{header} {repo|escape}: Summary
             
            description{desc}
            owner{owner|obfuscate}
            last change{lastchange|rfc822date}
            {shortlog}
            ...
            {tags}
            ...
            {bookmarks%bookmarkentry}
            ...
            {branches%branchentry}
            ...
            {footer} mercurial-3.7.3/mercurial/templates/gitweb/filediff.tmpl0000644000175000017500000000423312676531525023041 0ustar mpmmpm00000000000000{header} {repo|escape}: diff {file|escape}
            {file|escape}
            {branch%filerevbranch} {parent%filediffparent} {child%filediffchild}
            changeset {rev} {node|short}
            {diff}
            {footer} mercurial-3.7.3/mercurial/templates/gitweb/header.tmpl0000644000175000017500000000101712676531525022516 0ustar mpmmpm00000000000000 mercurial-3.7.3/mercurial/templates/gitweb/manifest.tmpl0000644000175000017500000000325512676531525023102 0ustar mpmmpm00000000000000{header} {repo|escape}: files
            {path|escape} {inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag}
            {dentries%direntry} {fentries%fileentry}
            drwxr-xr-x [up]
            {footer} mercurial-3.7.3/mercurial/templates/gitweb/help.tmpl0000644000175000017500000000221112676531525022213 0ustar mpmmpm00000000000000{header} Help: {topic}
             
            {rstdoc(doc, "html")}
            {footer} mercurial-3.7.3/mercurial/templates/gitweb/notfound.tmpl0000644000175000017500000000061512676531525023125 0ustar mpmmpm00000000000000{header} Mercurial repository not found
            The specified repository "{repo|escape}" is unknown, sorry.

            Please go back to the main repository list page.
            {footer} mercurial-3.7.3/mercurial/templates/gitweb/fileannotate.tmpl0000644000175000017500000000503212676531525023740 0ustar mpmmpm00000000000000{header} {repo|escape}: {file|escape}@{node|short} (annotated)
            {file|escape}
            {branch%filerevbranch} {parent%fileannotateparent} {child%fileannotatechild}
            author {author|obfuscate}
            {date|rfc822date}
            changeset {rev} {node|short}
            permissions {permissions|permissions}
            {desc|strip|escape|websub|addbreaks|nonempty}
            {annotate%annotateline}
            {footer} mercurial-3.7.3/mercurial/templates/gitweb/changelogentry.tmpl0000644000175000017500000000111212676531525024273 0ustar mpmmpm00000000000000
            {author|obfuscate} [{date|rfc822date}] rev {rev}
            {desc|strip|escape|websub|addbreaks|nonempty}

            mercurial-3.7.3/mercurial/templates/gitweb/bookmarks.tmpl0000644000175000017500000000230212676531525023254 0ustar mpmmpm00000000000000{header} {repo|escape}: Bookmarks
             
            {entries%bookmarkentry}
            {footer} mercurial-3.7.3/mercurial/templates/gitweb/map0000644000175000017500000003340512676531525021076 0ustar mpmmpm00000000000000default = 'summary' mimetype = 'text/html; charset={encoding}' header = header.tmpl footer = footer.tmpl search = search.tmpl changelog = changelog.tmpl summary = summary.tmpl error = error.tmpl notfound = notfound.tmpl help = help.tmpl helptopics = helptopics.tmpl helpentry = ' {if(basename, '{basename|escape}', '{topic|escape}')} {summary|escape} ' naventry = '{label|escape} ' navshortentry = '{label|escape} ' navgraphentry = '{label|escape} ' filenaventry = '{label|escape} ' filedifflink = '{file|escape} ' filenodelink = ' {file|escape} file | annotate | diff | comparison | revisions ' filenolink = ' {file|escape} file | annotate | diff | comparison | revisions ' nav = '{before%naventry} {after%naventry}' navshort = '{before%navshortentry}{after%navshortentry}' navgraph = '{before%navgraphentry}{after%navgraphentry}' filenav = '{before%filenaventry}{after%filenaventry}' fileellipses = '...' changelogentry = changelogentry.tmpl searchentry = changelogentry.tmpl changeset = changeset.tmpl manifest = manifest.tmpl direntry = ' drwxr-xr-x {basename|escape} {emptydirs|escape} files ' fileentry = ' {permissions|permissions} {date|isodate} {size} {basename|escape} file | revisions | annotate ' filerevision = filerevision.tmpl fileannotate = fileannotate.tmpl filediff = filediff.tmpl filecomparison = filecomparison.tmpl filelog = filelog.tmpl fileline = ' {strip(line|escape, '\r\n')}' annotateline = ' {author|user}@{rev}
            {linenumber}
            {line|escape}
            ' difflineplus = ' {strip(line|escape, '\r\n')}' difflineminus = ' {strip(line|escape, '\r\n')}' difflineat = ' {strip(line|escape, '\r\n')}' diffline = ' {strip(line|escape, '\r\n')}' comparisonblock =' {lines} ' comparisonline = '
            {leftlinenumber} {leftline|escape}
            {rightlinenumber} {rightline|escape}
            ' changelogparent = ' parent {rev}: {node|short} ' changesetlink = '{node|short}' changesetbranch = 'branch{name|escape}' changesetparent = ' parent {rev} {changesetlink} ' changesetparentdiff = ' parent {rev} {changesetlink} {ifeq(node, basenode, '(current diff)', '({difffrom})')} ' difffrom = 'diff' filerevbranch = 'branch{name|escape}' filerevparent = ' parent {rev} {rename%filerename}{node|short} ' filerename = '{file|escape}@' filelogrename = '| base' fileannotateparent = ' parent {rev} {rename%filerename}{node|short} ' changelogchild = ' child {rev}: {node|short} ' changesetchild = ' child {rev} {node|short} ' filerevchild = ' child {rev} {node|short} ' fileannotatechild = ' child {rev} {node|short} ' tags = tags.tmpl tagentry = ' {date|rfc822date} {tag|escape} changeset | changelog | files ' bookmarks = bookmarks.tmpl bookmarkentry = ' {date|rfc822date} {bookmark|escape} changeset | changelog | files ' branches = branches.tmpl branchentry = ' {date|rfc822date} {branch|escape} changeset | changelog | files ' diffblock = '
            {lines}
            ' filediffparent = ' parent {rev} {node|short} ' filecompparent = ' parent {rev} {node|short} ' filelogparent = ' parent {rev}:  {node|short} ' filediffchild = ' child {rev} {node|short} ' filecompchild = ' child {rev} {node|short} ' filelogchild = ' child {rev}:  {node|short} ' shortlog = shortlog.tmpl graph = graph.tmpl tagtag = '{name|escape} ' branchtag = '{name|escape} ' inbranchtag = '{name|escape} ' bookmarktag = '{name|escape} ' shortlogentry = ' {date|rfc822date} {author|person} {desc|strip|firstline|escape|nonempty} {inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag} changeset | files ' filelogentry = ' {date|rfc822date} {author|person} {desc|strip|firstline|escape|nonempty} {inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag} file | diff | annotate {rename%filelogrename} ' archiveentry = ' | {type|escape} ' indexentry = ' {name|escape} {description} {contact|obfuscate} {lastchange|rfc822date} {archives%indexarchiveentry} {if(isdirectory, '', '' )} \n' indexarchiveentry = ' {type|escape} ' index = index.tmpl urlparameter = '{separator}{name}={value|urlescape}' hiddenformentry = '' breadcrumb = '> {name|escape} ' mercurial-3.7.3/mercurial/templates/gitweb/changelog.tmpl0000644000175000017500000000255112676531525023221 0ustar mpmmpm00000000000000{header} {repo|escape}: Changelog
            {sessionvars%hiddenformentry}
            {entries%changelogentry} {footer} mercurial-3.7.3/mercurial/templates/gitweb/tags.tmpl0000644000175000017500000000224412676531525022227 0ustar mpmmpm00000000000000{header} {repo|escape}: Tags
             
            {entries%tagentry}
            {footer} mercurial-3.7.3/mercurial/templates/gitweb/filecomparison.tmpl0000644000175000017500000000517612676531525024312 0ustar mpmmpm00000000000000{header} {repo|escape}: comparison {file|escape}
            {file|escape}
            {branch%filerevbranch} {parent%filecompparent} {child%filecompchild}
            changeset {rev} {node|short}
            equal deleted inserted replaced
            {comparison}
            {leftrev}:{leftnode|short} {rightrev}:{rightnode|short}
            {footer} mercurial-3.7.3/mercurial/templates/gitweb/changeset.tmpl0000644000175000017500000000427312676531525023236 0ustar mpmmpm00000000000000{header} {repo|escape}: changeset {rev}:{node|short}
            {branch%changesetbranch} {ifeq(count(parent), '2', parent%changesetparentdiff, parent%changesetparent)} {child%changesetchild}
            author{author|obfuscate}
            {date|rfc822date}
            changeset {rev} {node|short}
            {desc|strip|escape|websub|addbreaks|nonempty}
            {files}
            {diff}
            {footer} mercurial-3.7.3/mercurial/templates/gitweb/helptopics.tmpl0000644000175000017500000000312712676531525023444 0ustar mpmmpm00000000000000{header} Help: {title}
             
            {topics % helpentry} {if(earlycommands, ' {earlycommands % helpentry} ')} {if(othercommands, ' {othercommands % helpentry} ')}

            Topics

            Main Commands

            Other Commands

            {footer} mercurial-3.7.3/mercurial/templates/gitweb/error.tmpl0000644000175000017500000000226112676531525022421 0ustar mpmmpm00000000000000{header} {repo|escape}: Error

            An error occurred while processing your request

            {error|escape}
            {footer} mercurial-3.7.3/mercurial/templates/gitweb/index.tmpl0000644000175000017500000000132012676531525022372 0ustar mpmmpm00000000000000{header} Mercurial repositories index {entries%indexentry}
            Name Description Contact Last modified    
            mercurial-3.7.3/mercurial/templates/gitweb/filelog.tmpl0000644000175000017500000000326112676531525022712 0ustar mpmmpm00000000000000{header} {repo|escape}: File revisions
            {file|urlescape}
            {entries%filelogentry}
            {footer} mercurial-3.7.3/mercurial/templates/gitweb/filerevision.tmpl0000644000175000017500000000503012676531525023763 0ustar mpmmpm00000000000000{header} {repo|escape}: {file|escape}@{node|short}
            {file|escape}
            {branch%filerevbranch} {parent%filerevparent} {child%filerevchild}
            author {author|obfuscate}
            {date|rfc822date}
            changeset {rev} {node|short}
            permissions {permissions|permissions}
            {desc|strip|escape|websub|addbreaks|nonempty}
            {text%fileline}
            {footer} mercurial-3.7.3/mercurial/templates/gitweb/branches.tmpl0000644000175000017500000000227312676531525023060 0ustar mpmmpm00000000000000{header} {repo|escape}: Branches
             
            {entries%branchentry}
            {footer} mercurial-3.7.3/mercurial/templates/gitweb/graph.tmpl0000644000175000017500000000766512676531525022406 0ustar mpmmpm00000000000000{header} {repo|escape}: Graph
            {sessionvars%hiddenformentry}
             
                {footer} mercurial-3.7.3/mercurial/templates/json/0000755000175000017500000000000012676531544020062 5ustar mpmmpm00000000000000mercurial-3.7.3/mercurial/templates/json/changelist.tmpl0000644000175000017500000000017412676531525023102 0ustar mpmmpm00000000000000\{ "node": {node|json}, "changeset_count": {changesets|json}, "changesets": [{join(entries%changelistentry, ", ")}] } mercurial-3.7.3/mercurial/templates/json/map0000644000175000017500000001044512676531525020565 0ustar mpmmpm00000000000000mimetype = 'application/json' filerevision = '"not yet implemented"' search = '"not yet implemented"' # changelog and shortlog are the same web API but with different # number of entries. changelog = changelist.tmpl shortlog = changelist.tmpl changelistentry = '\{ "node": {node|json}, "date": {date|json}, "desc": {desc|json}, "bookmarks": [{join(bookmarks%changelistentryname, ", ")}], "tags": [{join(tags%changelistentryname, ", ")}], "user": {author|json} }' changelistentryname = '{name|json}' changeset = '\{ "node": {node|json}, "date": {date|json}, "desc": {desc|json}, "branch": {if(branch, branch%changesetbranch, "default"|json)}, "bookmarks": [{join(changesetbookmark, ", ")}], "tags": [{join(changesettag, ", ")}], "user": {author|json}, "parents": [{join(parent%changesetparent, ", ")}], "phase": {phase|json} }' changesetbranch = '{name|json}' changesetbookmark = '{bookmark|json}' changesettag = '{tag|json}' changesetparent = '{node|json}' manifest = '\{ "node": {node|json}, "abspath": {path|json}, "directories": [{join(dentries%direntry, ", ")}], "files": [{join(fentries%fileentry, ", ")}], "bookmarks": [{join(bookmarks%name, ", ")}], "tags": [{join(tags%name, ", ")}] }' name = '{name|json}' direntry = '\{ "abspath": {path|json}, "basename": {basename|json}, "emptydirs": {emptydirs|json} }' fileentry = '\{ "abspath": {file|json}, "basename": {basename|json}, "date": {date|json}, "size": {size|json}, "flags": {permissions|json} }' tags = '\{ "node": 
{node|json}, "tags": [{join(entriesnotip%tagentry, ", ")}] }' tagentry = '\{ "tag": {tag|json}, "node": {node|json}, "date": {date|json} }' bookmarks = '\{ "node": {node|json}, "bookmarks": [{join(entries%bookmarkentry, ", ")}] }' bookmarkentry = '\{ "bookmark": {bookmark|json}, "node": {node|json}, "date": {date|json} }' branches = '\{ "branches": [{join(entries%branchentry, ", ")}] }' branchentry = '\{ "branch": {branch|json}, "node": {node|json}, "date": {date|json}, "status": {status|json} }' summary = '"not yet implemented"' filediff = '\{ "path": {file|json}, "node": {node|json}, "date": {date|json}, "desc": {desc|json}, "author": {author|json}, "parents": [{join(parent%changesetparent, ", ")}], "children": [{join(child%changesetparent, ", ")}], "diff": [{join(diff%diffblock, ", ")}] }' diffblock = '\{ "blockno": {blockno|json}, "lines": [{join(lines, ", ")}] }' difflineplus = '\{ "t": "+", "n": {lineno|json}, "l": {line|json} }' difflineminus = '\{ "t": "-", "n": {lineno|json}, "l": {line|json} }' difflineat = '\{ "t": "@", "n": {lineno|json}, "l": {line|json} }' diffline = '\{ "t": "", "n": {lineno|json}, "l": {line|json} }' filecomparison = '\{ "path": {file|json}, "node": {node|json}, "date": {date|json}, "desc": {desc|json}, "author": {author|json}, "parents": [{join(parent%changesetparent, ", ")}], "children": [{join(child%changesetparent, ", ")}], "leftnode": {leftnode|json}, "rightnode": {rightnode|json}, "comparison": [{join(comparison, ", ")}] }' comparisonblock = '\{ "lines": [{join(lines, ", ")}] }' comparisonline = '\{ "t": {type|json}, "ln": {leftlineno|json}, "ll": {leftline|json}, "rn": {rightlineno|json}, "rl": {rightline|json} }' fileannotate = '\{ "abspath": {file|json}, "node": {node|json}, "author": {author|json}, "date": {date|json}, "desc": {desc|json}, "parents": [{join(parent%changesetparent, ", ")}], "children": [{join(child%changesetparent, ", ")}], "permissions": {permissions|json}, "annotate": [{join(annotate%fileannotation, ", 
")}] }' fileannotation = '\{ "node": {node|json}, "author": {author|json}, "desc": {desc|json}, "abspath": {file|json}, "targetline": {targetline|json}, "line": {line|json}, "lineno": {lineno|json}, "revdate": {revdate|json} }' filelog = '"not yet implemented"' graph = '"not yet implemented"' helptopics = '\{ "topics": [{join(topics%helptopicentry, ", ")}], "earlycommands": [{join(earlycommands%helptopicentry, ", ")}], "othercommands": [{join(othercommands%helptopicentry, ", ")}] }' helptopicentry = '\{ "topic": {topic|json}, "summary": {summary|json} }' help = '\{ "topic": {topic|json}, "rawdoc": {doc|json} }' filenodelink = '' filenolink = '' mercurial-3.7.3/mercurial/templates/coal/0000755000175000017500000000000012676531544020027 5ustar mpmmpm00000000000000mercurial-3.7.3/mercurial/templates/coal/header.tmpl0000644000175000017500000000104512676531525022154 0ustar mpmmpm00000000000000 mercurial-3.7.3/mercurial/templates/coal/map0000644000175000017500000000166612676531525020537 0ustar mpmmpm00000000000000%include paper/map footer = ../paper/footer.tmpl search = ../paper/search.tmpl changelog = ../paper/shortlog.tmpl shortlog = ../paper/shortlog.tmpl shortlogentry = ../paper/shortlogentry.tmpl graph = ../paper/graph.tmpl help = ../paper/help.tmpl helptopics = ../paper/helptopics.tmpl diffstatlink = ../paper/diffstat.tmpl diffstatnolink = ../paper/diffstat.tmpl changelogentry = ../paper/shortlogentry.tmpl searchentry = ../paper/shortlogentry.tmpl changeset = ../paper/changeset.tmpl manifest = ../paper/manifest.tmpl filerevision = ../paper/filerevision.tmpl fileannotate = ../paper/fileannotate.tmpl filediff = ../paper/filediff.tmpl filecomparison = ../paper/filecomparison.tmpl filelog = ../paper/filelog.tmpl filelogentry = ../paper/filelogentry.tmpl tags = ../paper/tags.tmpl bookmarks = ../paper/bookmarks.tmpl branches = ../paper/branches.tmpl index = ../paper/index.tmpl notfound = ../paper/notfound.tmpl error = ../paper/error.tmpl 
mercurial-3.7.3/mercurial/templates/static/0000755000175000017500000000000012676531544020400 5ustar mpmmpm00000000000000mercurial-3.7.3/mercurial/templates/static/hgicon.png0000644000175000017500000000143012676531525022352 0ustar mpmmpm00000000000000‰PNG  IHDR(-S‰PLTEüüüþþþÿÿÿùùùùùùüüüûûûòòò÷÷÷ûûûùùùßßßãâãììì÷÷÷èèèöööïïïÜÜÜßßßéééÔÓÔîîîñññÙÙÙÍÌÍÝÝÝîîîËËËåååÉÉɺºº×××ååå´³´¿¿¿¬¬¬º¹º¾¾¾ÝÝÝÖÖÖÉÉɘ˜˜•••¢¡¢ÍÍ͸¸¸ÆÆÆ¯¯¯ÆÆÆ½½½¢¢¢Œšššªªª¯¯¯```±±±SRSooo¼¼¼ooo¸¸¸tttººº³³³kkk½½½ƒƒƒ‰‰‰••••••LKL´´´‰‰‰‘‘‘KKK«««œœœªªªwwwªªª;;;hhhgggyyy¬¬¬III[[[„„„888;;;???RRRXXX[[[iiiqqqrrrxxx{{{‡‡‡–––™™™ššš›››œœœŸŸŸ¡¡¡¨¨¨®®®¸¸¸¹¹¹ººº½½½ÁÁÁÂÂÂÄÄÄÅÅÅÊÊÊÌÌÌÑÑÑÓÓÓßßßêêêíííóóóÿÿÿ>ܲD[tRNS !')***.3;;@@LOSVWWZbmtŒ–˜ž¢¢£«­±´¹ºÃÅÅÈÌÌÕÖÛÛÝßäåççèëîïïõö÷÷÷øøúûüüýþþþ×>bKGD‚‹³ÿDÖIDAT5Á…Z€ѻ»°;°»;±»»ón8ìÂ|sÇç // Copyright 2006 Alexander Schremmer // // derived from code written by Scott James Remnant // Copyright 2005 Canonical Ltd. // // This software may be used and distributed according to the terms // of the GNU General Public License, incorporated herein by reference. var colors = [ [ 1.0, 0.0, 0.0 ], [ 1.0, 1.0, 0.0 ], [ 0.0, 1.0, 0.0 ], [ 0.0, 1.0, 1.0 ], [ 0.0, 0.0, 1.0 ], [ 1.0, 0.0, 1.0 ] ]; function Graph() { this.canvas = document.getElementById('graph'); if (window.G_vmlCanvasManager) this.canvas = window.G_vmlCanvasManager.initElement(this.canvas); this.ctx = this.canvas.getContext('2d'); this.ctx.strokeStyle = 'rgb(0, 0, 0)'; this.ctx.fillStyle = 'rgb(0, 0, 0)'; this.cur = [0, 0]; this.line_width = 3; this.bg = [0, 4]; this.cell = [2, 0]; this.columns = 0; this.revlink = ''; this.reset = function() { this.bg = [0, 4]; this.cell = [2, 0]; this.columns = 0; document.getElementById('nodebgs').innerHTML = ''; document.getElementById('graphnodes').innerHTML = ''; } this.scale = function(height) { this.bg_height = height; this.box_size = Math.floor(this.bg_height / 1.2); this.cell_height = this.box_size; } this.setColor = function(color, bg, fg) { // Set the colour. 
// // If color is a string, expect an hexadecimal RGB // value and apply it unchanged. If color is a number, // pick a distinct colour based on an internal wheel; // the bg parameter provides the value that should be // assigned to the 'zero' colours and the fg parameter // provides the multiplier that should be applied to // the foreground colours. var s; if(typeof color == "string") { s = "#" + color; } else { //typeof color == "number" color %= colors.length; var red = (colors[color][0] * fg) || bg; var green = (colors[color][1] * fg) || bg; var blue = (colors[color][2] * fg) || bg; red = Math.round(red * 255); green = Math.round(green * 255); blue = Math.round(blue * 255); s = 'rgb(' + red + ', ' + green + ', ' + blue + ')'; } this.ctx.strokeStyle = s; this.ctx.fillStyle = s; return s; } this.edge = function(x0, y0, x1, y1, color, width) { this.setColor(color, 0.0, 0.65); if(width >= 0) this.ctx.lineWidth = width; this.ctx.beginPath(); this.ctx.moveTo(x0, y0); this.ctx.lineTo(x1, y1); this.ctx.stroke(); } this.render = function(data) { var backgrounds = ''; var nodedata = ''; for (var i in data) { var parity = i % 2; this.cell[1] += this.bg_height; this.bg[1] += this.bg_height; var cur = data[i]; var node = cur[1]; var edges = cur[2]; var fold = false; var prevWidth = this.ctx.lineWidth; for (var j in edges) { line = edges[j]; start = line[0]; end = line[1]; color = line[2]; var width = line[3]; if(width < 0) width = prevWidth; var branchcolor = line[4]; if(branchcolor) color = branchcolor; if (end > this.columns || start > this.columns) { this.columns += 1; } if (start == this.columns && start > end) { var fold = true; } x0 = this.cell[0] + this.box_size * start + this.box_size / 2; y0 = this.bg[1] - this.bg_height / 2; x1 = this.cell[0] + this.box_size * end + this.box_size / 2; y1 = this.bg[1] + this.bg_height / 2; this.edge(x0, y0, x1, y1, color, width); } this.ctx.lineWidth = prevWidth; // Draw the revision node in the right column column = node[0] color = 
node[1] radius = this.box_size / 8; x = this.cell[0] + this.box_size * column + this.box_size / 2; y = this.bg[1] - this.bg_height / 2; var add = this.vertex(x, y, color, parity, cur); backgrounds += add[0]; nodedata += add[1]; if (fold) this.columns -= 1; } document.getElementById('nodebgs').innerHTML += backgrounds; document.getElementById('graphnodes').innerHTML += nodedata; } } function process_dates(parentSelector){ // derived from code from mercurial/templatefilter.py var scales = { 'year': 365 * 24 * 60 * 60, 'month': 30 * 24 * 60 * 60, 'week': 7 * 24 * 60 * 60, 'day': 24 * 60 * 60, 'hour': 60 * 60, 'minute': 60, 'second': 1 }; function format(count, string){ var ret = count + ' ' + string; if (count > 1){ ret = ret + 's'; } return ret; } function shortdate(date){ var ret = date.getFullYear() + '-'; // getMonth() gives a 0-11 result var month = date.getMonth() + 1; if (month <= 9){ ret += '0' + month; } else { ret += month; } ret += '-'; var day = date.getDate(); if (day <= 9){ ret += '0' + day; } else { ret += day; } return ret; } function age(datestr){ var now = new Date(); var once = new Date(datestr); if (isNaN(once.getTime())){ // parsing error return datestr; } var delta = Math.floor((now.getTime() - once.getTime()) / 1000); var future = false; if (delta < 0){ future = true; delta = -delta; if (delta > (30 * scales.year)){ return "in the distant future"; } } if (delta > (2 * scales.year)){ return shortdate(once); } for (unit in scales){ var s = scales[unit]; var n = Math.floor(delta / s); if ((n >= 2) || (s == 1)){ if (future){ return format(n, unit) + ' from now'; } else { return format(n, unit) + ' ago'; } } } } var nodes = document.querySelectorAll((parentSelector || '') + ' .age'); var dateclass = new RegExp('\\bdate\\b'); for (var i=0; i<\/canvas>$/m); var addWidth = sizes[1]; var addHeight = sizes[2]; addWidth = parseInt(addWidth); addHeight = parseInt(addHeight); graph.canvas.width = addWidth; graph.canvas.height = addHeight; var dataStr = 
htmlText.match(/^\s*var data = (.*);$/m)[1]; var data = JSON.parse(dataStr); if (data.length < nextPageVar) { nextPageVar = undefined; } graph.reset(); graph.render(data); } else { var doc = docFromHTML(htmlText); var nodes = doc.querySelector(containerSelector).children; var curClass = 'c' + Date.now(); while (nodes.length) { var node = nodes[0]; node = document.adoptNode(node); node.classList.add(curClass); container.appendChild(node); } process_dates('.' + curClass); } nextPageVar = nextPageVarGet(htmlText, nextPageVar); }, function onerror(errorText) { var message = { 'class': 'scroll-loading-error', text: 'Error: ' + errorText }; appendFormatHTML(container, messageFormat, message); }, function oncomplete() { removeByClassName('scroll-loading'); updateInitiated = false; scrollHandler(); } ); } } window.addEventListener('scroll', scrollHandler); window.addEventListener('resize', scrollHandler); scrollHandler(); } mercurial-3.7.3/mercurial/templates/static/style.css0000644000175000017500000000450312676531525022253 0ustar mpmmpm00000000000000a { text-decoration:none; } .age { white-space:nowrap; } .date { white-space:nowrap; } .indexlinks { white-space:nowrap; } .parity0 { background-color: #ddd; color: #000; } .parity1 { background-color: #eee; color: #000; } .lineno { width: 60px; color: #aaa; font-size: smaller; text-align: right; } .plusline { color: green; } .minusline { color: red; } .atline { color: purple; } .annotate { font-size: smaller; text-align: right; padding-right: 1em; } .buttons a { background-color: #666; padding: 2pt; color: white; font-family: sans-serif; font-weight: bold; } .navigate a { background-color: #ccc; padding: 2pt; font-family: sans-serif; color: black; } .metatag { background-color: #888; color: white; text-align: right; } /* Common */ pre { margin: 0; } .logo { float: right; clear: right; } /* Changelog/Filelog entries */ .logEntry { width: 100%; } .logEntry .age { width: 15%; } .logEntry th.label { width: 16em; } .logEntry th { 
font-weight: normal; text-align: right; vertical-align: top; } .logEntry th.age, .logEntry th.firstline { font-weight: bold; } .logEntry th.firstline { text-align: left; width: inherit; } /* Shortlog entries */ .slogEntry { width: 100%; } .slogEntry .age { width: 8em; } .slogEntry td { font-weight: normal; text-align: left; vertical-align: top; } .slogEntry td.author { width: 15em; } /* Tag entries */ #tagEntries { list-style: none; margin: 0; padding: 0; } #tagEntries .tagEntry { list-style: none; margin: 0; padding: 0; } /* Changeset entry */ #changesetEntry { } #changesetEntry th { font-weight: normal; background-color: #888; color: #fff; text-align: right; } #changesetEntry th.files, #changesetEntry th.description { vertical-align: top; } /* File diff view */ #filediffEntry { } #filediffEntry th { font-weight: normal; background-color: #888; color: #fff; text-align: right; } /* Graph */ div#wrapper { position: relative; margin: 0; padding: 0; } canvas { position: absolute; z-index: 5; top: -0.6em; margin: 0; } ul#nodebgs { list-style: none inside none; padding: 0; margin: 0; top: -0.7em; } ul#graphnodes li, ul#nodebgs li { height: 39px; } ul#graphnodes { position: absolute; z-index: 10; top: -0.85em; list-style: none inside none; padding: 0; } ul#graphnodes li .info { display: block; font-size: 70%; position: relative; top: -1px; } mercurial-3.7.3/mercurial/templates/static/coal-folder.png0000644000175000017500000000043412676531525023275 0ustar mpmmpm00000000000000‰PNG  IHDRH-ÑsRGB®ÎébKGDÿÿÿ ½§“ pHYs  šœtIMEص>‘ËtEXtCommentCreated with GIMPWwIDAT(Ïcøÿÿ?#=£‹‹ËaeeeSt‰Û·oŸÜ·oŸ=.,æææ6UUUÇŽ³ûþýûlšž :nth-child(4n+1), .stripes2 > :nth-child(2n+1) { background-color: #f0f0f0; } .parity1, .stripes4 > :nth-child(4n+3), .stripes2 > :nth-child(2n+2) { background-color: white; } .plusline { color: green; } .minusline { color: #dc143c; } /* crimson */ .atline { color: purple; } .diffstat-table { margin-top: 1em; } .diffstat-file { white-space: nowrap; font-size: 90%; } 
.diffstat-total { white-space: nowrap; font-size: 90%; } .diffstat-graph { width: 100%; } .diffstat-add { background-color: green; float: left; } .diffstat-remove { background-color: red; float: left; } .navigate { text-align: right; font-size: 60%; margin: 1em 0; } .tag { color: #999; font-size: 70%; font-weight: normal; margin-left: .5em; vertical-align: baseline; } .branchhead { color: #000; font-size: 80%; font-weight: normal; margin-left: .5em; vertical-align: baseline; } ul#graphnodes .branchhead { font-size: 75%; } .branchname { color: #000; font-size: 60%; font-weight: normal; margin-left: .5em; vertical-align: baseline; } h3 .branchname { font-size: 80%; } /* Common */ pre { margin: 0; } h2 { font-size: 120%; border-bottom: 1px solid #999; } h2 a { color: #000; } h3 { margin-top: +.7em; font-size: 100%; } /* log and tags tables */ .bigtable { border-bottom: 1px solid #999; border-collapse: collapse; font-size: 90%; width: 100%; font-weight: normal; text-align: left; } .bigtable td { vertical-align: top; } .bigtable th { padding: 1px 4px; border-bottom: 1px solid #999; } .bigtable tr { border: none; } .bigtable .age { width: 7em; } .bigtable .author { width: 15em; } .bigtable .description { } .bigtable .description .base { font-size: 70%; float: right; line-height: 1.66; } .bigtable .node { width: 5em; font-family: monospace;} .bigtable .permissions { width: 8em; text-align: left;} .bigtable .size { width: 5em; text-align: right; } .bigtable .annotate { text-align: right; } .bigtable td.annotate { font-size: smaller; } .bigtable td.source { font-size: inherit; } .source, .sourcefirst { font-family: monospace; white-space: pre; padding: 1px 4px; font-size: 90%; } .sourcefirst { border-bottom: 1px solid #999; font-weight: bold; } .source a { color: #999; font-size: smaller; font-family: monospace;} .bottomline { border-bottom: 1px solid #999; } .sourcelines { font-size: 90%; position: relative; counter-reset: lineno; } .wrap > span { white-space: pre-wrap; } 
.linewraptoggle { float: right; } .diffblocks { counter-reset: lineno; } .diffblocks > div { counter-increment: lineno; } .sourcelines > span { display: inline-block; box-sizing: border-box; width: 100%; padding: 1px 0px 1px 5em; counter-increment: lineno; } .sourcelines > span:before { -moz-user-select: -moz-none; -khtml-user-select: none; -webkit-user-select: none; -ms-user-select: none; user-select: none; display: inline-block; margin-left: -5em; width: 4em; font-size: smaller; color: #999; text-align: right; content: counters(lineno, "."); float: left; } .sourcelines > span:target, tr:target td { background-color: #bfdfff; } .sourcelines > a { display: inline-block; position: absolute; left: 0px; width: 4em; height: 1em; } .fileline { font-family: monospace; } .fileline img { border: 0; } .tagEntry .closed { color: #99f; } /* Changeset entry */ #changesetEntry { border-collapse: collapse; font-size: 90%; width: 100%; margin-bottom: 1em; } #changesetEntry th { padding: 1px 4px; width: 4em; text-align: right; font-weight: normal; color: #999; margin-right: .5em; vertical-align: top; } div.description { border-left: 2px solid #999; margin: 1em 0 1em 0; padding: .3em; white-space: pre; font-family: monospace; } /* Graph */ div#wrapper { position: relative; border-top: 1px solid black; border-bottom: 1px solid black; margin: 0; padding: 0; } canvas { position: absolute; z-index: 5; top: -0.7em; margin: 0; } ul#graphnodes { position: absolute; z-index: 10; top: -1.0em; list-style: none inside none; padding: 0; } ul#nodebgs { list-style: none inside none; padding: 0; margin: 0; top: -0.7em; } ul#graphnodes li, ul#nodebgs li { height: 39px; } ul#graphnodes li .info { display: block; font-size: 70%; position: relative; top: -3px; } /* Comparison */ .legend { padding: 1.5% 0 1.5% 0; } .legendinfo { border: 1px solid #999; font-size: 80%; text-align: center; padding: 0.5%; } .equal { background-color: #ffffff; } .delete { background-color: #faa; color: #333; } .insert { 
background-color: #ffa; } .replace { background-color: #e8e8e8; } .header { text-align: center; } .block { border-top: 1px solid #999; } .breadcrumb { color: gray; } .breadcrumb a { color: blue; } .scroll-loading { -webkit-animation: change_color 1s linear 0s infinite alternate; -moz-animation: change_color 1s linear 0s infinite alternate; -o-animation: change_color 1s linear 0s infinite alternate; animation: change_color 1s linear 0s infinite alternate; } @-webkit-keyframes change_color { from { background-color: #A0CEFF; } to { } } @-moz-keyframes change_color { from { background-color: #A0CEFF; } to { } } @-o-keyframes change_color { from { background-color: #A0CEFF; } to { } } @keyframes change_color { from { background-color: #A0CEFF; } to { } } .scroll-loading-error { background-color: #FFCCCC !important; } mercurial-3.7.3/mercurial/templates/static/style-gitweb.css0000644000175000017500000001457112676531525023540 0ustar mpmmpm00000000000000body { font-family: sans-serif; font-size: 12px; border:solid #d9d8d1; border-width:1px; margin:10px; background: white; color: black; } a { color:#0000cc; } a:hover, a:visited, a:active { color:#880000; } div.page_header { height:25px; padding:8px; font-size:18px; font-weight:bold; background-color:#d9d8d1; } div.page_header a:visited { color:#0000cc; } div.page_header a:hover { color:#880000; } div.page_nav { padding:8px; } div.page_nav a:visited { color:#0000cc; } div.page_path { padding:8px; border:solid #d9d8d1; border-width:0px 0px 1px} div.page_footer { padding:4px 8px; background-color: #d9d8d1; } div.page_footer_text { float:left; color:#555555; font-style:italic; } div.page_body { padding:8px; } div.title, a.title { display:block; padding:6px 8px; font-weight:bold; background-color:#edece6; text-decoration:none; color:#000000; } a.title:hover { background-color: #d9d8d1; } div.title_text { padding:6px 0px; border: solid #d9d8d1; border-width:0px 0px 1px; } div.log_body { padding:8px 8px 8px 150px; } .age { 
white-space:nowrap; } span.age { position:relative; float:left; width:142px; font-style:italic; } div.log_link { padding:0px 8px; font-size:10px; font-family:sans-serif; font-style:normal; position:relative; float:left; width:136px; } div.list_head { padding:6px 8px 4px; border:solid #d9d8d1; border-width:1px 0px 0px; font-style:italic; } a.list { text-decoration:none; color:#000000; } a.list:hover { text-decoration:underline; color:#880000; } table { padding:8px 4px; } th { padding:2px 5px; font-size:12px; text-align:left; } tr.dark, .parity1, pre.sourcelines.stripes > :nth-child(4n+4) { background-color:#f6f6f0; } tr.light:hover, .parity0:hover, tr.dark:hover, .parity1:hover, pre.sourcelines.stripes > :nth-child(4n+2):hover, pre.sourcelines.stripes > :nth-child(4n+4):hover, pre.sourcelines.stripes > :nth-child(4n+1):hover + :nth-child(4n+2), pre.sourcelines.stripes > :nth-child(4n+3):hover + :nth-child(4n+4) { background-color:#edece6; } td { padding:2px 5px; font-size:12px; vertical-align:top; } td.closed { background-color: #99f; } td.link { padding:2px 5px; font-family:sans-serif; font-size:10px; } td.indexlinks { white-space: nowrap; } td.indexlinks a { padding: 2px 5px; line-height: 10px; border: 1px solid; color: #ffffff; background-color: #7777bb; border-color: #aaaadd #333366 #333366 #aaaadd; font-weight: bold; text-align: center; text-decoration: none; font-size: 10px; } td.indexlinks a:hover { background-color: #6666aa; } div.pre { font-family:monospace; font-size:12px; white-space:pre; } div.diff_info { font-family:monospace; color:#000099; background-color:#edece6; font-style:italic; } div.index_include { border:solid #d9d8d1; border-width:0px 0px 1px; padding:12px 8px; } div.search { margin:4px 8px; position:absolute; top:56px; right:12px } .linenr { color:#999999; text-decoration:none } div.rss_logo { float: right; white-space: nowrap; } div.rss_logo a { padding:3px 6px; line-height:10px; border:1px solid; border-color:#fcc7a5 #7d3302 #3e1a01 
#ff954e; color:#ffffff; background-color:#ff6600; font-weight:bold; font-family:sans-serif; font-size:10px; text-align:center; text-decoration:none; } div.rss_logo a:hover { background-color:#ee5500; } pre { margin: 0; } span.logtags span { padding: 0px 4px; font-size: 10px; font-weight: normal; border: 1px solid; background-color: #ffaaff; border-color: #ffccff #ff00ee #ff00ee #ffccff; } span.logtags span.tagtag { background-color: #ffffaa; border-color: #ffffcc #ffee00 #ffee00 #ffffcc; } span.logtags span.branchtag { background-color: #aaffaa; border-color: #ccffcc #00cc33 #00cc33 #ccffcc; } span.logtags span.inbranchtag { background-color: #d5dde6; border-color: #e3ecf4 #9398f4 #9398f4 #e3ecf4; } span.logtags span.bookmarktag { background-color: #afdffa; border-color: #ccecff #46ace6 #46ace6 #ccecff; } span.difflineplus { color:#008800; } span.difflineminus { color:#cc0000; } span.difflineat { color:#990099; } div.diffblocks { counter-reset: lineno; } div.diffblock { counter-increment: lineno; } pre.sourcelines { position: relative; counter-reset: lineno; } pre.sourcelines > span { display: inline-block; box-sizing: border-box; width: 100%; padding: 0 0 0 5em; counter-increment: lineno; vertical-align: top; } pre.sourcelines > span:before { -moz-user-select: -moz-none; -khtml-user-select: none; -webkit-user-select: none; -ms-user-select: none; user-select: none; display: inline-block; margin-left: -5em; width: 4em; color: #999; text-align: right; content: counters(lineno,"."); float: left; } pre.sourcelines > a { display: inline-block; position: absolute; left: 0px; width: 4em; height: 1em; } tr:target td, pre.sourcelines > span:target, pre.sourcelines.stripes > span:target { background-color: #bfdfff; } /* Graph */ div#wrapper { position: relative; margin: 0; padding: 0; margin-top: 3px; } canvas { position: absolute; z-index: 5; top: -0.9em; margin: 0; } ul#nodebgs { list-style: none inside none; padding: 0; margin: 0; top: -0.7em; } ul#graphnodes li, 
ul#nodebgs li { height: 39px; } ul#graphnodes { position: absolute; z-index: 10; top: -0.8em; list-style: none inside none; padding: 0; } ul#graphnodes li .info { display: block; font-size: 100%; position: relative; top: -3px; font-style: italic; } /* Comparison */ .legend { padding: 1.5% 0 1.5% 0; } .legendinfo { border: 1px solid #d9d8d1; font-size: 80%; text-align: center; padding: 0.5%; } .equal { background-color: #ffffff; } .delete { background-color: #faa; color: #333; } .insert { background-color: #ffa; } .replace { background-color: #e8e8e8; } .comparison { overflow-x: auto; } .header th { text-align: center; } .block { border-top: 1px solid #d9d8d1; } .scroll-loading { -webkit-animation: change_color 1s linear 0s infinite alternate; -moz-animation: change_color 1s linear 0s infinite alternate; -o-animation: change_color 1s linear 0s infinite alternate; animation: change_color 1s linear 0s infinite alternate; } @-webkit-keyframes change_color { from { background-color: #A0CEFF; } to { } } @-moz-keyframes change_color { from { background-color: #A0CEFF; } to { } } @-o-keyframes change_color { from { background-color: #A0CEFF; } to { } } @keyframes change_color { from { background-color: #A0CEFF; } to { } } .scroll-loading-error { background-color: #FFCCCC !important; } #doc { margin: 0 8px; } mercurial-3.7.3/mercurial/templates/static/excanvas.js0000644000175000017500000006464512676531525022564 0ustar mpmmpm00000000000000// Copyright 2006 Google Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
// See the License for the specific language governing permissions and // limitations under the License. // Known Issues: // // * Patterns are not implemented. // * Radial gradient are not implemented. The VML version of these look very // different from the canvas one. // * Clipping paths are not implemented. // * Coordsize. The width and height attribute have higher priority than the // width and height style values which isn't correct. // * Painting mode isn't implemented. // * Canvas width/height should is using content-box by default. IE in // Quirks mode will draw the canvas using border-box. Either change your // doctype to HTML5 // (http://www.whatwg.org/specs/web-apps/current-work/#the-doctype) // or use Box Sizing Behavior from WebFX // (http://webfx.eae.net/dhtml/boxsizing/boxsizing.html) // * Non uniform scaling does not correctly scale strokes. // * Optimize. There is always room for speed improvements. // Only add this code if we do not already have a canvas implementation if (!document.createElement('canvas').getContext) { (function() { // alias some functions to make (compiled) code shorter var m = Math; var mr = m.round; var ms = m.sin; var mc = m.cos; var abs = m.abs; var sqrt = m.sqrt; // this is used for sub pixel precision var Z = 10; var Z2 = Z / 2; /** * This funtion is assigned to the elements as element.getContext(). * @this {HTMLElement} * @return {CanvasRenderingContext2D_} */ function getContext() { return this.context_ || (this.context_ = new CanvasRenderingContext2D_(this)); } var slice = Array.prototype.slice; /** * Binds a function to an object. The returned function will always use the * passed in {@code obj} as {@code this}. 
* * Example: * * g = bind(f, obj, a, b) * g(c, d) // will do f.call(obj, a, b, c, d) * * @param {Function} f The function to bind the object to * @param {Object} obj The object that should act as this when the function * is called * @param {*} var_args Rest arguments that will be used as the initial * arguments when the function is called * @return {Function} A new function that has bound this */ function bind(f, obj, var_args) { var a = slice.call(arguments, 2); return function() { return f.apply(obj, a.concat(slice.call(arguments))); }; } var G_vmlCanvasManager_ = { init: function(opt_doc) { if (/MSIE/.test(navigator.userAgent) && !window.opera) { var doc = opt_doc || document; // Create a dummy element so that IE will allow canvas elements to be // recognized. doc.createElement('canvas'); doc.attachEvent('onreadystatechange', bind(this.init_, this, doc)); } }, init_: function(doc) { // create xmlns if (!doc.namespaces['g_vml_']) { doc.namespaces.add('g_vml_', 'urn:schemas-microsoft-com:vml', '#default#VML'); } if (!doc.namespaces['g_o_']) { doc.namespaces.add('g_o_', 'urn:schemas-microsoft-com:office:office', '#default#VML'); } // Setup default CSS. Only add one style sheet per document if (!doc.styleSheets['ex_canvas_']) { var ss = doc.createStyleSheet(); ss.owningElement.id = 'ex_canvas_'; ss.cssText = 'canvas{display:inline-block;overflow:hidden;' + // default size is 300x150 in Gecko and Opera 'text-align:left;width:300px;height:150px}' + 'g_vml_\\:*{behavior:url(#default#VML)}' + 'g_o_\\:*{behavior:url(#default#VML)}'; } // find all canvas elements var els = doc.getElementsByTagName('canvas'); for (var i = 0; i < els.length; i++) { this.initElement(els[i]); } }, /** * Public initializes a canvas element so that it can be used as canvas * element from now on. This is called automatically before the page is * loaded but if you are creating elements using createElement you need to * make sure this is called on the element. 
* @param {HTMLElement} el The canvas element to initialize. * @return {HTMLElement} the element that was created. */ initElement: function(el) { if (!el.getContext) { el.getContext = getContext; // Remove fallback content. There is no way to hide text nodes so we // just remove all childNodes. We could hide all elements and remove // text nodes but who really cares about the fallback content. el.innerHTML = ''; // do not use inline function because that will leak memory el.attachEvent('onpropertychange', onPropertyChange); el.attachEvent('onresize', onResize); var attrs = el.attributes; if (attrs.width && attrs.width.specified) { // TODO: use runtimeStyle and coordsize // el.getContext().setWidth_(attrs.width.nodeValue); el.style.width = attrs.width.nodeValue + 'px'; } else { el.width = el.clientWidth; } if (attrs.height && attrs.height.specified) { // TODO: use runtimeStyle and coordsize // el.getContext().setHeight_(attrs.height.nodeValue); el.style.height = attrs.height.nodeValue + 'px'; } else { el.height = el.clientHeight; } //el.getContext().setCoordsize_() } return el; } }; function onPropertyChange(e) { var el = e.srcElement; switch (e.propertyName) { case 'width': el.style.width = el.attributes.width.nodeValue + 'px'; el.getContext().clearRect(); break; case 'height': el.style.height = el.attributes.height.nodeValue + 'px'; el.getContext().clearRect(); break; } } function onResize(e) { var el = e.srcElement; if (el.firstChild) { el.firstChild.style.width = el.clientWidth + 'px'; el.firstChild.style.height = el.clientHeight + 'px'; } } G_vmlCanvasManager_.init(); // precompute "00" to "FF" var dec2hex = []; for (var i = 0; i < 16; i++) { for (var j = 0; j < 16; j++) { dec2hex[i * 16 + j] = i.toString(16) + j.toString(16); } } function createMatrixIdentity() { return [ [1, 0, 0], [0, 1, 0], [0, 0, 1] ]; } function matrixMultiply(m1, m2) { var result = createMatrixIdentity(); for (var x = 0; x < 3; x++) { for (var y = 0; y < 3; y++) { var sum = 0; for (var z 
= 0; z < 3; z++) { sum += m1[x][z] * m2[z][y]; } result[x][y] = sum; } } return result; } function copyState(o1, o2) { o2.fillStyle = o1.fillStyle; o2.lineCap = o1.lineCap; o2.lineJoin = o1.lineJoin; o2.lineWidth = o1.lineWidth; o2.miterLimit = o1.miterLimit; o2.shadowBlur = o1.shadowBlur; o2.shadowColor = o1.shadowColor; o2.shadowOffsetX = o1.shadowOffsetX; o2.shadowOffsetY = o1.shadowOffsetY; o2.strokeStyle = o1.strokeStyle; o2.globalAlpha = o1.globalAlpha; o2.arcScaleX_ = o1.arcScaleX_; o2.arcScaleY_ = o1.arcScaleY_; o2.lineScale_ = o1.lineScale_; } function processStyle(styleString) { var str, alpha = 1; styleString = String(styleString); if (styleString.substring(0, 3) == 'rgb') { var start = styleString.indexOf('(', 3); var end = styleString.indexOf(')', start + 1); var guts = styleString.substring(start + 1, end).split(','); str = '#'; for (var i = 0; i < 3; i++) { str += dec2hex[Number(guts[i])]; } if (guts.length == 4 && styleString.substr(3, 1) == 'a') { alpha = guts[3]; } } else { str = styleString; } return {color: str, alpha: alpha}; } function processLineCap(lineCap) { switch (lineCap) { case 'butt': return 'flat'; case 'round': return 'round'; case 'square': default: return 'square'; } } /** * This class implements CanvasRenderingContext2D interface as described by * the WHATWG. 
* @param {HTMLElement} surfaceElement The element that the 2D context should * be associated with */ function CanvasRenderingContext2D_(surfaceElement) { this.m_ = createMatrixIdentity(); this.mStack_ = []; this.aStack_ = []; this.currentPath_ = []; // Canvas context properties this.strokeStyle = '#000'; this.fillStyle = '#000'; this.lineWidth = 1; this.lineJoin = 'miter'; this.lineCap = 'butt'; this.miterLimit = Z * 1; this.globalAlpha = 1; this.canvas = surfaceElement; var el = surfaceElement.ownerDocument.createElement('div'); el.style.width = surfaceElement.clientWidth + 'px'; el.style.height = surfaceElement.clientHeight + 'px'; el.style.overflow = 'hidden'; el.style.position = 'absolute'; surfaceElement.appendChild(el); this.element_ = el; this.arcScaleX_ = 1; this.arcScaleY_ = 1; this.lineScale_ = 1; } var contextPrototype = CanvasRenderingContext2D_.prototype; contextPrototype.clearRect = function() { this.element_.innerHTML = ''; }; contextPrototype.beginPath = function() { // TODO: Branch current matrix so that save/restore has no effect // as per safari docs. this.currentPath_ = []; }; contextPrototype.moveTo = function(aX, aY) { var p = this.getCoords_(aX, aY); this.currentPath_.push({type: 'moveTo', x: p.x, y: p.y}); this.currentX_ = p.x; this.currentY_ = p.y; }; contextPrototype.lineTo = function(aX, aY) { var p = this.getCoords_(aX, aY); this.currentPath_.push({type: 'lineTo', x: p.x, y: p.y}); this.currentX_ = p.x; this.currentY_ = p.y; }; contextPrototype.bezierCurveTo = function(aCP1x, aCP1y, aCP2x, aCP2y, aX, aY) { var p = this.getCoords_(aX, aY); var cp1 = this.getCoords_(aCP1x, aCP1y); var cp2 = this.getCoords_(aCP2x, aCP2y); bezierCurveTo(this, cp1, cp2, p); }; // Helper function that takes the already fixed cordinates. 
function bezierCurveTo(self, cp1, cp2, p) { self.currentPath_.push({ type: 'bezierCurveTo', cp1x: cp1.x, cp1y: cp1.y, cp2x: cp2.x, cp2y: cp2.y, x: p.x, y: p.y }); self.currentX_ = p.x; self.currentY_ = p.y; } contextPrototype.quadraticCurveTo = function(aCPx, aCPy, aX, aY) { // the following is lifted almost directly from // http://developer.mozilla.org/en/docs/Canvas_tutorial:Drawing_shapes var cp = this.getCoords_(aCPx, aCPy); var p = this.getCoords_(aX, aY); var cp1 = { x: this.currentX_ + 2.0 / 3.0 * (cp.x - this.currentX_), y: this.currentY_ + 2.0 / 3.0 * (cp.y - this.currentY_) }; var cp2 = { x: cp1.x + (p.x - this.currentX_) / 3.0, y: cp1.y + (p.y - this.currentY_) / 3.0 }; bezierCurveTo(this, cp1, cp2, p); }; contextPrototype.arc = function(aX, aY, aRadius, aStartAngle, aEndAngle, aClockwise) { aRadius *= Z; var arcType = aClockwise ? 'at' : 'wa'; var xStart = aX + mc(aStartAngle) * aRadius - Z2; var yStart = aY + ms(aStartAngle) * aRadius - Z2; var xEnd = aX + mc(aEndAngle) * aRadius - Z2; var yEnd = aY + ms(aEndAngle) * aRadius - Z2; // IE won't render arches drawn counter clockwise if xStart == xEnd. if (xStart == xEnd && !aClockwise) { xStart += 0.125; // Offset xStart by 1/80 of a pixel. 
Use something // that can be represented in binary } var p = this.getCoords_(aX, aY); var pStart = this.getCoords_(xStart, yStart); var pEnd = this.getCoords_(xEnd, yEnd); this.currentPath_.push({type: arcType, x: p.x, y: p.y, radius: aRadius, xStart: pStart.x, yStart: pStart.y, xEnd: pEnd.x, yEnd: pEnd.y}); }; contextPrototype.rect = function(aX, aY, aWidth, aHeight) { this.moveTo(aX, aY); this.lineTo(aX + aWidth, aY); this.lineTo(aX + aWidth, aY + aHeight); this.lineTo(aX, aY + aHeight); this.closePath(); }; contextPrototype.strokeRect = function(aX, aY, aWidth, aHeight) { var oldPath = this.currentPath_; this.beginPath(); this.moveTo(aX, aY); this.lineTo(aX + aWidth, aY); this.lineTo(aX + aWidth, aY + aHeight); this.lineTo(aX, aY + aHeight); this.closePath(); this.stroke(); this.currentPath_ = oldPath; }; contextPrototype.fillRect = function(aX, aY, aWidth, aHeight) { var oldPath = this.currentPath_; this.beginPath(); this.moveTo(aX, aY); this.lineTo(aX + aWidth, aY); this.lineTo(aX + aWidth, aY + aHeight); this.lineTo(aX, aY + aHeight); this.closePath(); this.fill(); this.currentPath_ = oldPath; }; contextPrototype.createLinearGradient = function(aX0, aY0, aX1, aY1) { var gradient = new CanvasGradient_('gradient'); gradient.x0_ = aX0; gradient.y0_ = aY0; gradient.x1_ = aX1; gradient.y1_ = aY1; return gradient; }; contextPrototype.createRadialGradient = function(aX0, aY0, aR0, aX1, aY1, aR1) { var gradient = new CanvasGradient_('gradientradial'); gradient.x0_ = aX0; gradient.y0_ = aY0; gradient.r0_ = aR0; gradient.x1_ = aX1; gradient.y1_ = aY1; gradient.r1_ = aR1; return gradient; }; contextPrototype.drawImage = function(image, var_args) { var dx, dy, dw, dh, sx, sy, sw, sh; // to find the original width we overide the width and height var oldRuntimeWidth = image.runtimeStyle.width; var oldRuntimeHeight = image.runtimeStyle.height; image.runtimeStyle.width = 'auto'; image.runtimeStyle.height = 'auto'; // get the original size var w = image.width; var h = 
image.height; // and remove overides image.runtimeStyle.width = oldRuntimeWidth; image.runtimeStyle.height = oldRuntimeHeight; if (arguments.length == 3) { dx = arguments[1]; dy = arguments[2]; sx = sy = 0; sw = dw = w; sh = dh = h; } else if (arguments.length == 5) { dx = arguments[1]; dy = arguments[2]; dw = arguments[3]; dh = arguments[4]; sx = sy = 0; sw = w; sh = h; } else if (arguments.length == 9) { sx = arguments[1]; sy = arguments[2]; sw = arguments[3]; sh = arguments[4]; dx = arguments[5]; dy = arguments[6]; dw = arguments[7]; dh = arguments[8]; } else { throw Error('Invalid number of arguments'); } var d = this.getCoords_(dx, dy); var w2 = sw / 2; var h2 = sh / 2; var vmlStr = []; var W = 10; var H = 10; // For some reason that I've now forgotten, using divs didn't work vmlStr.push(' ' , '', ''); this.element_.insertAdjacentHTML('BeforeEnd', vmlStr.join('')); }; contextPrototype.stroke = function(aFill) { var lineStr = []; var lineOpen = false; var a = processStyle(aFill ? this.fillStyle : this.strokeStyle); var color = a.color; var opacity = a.alpha * this.globalAlpha; var W = 10; var H = 10; lineStr.push(''); if (!aFill) { var lineWidth = this.lineScale_ * this.lineWidth; // VML cannot correctly render a line if the width is less than 1px. // In that case, we dilute the color to make the line look thinner. 
if (lineWidth < 1) { opacity *= lineWidth; } lineStr.push( '' ); } else if (typeof this.fillStyle == 'object') { var fillStyle = this.fillStyle; var angle = 0; var focus = {x: 0, y: 0}; // additional offset var shift = 0; // scale factor for offset var expansion = 1; if (fillStyle.type_ == 'gradient') { var x0 = fillStyle.x0_ / this.arcScaleX_; var y0 = fillStyle.y0_ / this.arcScaleY_; var x1 = fillStyle.x1_ / this.arcScaleX_; var y1 = fillStyle.y1_ / this.arcScaleY_; var p0 = this.getCoords_(x0, y0); var p1 = this.getCoords_(x1, y1); var dx = p1.x - p0.x; var dy = p1.y - p0.y; angle = Math.atan2(dx, dy) * 180 / Math.PI; // The angle should be a non-negative number. if (angle < 0) { angle += 360; } // Very small angles produce an unexpected result because they are // converted to a scientific notation string. if (angle < 1e-6) { angle = 0; } } else { var p0 = this.getCoords_(fillStyle.x0_, fillStyle.y0_); var width = max.x - min.x; var height = max.y - min.y; focus = { x: (p0.x - min.x) / width, y: (p0.y - min.y) / height }; width /= this.arcScaleX_ * Z; height /= this.arcScaleY_ * Z; var dimension = m.max(width, height); shift = 2 * fillStyle.r0_ / dimension; expansion = 2 * fillStyle.r1_ / dimension - shift; } // We need to sort the color stops in ascending order by offset, // otherwise IE won't interpret it correctly. var stops = fillStyle.colors_; stops.sort(function(cs1, cs2) { return cs1.offset - cs2.offset; }); var length = stops.length; var color1 = stops[0].color; var color2 = stops[length - 1].color; var opacity1 = stops[0].alpha * this.globalAlpha; var opacity2 = stops[length - 1].alpha * this.globalAlpha; var colors = []; for (var i = 0; i < length; i++) { var stop = stops[i]; colors.push(stop.offset * expansion + shift + ' ' + stop.color); } // When colors attribute is used, the meanings of opacity and o:opacity2 // are reversed. 
lineStr.push(''); } else { lineStr.push(''); } lineStr.push(''); this.element_.insertAdjacentHTML('beforeEnd', lineStr.join('')); }; contextPrototype.fill = function() { this.stroke(true); } contextPrototype.closePath = function() { this.currentPath_.push({type: 'close'}); }; /** * @private */ contextPrototype.getCoords_ = function(aX, aY) { var m = this.m_; return { x: Z * (aX * m[0][0] + aY * m[1][0] + m[2][0]) - Z2, y: Z * (aX * m[0][1] + aY * m[1][1] + m[2][1]) - Z2 } }; contextPrototype.save = function() { var o = {}; copyState(this, o); this.aStack_.push(o); this.mStack_.push(this.m_); this.m_ = matrixMultiply(createMatrixIdentity(), this.m_); }; contextPrototype.restore = function() { copyState(this.aStack_.pop(), this); this.m_ = this.mStack_.pop(); }; function matrixIsFinite(m) { for (var j = 0; j < 3; j++) { for (var k = 0; k < 2; k++) { if (!isFinite(m[j][k]) || isNaN(m[j][k])) { return false; } } } return true; } function setM(ctx, m, updateLineScale) { if (!matrixIsFinite(m)) { return; } ctx.m_ = m; if (updateLineScale) { // Get the line scale. // Determinant of this.m_ means how much the area is enlarged by the // transformation. So its square root can be used as a scale factor // for width. 
var det = m[0][0] * m[1][1] - m[0][1] * m[1][0]; ctx.lineScale_ = sqrt(abs(det)); } } contextPrototype.translate = function(aX, aY) { var m1 = [ [1, 0, 0], [0, 1, 0], [aX, aY, 1] ]; setM(this, matrixMultiply(m1, this.m_), false); }; contextPrototype.rotate = function(aRot) { var c = mc(aRot); var s = ms(aRot); var m1 = [ [c, s, 0], [-s, c, 0], [0, 0, 1] ]; setM(this, matrixMultiply(m1, this.m_), false); }; contextPrototype.scale = function(aX, aY) { this.arcScaleX_ *= aX; this.arcScaleY_ *= aY; var m1 = [ [aX, 0, 0], [0, aY, 0], [0, 0, 1] ]; setM(this, matrixMultiply(m1, this.m_), true); }; contextPrototype.transform = function(m11, m12, m21, m22, dx, dy) { var m1 = [ [m11, m12, 0], [m21, m22, 0], [dx, dy, 1] ]; setM(this, matrixMultiply(m1, this.m_), true); }; contextPrototype.setTransform = function(m11, m12, m21, m22, dx, dy) { var m = [ [m11, m12, 0], [m21, m22, 0], [dx, dy, 1] ]; setM(this, m, true); }; /******** STUBS ********/ contextPrototype.clip = function() { // TODO: Implement }; contextPrototype.arcTo = function() { // TODO: Implement }; contextPrototype.createPattern = function() { return new CanvasPattern_; }; // Gradient / Pattern Stubs function CanvasGradient_(aType) { this.type_ = aType; this.x0_ = 0; this.y0_ = 0; this.r0_ = 0; this.x1_ = 0; this.y1_ = 0; this.r1_ = 0; this.colors_ = []; } CanvasGradient_.prototype.addColorStop = function(aOffset, aColor) { aColor = processStyle(aColor); this.colors_.push({offset: aOffset, color: aColor.color, alpha: aColor.alpha}); }; function CanvasPattern_() {} // set up externs G_vmlCanvasManager = G_vmlCanvasManager_; CanvasRenderingContext2D = CanvasRenderingContext2D_; CanvasGradient = CanvasGradient_; CanvasPattern = CanvasPattern_; })(); } // if mercurial-3.7.3/mercurial/templates/static/hglogo.png0000644000175000017500000001003312676531525022361 0ustar mpmmpm00000000000000‰PNG  IHDRKZ¦<ƒbKGDÿÿÿ ½§“ÐIDATxÚíœy\UÕÇ{š’¦–Røipsj0,0A!B™˜b0  
ÁAõƒZ"Š™\q(d0ËJR“¨`$è2$,#§,+±²÷}ì÷Îç~î˹—{¯Ãsÿáçr8wïõÛk­ßú­}Þu×ÿÃøàƒþu;pý!?ü};pÝAh¸ñ×_]¹r¥®®îôéÓ'Nœ`Åà ƒÇçâ¹sç~ûí·ëׯßbëëë¿ûî;ìß¿ÿŸjã³Ï>ËÏÏß»w/Ô¯ùDe´ñî»ï¢ÂœœœˆX…ž4 B½¸¸XÄ'F¨Œ9ð$kkk[^^®¤aJìòñÇ«Œ?²²²ÒÒÒ\\\ S üóÏ?&R²U&uHHHllìÏ?ÿlt„D âK„¨Ne½%ƒ…PH}ûöªvÖ1Â;vBt*buáÂ…Hvã"\£Z#©3•iÇ®]» ’’òûï¿ !òÆoÜ(„HÂÅ‹÷èÑ l,„W¯^MJJÉF#D¸îÙ³§wïÞ³gÏFë!½ß¬Y³ØH!©è L y˜Ð½{÷cÇŽ!5wÚ´iãÆKOO!]’‰ÒUâFssóÔÔTê–Q˜&òõÈääd+++¢”ÔW™|€pôèÑÎÎÎgÏž5 Âe)ËÎ:ÄÇÇãÆ-[¶˜!JuÁ‚íÛ·/--Õ!„yéÒ%$Ò÷ßùòe 5ÈÃÃÃ=<<:wî¼}ûvvÔ m¡N…‘FªüÈ£„¢a/**¢šç5 Úö’’’óçÏ«ßãçë­µnÝzÈ!Ä* «)¢õ±ªcÇŽÐÁ¹sçt@ˆéHjº!ä¼úŒˆO`ÔÖÖJwîܹ3,,ÌÝÝy&Ožl¼±)“wëÖíÙgŸýúë¯u@ˆ÷ðÎi”Á IÚÆAA)KSØÈV­ZEEE±$бÑïc°ãhTÒäË/¿TŠð×_åkZ”ô¶mÛ"""¤œD¾èøbttô?F`` Ê1v»(QÚ¯_?Ö%âä4Ñ8Â}ûö!©5&âûÖ£GB˜„.m¨zB²Œópg°‰ û÷ï¿zõjœÉ<ÆS$ˆ(h7e! Ú$Âuk×APês8p€fì—†!@>õÔS_}õ•ú·²³²‡9 ó÷÷g;™“ˆ…{–,Y‚31iðöªººš”as-,,X‘µ~úé'EiLˆCõl替üo€8Pž½tÃC솼úê«(){{û.]º09ùigg %Izggg¯_¿¾åX…%—™™ %&&þøãŠÆDǨIN¼‹/B•/¿ü²››é*¯1UUUn®nÁÁÁÜ6gΜ)S¦<þøã±(¬#FÐò¨8Vƒ¨uÄ?ö0ÃæÍ›Џ¸8ж"„ËS—«ÒUê§gΜ!Jkjj€‡Ôvrrš4iRS"dX™¶Òv°ík¯½6a„±cÇé‹/¾ ò‘¸—»víŠW `½ÏŽ?ÿüsÈÆ-Z$PÌœ9;!¤¸º¸ßl3fë„[…€ ]²±±quu=tèvAGýE²‚3$$šÅŸ Å v âÇvíÚAH,A˜é’íF‹bñ"PÌ;W)B¼ÄÞ³ål¶££#Š!((`LÇ¿+W®T(\ñ'ájgk÷Ê+¯Ì˜1cêÔ©ôÍ$Ì?ü0f̘ ‘ §îfÅ;UYYÉ©õÁ¥4J…¦ÉX“Á#jI¡¶mÛNŸ>ÉÒ˲¬Çà ’–˜d†¡C†’ç„(Û„•–––â)¢NÜsðàA’°°°¾ €Ð¦M›U«Vi·æ•7:(1!ÑÅÅå¹çžsqvIJLúæ›oZø„ƒ˜?wèС3bfÀC4éð6n„`Â#¤ ’­/%!Õ‹†Xiµ0ÍöáÆ“Ÿ½zõ‚l@H¤!>ǧN‚ö`f€çúÕ«Wo¢7.\¸0Â}¥Hœô(DHQ…N@ˆ6†¥:uê„ýg__ßýû÷+Um&d·@HÈ)ÔÙÀƒ¥(fóæÍö“Õ~~~åååFynA` PE‹Œ[®]»¦Ó×aWª¡G%ÀáŠöE°(ò0&&†N_Î-BH<îeeeÒ»bTTT\ºtI!ßBHVfPÒˆ „WWW'^w: õKa£A× ™!¬¯¯'$ÄA0’ã2€à!=øH£ç_ƒK•< 0õa_`xaüòåËù\TT$†¡'BÂÅD>háwÜ‚?Ñ=Í‚Ìß›Ï65[ñ‰aRxß~û-Úèî»ïÆrº{ ­ÂÉ“' sÖÆ>•––¯Ù ßiD´‡+IÈmÚ%8ûµB¹ø|øðáÂò´´´^x–¥Ñ—5ôAH6cÂSCÈ`Ú´iê²|{اý 3ˆÎæÈ‘#¢ÉHãKIDîÊYTO„l¡N§i¤¨µµ5»ÞÔ„p» Ñmã|*[II 4&JŸà„`(*O?ý´0;''ÇÙÙ™+ôt†AH6C!䀒óvÈ€vÄÇÇÇÓÓ³©''ì=?IßÂé0A}pg²4…AØL ¢K)ôlS‰ Bfa…OB 'Br•ö÷¾ûîÓüÒrXJÉF7ôK áj¶Lˆ˜:œ8q‚6ÿË‹„žñ1eGÉI;[‘²#¨ æ§Oˆ°Ä""`ª\Sð@N­gZ©QZ±b\@“ “ì0ª%eiJ³/\rx ³H$ÜÀüÔyDP ÅsUœsúôé¦à!È7’FÖRýh, DáÕ.¡tCˆÝ Úß ÂÖªª*aw²ë666æææ§r ʤ%8^i H€»¨x¨3Lµ°°`¯£¢¢âââ}Þ¤?Bºõ°)a˜Õ艒ŸŒª®®–¡‰+À{tnr®Û·o!Ê=°¿T äÞ#)€G¯,Ú\333!ÝÜÜ`Z¿OC\Mœ0„ò÷fàkqT¥Î{péK/½4qâÄQ£Fiè) Qrăqö¥Q欩©!2‰ÏôôôŽ;b$5í 
]Ÿþùm¹Û”¼Ý¦sµÀ ¥K—’<êndÙlõ‡!¹˜ššúÌ3ÏSÐ5æ),(byB lK¬_¿ž]@‘µoß^ÀÓFÅ‹7U~ZЬpqvÁ?â=&8!CX‘” Ùƒ3Á†÷Øâ+ýýý5ún÷H=! ‹‹‹1€ê´*WÐeÌïåå%´ YTÌ@5⻉„ ±G€EFF²*þoo3øfÁŸcƌü+çÆ’%K¤žKøojÈkÑéÃÀTQa@á!±Gމl´´!Ù˜”˜àÿŸólÔý“O>ɾ¾Þ0­¬¬ €‹0”p¶\mp]"iܸqÌÀ®¹ººRîî¿ÿ~é/yyä"“zÙº»»Ïž=»Yò4L÷„‰,l?Ô>44´OŸ>biúôéè`\ÊuØ%sKfSbŠÊ&^à–¦ú@¯°;¤Ø áØØØ À ž1é{Þ‡ì`ïàæêf;ØÖÆÚÆÎÎnðàÁÈE\¤å=%ù9kÖ,BÔÖÖVàíí½`Á‚””ª(|#ržÎ÷ÆDÇlÚ¸I¿?K0Àû4ßÜœÜäää9sæ,J^‰+yíçx{yS²IÝG}Ðôâè ²²’„$ ‚‚‚ˆ|î©MÄúŠòŠ„y £µt;#T2ôA¨^”®7ŒfoÓ~]×*§ý~ßê€xðôôìÖ­- ½Â† ÐPÈ+~DyJÇ[â|‘:uêÔ·o_$˜¸~òäIZûîÝ»Ó( Wij€·Þzë‰'ž`ì¯Ø!¦Õ‹!‰øøxñ™%ÆÏŠÌÆ•ÜÜ\šý¿Þ¡£·bQ zæ=š¾ŒnS<áL̇cÇŽñ-–¦c.--…~ïú \±b…DÓ ”žì ÿíííÅg''':né[ýúõƒ@®þ0‡pxóÍ75bL;Bš .å ôàƒ¶áùóçÕ®^½zРAâ€øÞ{ïU¯?ŽŽŽË–-“#<~ü83à=c ÄÃ]ºtiBzPu„*•ŠN\$[ëÖ­ëêê¤o‘¢D£áÎ;ÍÍÍå žÞÉ‚Ÿ4fC.\øÀaYYC†É`ffFÌÈnݺÖ‘³¢~™gÒ¤I?ü0) }}}»víj„%%%mÚ´ÉÈÈÈÊÊÚ±cüAÆŠ'¾¹¡GòÓ!dÆÎ‹ë»wï­Q¢YIã}N1äQJªÈ£”f711QW„»ví‚´¥ã`ø3 Â+W®P*òóó›EXUUÕªU«êêjÛÆŽ©+BœÆºBêdÏž=‚PØAå8{öìµk×Ä[fBþË«FP¸q#YT[[+þl !!z-NÐêëëãââ” ¬©©!vöìÙ#4ÜÖ§Oz{{oÞ¼YzM1¯ø133ÓËËK|Æ-VVVdcçÎÛ¶mkii)Σ¢¢æÏŸ¯>a^^ê䡇i#ÎàakkkT&ò[ _÷‡††RxÕg˜9sfll¬`>¨ýû÷ì±Çâ\j`å-þ¯”G³GÑÔOn;tèúëô|æà"^Õée*v‡Ùˆù#ƒ;=þ„·*ÂÛÿ3»½Ç¿ìÉFvk€ýéIEND®B`‚mercurial-3.7.3/mercurial/templates/static/style-monoblue.css0000644000175000017500000002315112676531525024071 0ustar mpmmpm00000000000000/*** Initial Settings ***/ * { margin: 0; padding: 0; font-weight: normal; font-style: normal; } html { font-size: 100%; font-family: sans-serif; } body { font-size: 77%; margin: 15px 50px; background: #4B4B4C; } a { color:#0000cc; text-decoration: none; } /*** end of Initial Settings ***/ /** common settings **/ div#container { background: #FFFFFF; position: relative; color: #666; } div.page-header { padding: 50px 20px 0; background: #006699 top left repeat-x; position: relative; } div.page-header h1 { margin: 10px 0 30px; font-size: 1.8em; font-weight: bold; font-family: osaka,'MS P Gothic', Georgia, serif; letter-spacing: 1px; color: #DDD; } div.page-header h1 a { font-weight: bold; color: #FFF; } div.page-header a { text-decoration: none; } div.page-header form { float: 
right; margin-top: -2px; } div.page-header form label { color: #DDD; } div.page-header form input { padding: 2px; border: solid 1px #DDD; } div.page-header form dl { overflow: hidden; } div.page-header form dl dt { font-size: 1.2em; } div.page-header form dl dt, div.page-header form dl dd { margin: 0 0 0 5px; float: left; height: 24px; line-height: 20px; } ul.page-nav { margin: 10px 0 0 0; list-style-type: none; overflow: hidden; } ul.page-nav li { margin: 0 2px 0 0; float: left; width: 80px; height: 24px; font-size: 1.1em; line-height: 24px; text-align: center; } ul.page-nav li.current { background: #FFF; } ul.page-nav li a { height: 24px; color: #666; background: #DDD; display: block; text-decoration: none; } ul.page-nav li a:hover { color:#333; background: #FFF; } ul.submenu { margin: 10px 0 -10px 20px; list-style-type: none; } ul.submenu li { margin: 0 10px 0 0; font-size: 1.2em; display: inline; } h2 { margin: 20px 0 10px; height: 30px; line-height: 30px; text-indent: 20px; background: #FFF; font-size: 1.2em; border-top: dotted 1px #D5E1E6; font-weight: bold; } h2.no-link { color:#006699; } h2.no-border { color: #FFF; background: #006699; border: 0; } h2 a { font-weight:bold; color:#006699; } div.page-path { text-align: right; padding: 20px 30px 10px 0; border:solid #d9d8d1; border-width:0px 0px 1px; font-size: 1.2em; } div.page-footer { margin: 50px 0 0; position: relative; } div.page-footer p { position: relative; padding-left: 20px; bottom: 5px; font-size: 1.2em; } ul.rss-logo { position: absolute; top: -10px; right: 20px; height: 20px; list-style-type: none; } ul.rss-logo li { display: inline; } ul.rss-logo li a { padding: 3px 6px; line-height: 10px; border:1px solid; border-color:#fcc7a5 #7d3302 #3e1a01 #ff954e; color:#ffffff; background-color:#ff6600; font-weight:bold; font-family:sans-serif; font-size:10px; text-align:center; text-decoration:none; } div.rss-logo li a:hover { background-color:#ee5500; } p.normal { margin: 20px 0 20px 30px; font-size: 
1.2em; } table { margin: 10px 0 0 20px; width: 95%; border-collapse: collapse; } table tr td { font-size: 1.1em; } table tr td.nowrap { white-space: nowrap; } table tr td.closed { background-color: #99f; } /* table tr.parity0:hover, table tr.parity1:hover { background: #D5E1E6; } */ table tr.parity0 { background: #F1F6F7; } table tr.parity1 { background: #FFFFFF; } table tr td { padding: 5px 5px; } table.annotated tr td { padding: 0px 5px; } span.logtags span { padding: 2px 6px; font-weight: normal; font-size: 11px; border: 1px solid; background-color: #ffaaff; border-color: #ffccff #ff00ee #ff00ee #ffccff; } span.logtags span.tagtag { background-color: #ffffaa; border-color: #ffffcc #ffee00 #ffee00 #ffffcc; } span.logtags span.branchtag { background-color: #aaffaa; border-color: #ccffcc #00cc33 #00cc33 #ccffcc; } span.logtags span.inbranchtag { background-color: #d5dde6; border-color: #e3ecf4 #9398f4 #9398f4 #e3ecf4; } span.logtags span.bookmarktag { background-color: #afdffa; border-color: #ccecff #46ace6 #46ace6 #ccecff; } div.diff pre { margin: 10px 0 0 0; } div.diff pre span { font-family: monospace; white-space: pre; font-size: 1.2em; } div.diffblocks { counter-reset: lineno; } div.diffblock { counter-increment: lineno; } span.difflineplus { color:#008800; } span.difflineminus { color:#cc0000; } span.difflineat { color:#990099; } pre.sourcelines { position: relative; counter-reset: lineno; font-size: 1.2em; } pre.sourcelines > span { display: inline-block; box-sizing: border-box; width: 100%; padding: 0 0 0 5em; counter-increment: lineno; vertical-align: top; } div.source > pre.sourcelines > span { padding: 1px 1px 1px 5em; } pre.sourcelines > span:before { -moz-user-select: -moz-none; -khtml-user-select: none; -webkit-user-select: none; -ms-user-select: none; user-select: none; display: inline-block; margin-left: -5em; width: 4em; color: #999; text-align: right; content: counters(lineno,"."); float: left; } pre.sourcelines > a { display: inline-block; 
position: absolute; left: 0px; width: 4em; height: 1em; padding: 0.15em; } pre.sourcelines.stripes > :nth-child(4n+2) { background-color: #F1F6F7; } pre.sourcelines.stripes > :nth-child(4n+4) { background-color: #FFFFFF; } pre.sourcelines.stripes > :nth-child(4n+2):hover, pre.sourcelines.stripes > :nth-child(4n+4):hover, pre.sourcelines.stripes > :nth-child(4n+1):hover + :nth-child(4n+2), pre.sourcelines.stripes > :nth-child(4n+3):hover + :nth-child(4n+4) { background-color: #D5E1E6; } pre.sourcelines > span:target, pre.sourcelines.stripes > span:target { background-color: #bfdfff; } td.source { white-space: pre; margin: 10px 30px 0; font-size: 1.2em; font-family: monospace; } .linenr { color: #999; text-align: right; } .lineno { text-align: right; } .lineno a { color: #999; } td.linenr { width: 60px; } div#powered-by { position: absolute; width: 75px; top: 15px; right: 20px; font-size: 1.2em; } div#powered-by a { color: #EEE; text-decoration: none; } div#powered-by a:hover { text-decoration: underline; } /** end of common settings **/ /** summary **/ dl.overview { margin: 0 0 0 30px; font-size: 1.1em; overflow: hidden; } dl.overview dt, dl.overview dd { margin: 5px 0; float: left; } dl.overview dt { clear: left; font-weight: bold; width: 150px; } /** end of summary **/ /** chagelog **/ h3.changelog { margin: 20px 0 5px 30px; padding: 0 0 2px; font-size: 1.4em; border-bottom: dotted 1px #D5E1E6; } ul.changelog-entry { margin: 0 0 10px 30px; list-style-type: none; position: relative; } ul.changelog-entry li span.revdate { font-size: 1.1em; } ul.changelog-entry li.age { position: absolute; top: -25px; right: 10px; font-size: 1.4em; color: #CCC; font-weight: bold; font-style: italic; } ul.changelog-entry li span.name { font-size: 1.2em; font-weight: bold; } ul.changelog-entry li.description { margin: 10px 0 0; font-size: 1.1em; } /** end of changelog **/ /** file **/ p.files { margin: 0 0 0 20px; font-size: 2.0em; font-weight: bold; } /** end of file **/ /** changeset 
**/ h3.changeset { margin: 20px 0 5px 20px; padding: 0 0 2px; font-size: 1.6em; border-bottom: dotted 1px #D5E1E6; } p.changeset-age { position: relative; } p.changeset-age span { position: absolute; top: -25px; right: 10px; font-size: 1.4em; color: #CCC; font-weight: bold; font-style: italic; } p.description { margin: 10px 30px 0 30px; padding: 10px; border: solid 1px #CCC; font-size: 1.2em; } /** end of changeset **/ /** canvas **/ div#wrapper { position: relative; font-size: 1.2em; } canvas { position: absolute; z-index: 5; top: -0.7em; } ul#nodebgs li.parity0 { background: #F1F6F7; } ul#nodebgs li.parity1 { background: #FFFFFF; } ul#graphnodes { position: absolute; z-index: 10; top: 7px; list-style: none inside none; } ul#nodebgs { list-style: none inside none; } ul#graphnodes li, ul#nodebgs li { height: 39px; } ul#graphnodes li .info { display: block; position: relative; } /** end of canvas **/ /** comparison **/ .legend { margin-left: 20px; padding: 1.5% 0 1.5% 0; } .legendinfo { border: 1px solid #999; font-size: 80%; text-align: center; padding: 0.5%; } .equal { background-color: #ffffff; } .delete { background-color: #faa; color: #333; } .insert { background-color: #ffa; } .replace { background-color: #e8e8e8; } .comparison { overflow-x: auto; } .comparison table td { padding: 0px 5px; } .header th { font-weight: bold; } .block { border-top: 1px solid #999; } /** end of comparison **/ .breadcrumb a:hover { text-decoration:underline; } .scroll-loading { -webkit-animation: change_color 1s linear 0s infinite alternate; -moz-animation: change_color 1s linear 0s infinite alternate; -o-animation: change_color 1s linear 0s infinite alternate; animation: change_color 1s linear 0s infinite alternate; } @-webkit-keyframes change_color { from { background-color: #A0CEFF; } to { } } @-moz-keyframes change_color { from { background-color: #A0CEFF; } to { } } @-o-keyframes change_color { from { background-color: #A0CEFF; } to { } } @keyframes change_color { from { 
background-color: #A0CEFF; } to { } } .scroll-loading-error { background-color: #FFCCCC !important; } #doc { margin: 0 30px; } mercurial-3.7.3/mercurial/templates/static/feed-icon-14x14.png0000644000175000017500000000126112676531525023515 0ustar mpmmpm00000000000000‰PNG  IHDRH-ÑgAMA¯È7ŠétEXtSoftwareAdobe ImageReadyqÉe<CIDATxÚŒ’MHaÇóÎÌδ1E"Ûæ!¡HD*Ã(ò–‡P¡KtëP‡nžú¸EÐ¥:¯oAAHBRQÔ!ƒüÊÈ%ЬÖBÔ]ÝüØÝùê™YêÔ;ó ÏûÎóž÷yþíséƒ4°Iƒ@“¼ÿ®õ£±#©ôîÖ’Psü Z¹Hÿ sYLKDzzd¦ŽR`‚TîÙä†ìõB ½~;»¯ýUbõý#Üá ºŸÇ4èb^)úuÀk)C2†ÏÌKhJ’lM‘lhÁiî±Êèm‚ñ~”&@eÅI¨¶25¼Åï„ ¸‚K,M °ò±mGÏa<‹›°á]aào% „µõÔõÞ·¥/¯q'ïQʽbvð©®[ØM§)¿`q~fB†ðYx‘aqd»¡ çdgÿ)¬B–ùÇW«M7õH‹^ÍØÞlâP@ôQzz™™þÖ¦ß`»H²®/7JþeëðyÜJ•%ü„‚Ò†WÀ±\ùIföÆ‰Ö m•ó¿â}XÓXú\^+`´_ǹò«ý¶\â,űX{ÚÑìí¸+Ë1À7j6€š+N²ñDuZÍÝ(á5âÉ]˜ŽÏt»6®ûé¦jœ.<&mÕì¶êÁ¸+A–RÙçä§Ç…ž)ü ¹›Ý¬N £GúûÔÉhêÈ®ˆ¤4yt©È¼²PZ‘þÅ*kâ‹`ür4Mí÷·1MD¾C E2âÿÖX$ò? ¡ÍÖíšêGbIEND®B`‚mercurial-3.7.3/mercurial/templates/static/style-extra-coal.css0000644000175000017500000000114012676531525024302 0ustar mpmmpm00000000000000body { background: black url('background.png') repeat-x; } .container { padding-left: 0; padding-right: 150px; } .main { padding: 2em; border-right: 15px solid black; border-bottom: 15px solid black; } .menu { background: #999; padding: 10px; width: 75px; position: fixed; top: 27px; left: auto; right: 27px; } .menu ul { border-left: 0; } .menu li.active { font-weight: normal; background: black; color: white; } .menu li.active a { color: white; } h3 { margin-top: -.7em; } div.description { border-left-width: 3px; } mercurial-3.7.3/mercurial/templates/static/background.png0000644000175000017500000000113312676531525023222 0ustar mpmmpm00000000000000‰PNG  IHDR Î÷!bKGDÿ‡Ì¿IDATXÃ¥–gWšA…y[Tì1ÆÞ°`,?*¿×{A±÷F¢ILeØs²ãìàÃÅã³ÏÝ™/{ˆ¾Æ^?U…ˆ%òÞßó” ÿÿãŸAÔ™£ü\@™‹šg)ÁÌz³î ÊÜýè—Šé˜YPæNS.WPæ¦Uª»S”+zsTÈX>쮂™Ój³î ÊÜI]tÂ@×ßÎ#i`fA™;N¹Q–›zëêÍŒŽQnªÍ›`ßTØu/CJm.Þ»\@Ý™QÝtÄ@3`_fÖï¦Ü.hfn¦´fG‡ tìëÑ€;H™-Íuûz.lF÷Pî©4û¾˜Yoî§Ü®GûêÎôè ½z^½¹‡ò¸¢™í+Ü@ó¡õ^F?è1˜YPævSžWPæ~ª˜žÚe g`f½ù#å9pEsÀ­„žÚéhTøEQøI `fA™ÛAy \A™Ûn WàeðhÉnå5˜YoÖ]A™Ûj 
7€¶è-Ø×£WPw¦™ò¸€º3I•êneNmÎ}“a×ÍœT›‹÷þ. îL£þ,Ëm ¼û‚f7³çêÍ%» ôÐzýöõh`f@Ý™:ÊßÀ”¹¨¹–òJQs å#xU@³›Y¿W43Pw¦Ú@ÿš0Ð`_A™W©îÒW⩬æ¢û\V³p㑹™NFUõÛd¾ãìÝ1å:eIEND®B`‚mercurial-3.7.3/mercurial/templates/paper/0000755000175000017500000000000012676531544020220 5ustar mpmmpm00000000000000mercurial-3.7.3/mercurial/templates/paper/footer.tmpl0000644000175000017500000000012012676531525022404 0ustar mpmmpm00000000000000 {motd} mercurial-3.7.3/mercurial/templates/paper/shortlogentry.tmpl0000644000175000017500000000054012676531525024037 0ustar mpmmpm00000000000000 {date|rfc822date} {author|person} {desc|strip|firstline|escape|nonempty} {inbranch%changelogbranchname}{branches%changelogbranchhead}{tags%changelogtag}{bookmarks%changelogtag} mercurial-3.7.3/mercurial/templates/paper/search.tmpl0000644000175000017500000000355312676531525022370 0ustar mpmmpm00000000000000{header} {repo|escape}: searching for {query|escape}

                searching for '{query|escape}'

                Assuming {modedesc}. {if(showforcekw, ' Use {showforcekw} instead.')} {if(showunforcekw, ' Use {showunforcekw} instead.')}

                {entries}
                age author description
                {footer} mercurial-3.7.3/mercurial/templates/paper/shortlog.tmpl0000644000175000017500000000545012676531525022762 0ustar mpmmpm00000000000000{header} {repo|escape}: log

                log

                {entries%shortlogentry}
                age author description
                {footer} mercurial-3.7.3/mercurial/templates/paper/filelogentry.tmpl0000644000175000017500000000056612676531525023627 0ustar mpmmpm00000000000000 {date|rfc822date} {author|person} {desc|strip|firstline|escape|nonempty} {inbranch%changelogbranchname}{branches%changelogbranchhead}{tags%changelogtag}{bookmarks%changelogtag}{rename%filelogrename} mercurial-3.7.3/mercurial/templates/paper/filediff.tmpl0000644000175000017500000000520112676531525022663 0ustar mpmmpm00000000000000{header} {repo|escape}: {file|escape} diff

                diff {file|escape} @ {rev}:{node|short} {branch%changelogbranchname}{tags%changelogtag}{bookmarks%changelogtag}

                {desc|strip|escape|websub|nonempty}
                author {author|obfuscate}
                date {date|rfc822date}
                parents {parent%filerevparent}
                children {child%filerevchild}
                line wrap: on
                line diff
                {diff}
                {footer} mercurial-3.7.3/mercurial/templates/paper/header.tmpl0000644000175000017500000000071212676531525022345 0ustar mpmmpm00000000000000 mercurial-3.7.3/mercurial/templates/paper/manifest.tmpl0000644000175000017500000000351412676531525022726 0ustar mpmmpm00000000000000{header} {repo|escape}: {node|short} {path|escape}

                directory {path|escape} @ {rev}:{node|short} {branch%changelogbranchname}{tags%changelogtag}{bookmarks%changelogtag}

                {dentries%direntry} {fentries%fileentry}
                name size permissions
                [up] drwxr-xr-x
                {footer} mercurial-3.7.3/mercurial/templates/paper/help.tmpl0000644000175000017500000000210512676531525022043 0ustar mpmmpm00000000000000{header} Help: {topic}

                Help: {topic}

                {rstdoc(doc, "html")}
                {footer} mercurial-3.7.3/mercurial/templates/paper/notfound.tmpl0000644000175000017500000000040612676531525022751 0ustar mpmmpm00000000000000{header} Mercurial repository not found

                Mercurial repository not found

                The specified repository "{repo|escape}" is unknown, sorry. Please go back to the main repository list page. {footer} mercurial-3.7.3/mercurial/templates/paper/fileannotate.tmpl0000644000175000017500000000533512676531525023574 0ustar mpmmpm00000000000000{header} {repo|escape}: {file|escape} annotate

                annotate {file|escape} @ {rev}:{node|short} {branch%changelogbranchname}{tags%changelogtag}{bookmarks%changelogtag}

                {desc|strip|escape|websub|nonempty}
                author {author|obfuscate}
                date {date|rfc822date}
                parents {parent%filerevparent}
                children {child%filerevchild}
                {annotate%annotateline}
                rev   line source
                {footer} mercurial-3.7.3/mercurial/templates/paper/bookmarks.tmpl0000644000175000017500000000312312676531525023104 0ustar mpmmpm00000000000000{header} {repo|escape}: bookmarks

                bookmarks

                {entries%bookmarkentry}
                bookmark node
                {footer} mercurial-3.7.3/mercurial/templates/paper/map0000644000175000017500000002171112676531525020721 0ustar mpmmpm00000000000000default = 'shortlog' mimetype = 'text/html; charset={encoding}' header = header.tmpl footer = footer.tmpl search = search.tmpl changelog = shortlog.tmpl shortlog = shortlog.tmpl shortlogentry = shortlogentry.tmpl graph = graph.tmpl help = help.tmpl helptopics = helptopics.tmpl helpentry = ' {if(basename, '{basename|escape}', '{topic|escape}')} {summary|escape} ' naventry = '{label|escape} ' navshortentry = '{label|escape} ' navgraphentry = '{label|escape} ' filenaventry = '{label|escape} ' filedifflink = '{file|escape} ' filenodelink = '{file|escape} ' filenolink = '{file|escape} ' fileellipses = '...' diffstatlink = diffstat.tmpl diffstatnolink = diffstat.tmpl changelogentry = shortlogentry.tmpl searchentry = shortlogentry.tmpl changeset = changeset.tmpl manifest = manifest.tmpl nav = '{before%naventry} {after%naventry}' navshort = '{before%navshortentry}{after%navshortentry}' navgraph = '{before%navgraphentry}{after%navgraphentry}' filenav = '{before%filenaventry}{after%filenaventry}' direntry = ' dir. {basename|escape}/ {emptydirs|escape} drwxr-xr-x ' fileentry = ' file {basename|escape} {size} {permissions|permissions} ' filerevision = filerevision.tmpl fileannotate = fileannotate.tmpl filediff = filediff.tmpl filecomparison = filecomparison.tmpl filelog = filelog.tmpl fileline = ' {strip(line|escape, '\r\n')}' filelogentry = filelogentry.tmpl annotateline = ' {author|user}@{rev} {linenumber} {line|escape} ' diffblock = '
                {lines}
                ' difflineplus = ' {strip(line|escape, '\r\n')}' difflineminus = ' {strip(line|escape, '\r\n')}' difflineat = ' {strip(line|escape, '\r\n')}' diffline = ' {strip(line|escape, '\r\n')}' comparisonblock =' {lines} ' comparisonline = ' {leftlinenumber} {leftline|escape} {rightlinenumber} {rightline|escape} ' changelogparent = ' parent {rev}: {node|short} ' changesetparent = '{node|short} ' changesetparentdiff = ' {changesetparent} {ifeq(node, basenode, '(current diff)', '({difffrom})')}' difffrom = 'diff' filerevparent = '{rename%filerename}{node|short} ' filerevchild = '{node|short} ' filerename = '{file|escape}@' filelogrename = ' base {file|escape}@{node|short} ' fileannotateparent = ' parent: {rename%filerename}{node|short} ' changesetchild = ' {node|short}' changelogchild = ' child {node|short} ' fileannotatechild = ' child: {node|short} ' tags = tags.tmpl tagentry = ' {tag|escape} {node|short} ' bookmarks = bookmarks.tmpl bookmarkentry = ' {bookmark|escape} {node|short} ' branches = branches.tmpl branchentry = ' {branch|escape} {node|short} ' changelogtag = '{name|escape} ' changesettag = '{tag|escape} ' changesetbookmark = '{bookmark|escape} ' changelogbranchhead = '{name|escape} ' changelogbranchname = '{name|escape} ' filediffparent = ' parent {rev}: {node|short} ' filelogparent = ' parent {rev}: {node|short} ' filediffchild = ' child {rev}: {node|short} ' filelogchild = ' child {rev}: {node|short} ' indexentry = ' {name|escape} {description} {contact|obfuscate} {lastchange|rfc822date} {archives%indexarchiveentry} {if(isdirectory, '', ' ' )} \n' indexarchiveentry = ' ↓{type|escape}' index = index.tmpl archiveentry = '
              • {type|escape}
              • ' notfound = notfound.tmpl error = error.tmpl urlparameter = '{separator}{name}={value|urlescape}' hiddenformentry = '' breadcrumb = '> {name|escape} ' searchhint = 'Find changesets by keywords (author, files, the commit message), revision number or hash, or revset expression.' mercurial-3.7.3/mercurial/templates/paper/diffstat.tmpl0000644000175000017500000000052512676531525022723 0ustar mpmmpm00000000000000 {file|escape} {total}     mercurial-3.7.3/mercurial/templates/paper/tags.tmpl0000644000175000017500000000305312676531525022054 0ustar mpmmpm00000000000000{header} {repo|escape}: tags

                tags

                {entries%tagentry}
                tag node
                {footer} mercurial-3.7.3/mercurial/templates/paper/filecomparison.tmpl0000644000175000017500000000557712676531525024145 0ustar mpmmpm00000000000000{header} {repo|escape}: {file|escape} comparison

                comparison {file|escape} @ {rev}:{node|short} {branch%changelogbranchname}{tags%changelogtag}{bookmarks%changelogtag}

                {desc|strip|escape|websub|nonempty}
                author {author|obfuscate}
                date {date|rfc822date}
                parents {parent%filerevparent}
                children {child%filerevchild}
                comparison
                equal deleted inserted replaced
                {comparison}
                {leftrev}:{leftnode|short} {rightrev}:{rightnode|short}
                {footer} mercurial-3.7.3/mercurial/templates/paper/changeset.tmpl0000644000175000017500000000520012676531525023053 0ustar mpmmpm00000000000000{header} {repo|escape}: {node|short}

                changeset {rev}:{node|short} {changesetbranch%changelogbranchname}{changesettag}{changesetbookmark}

                {desc|strip|escape|websub|nonempty}
                author {author|obfuscate}
                date {date|rfc822date}
                parents {ifeq(count(parent), '2', parent%changesetparentdiff, parent%changesetparent)}
                children {child%changesetchild}
                files {files}
                diffstat {diffsummary} [+]
                line wrap: on
                line diff
                {diff}
                {footer} mercurial-3.7.3/mercurial/templates/paper/helptopics.tmpl0000644000175000017500000000271112676531525023270 0ustar mpmmpm00000000000000{header} Help: {title}
                {topics % helpentry} {if(earlycommands, ' {earlycommands % helpentry} ')} {if(othercommands, ' {othercommands % helpentry} ')}

                Topics

                Main Commands

                Other Commands

                {footer} mercurial-3.7.3/mercurial/templates/paper/error.tmpl0000644000175000017500000000222712676531525022251 0ustar mpmmpm00000000000000{header} {repo|escape}: error

                error

                An error occurred while processing your request:

                {error|escape}

                {footer} mercurial-3.7.3/mercurial/templates/paper/index.tmpl0000644000175000017500000000143312676531525022225 0ustar mpmmpm00000000000000{header} Mercurial repositories index
                {entries%indexentry}
                Name Description Contact Last modified    
                {footer} mercurial-3.7.3/mercurial/templates/paper/filelog.tmpl0000644000175000017500000000613512676531525022543 0ustar mpmmpm00000000000000{header} {repo|escape}: {file|escape} history

                log {file|escape} @ {rev}:{node|short} {branch%changelogbranchname}{tags%changelogtag}{bookmarks%changelogtag}

                {entries%filelogentry}
                age author description
                {footer} mercurial-3.7.3/mercurial/templates/paper/filerevision.tmpl0000644000175000017500000000540312676531525023615 0ustar mpmmpm00000000000000{header} {repo|escape}: {node|short} {file|escape}

                view {file|escape} @ {rev}:{node|short} {branch%changelogbranchname}{tags%changelogtag}{bookmarks%changelogtag}

                {desc|strip|escape|websub|nonempty}
                author {author|obfuscate}
                date {date|rfc822date}
                parents {parent%filerevparent}
                children {child%filerevchild}
                line wrap: on
                line source
                {text%fileline}
                {footer} mercurial-3.7.3/mercurial/templates/paper/branches.tmpl0000644000175000017500000000311412676531525022701 0ustar mpmmpm00000000000000{header} {repo|escape}: branches

                branches

                {entries % branchentry}
                branch node
                {footer} mercurial-3.7.3/mercurial/templates/paper/graph.tmpl0000644000175000017500000001047512676531525022225 0ustar mpmmpm00000000000000{header} {repo|escape}: revision graph

                graph

                    {footer} mercurial-3.7.3/mercurial/templates/map-cmdline.changelog0000644000175000017500000000151212676531525023146 0ustar mpmmpm00000000000000header = '{date|shortdate} {author|person} <{author|email}>\n\n' header_verbose = '' changeset = '\t* {files|stringify|fill68|tabindent}{desc|fill68|tabindent|strip}\n\t[{node|short}]{tags}{branches}\n\n' changeset_quiet = '\t* {desc|firstline|fill68|tabindent|strip}\n\n' changeset_verbose = '{date|isodate} {author|person} <{author|email}> ({node|short}{tags}{branches})\n\n\t* {file_adds|stringify|fill68|tabindent}{file_dels|stringify|fill68|tabindent}{files|stringify|fill68|tabindent}{desc|fill68|tabindent|strip}\n\n' start_tags = ' [' tag = '{tag}, ' last_tag = '{tag}]' start_branches = ' <' branch = '{branch}, ' last_branch = '{branch}>' file = '{file}, ' last_file = '{file}:\n\t' file_add = '{file_add}, ' last_file_add = '{file_add}: new file.\n* ' file_del = '{file_del}, ' last_file_del = '{file_del}: deleted file.\n* ' mercurial-3.7.3/mercurial/obsolete.py0000644000175000017500000013046012676531525017304 0ustar mpmmpm00000000000000# obsolete.py - obsolete markers handling # # Copyright 2012 Pierre-Yves David # Logilab SA # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. """Obsolete marker handling An obsolete marker maps an old changeset to a list of new changesets. If the list of new changesets is empty, the old changeset is said to be "killed". Otherwise, the old changeset is being "replaced" by the new changesets. Obsolete markers can be used to record and distribute changeset graph transformations performed by history rewrite operations, and help building new tools to reconcile conflicting rewrite actions. To facilitate conflict resolution, markers include various annotations besides old and news changeset identifiers, such as creation date or author name. 
The old obsoleted changeset is called a "precursor" and possible replacements are called "successors". Markers that used changeset X as a precursor are called "successor markers of X" because they hold information about the successors of X. Markers that use changeset Y as a successors are call "precursor markers of Y" because they hold information about the precursors of Y. Examples: - When changeset A is replaced by changeset A', one marker is stored: (A, (A',)) - When changesets A and B are folded into a new changeset C, two markers are stored: (A, (C,)) and (B, (C,)) - When changeset A is simply "pruned" from the graph, a marker is created: (A, ()) - When changeset A is split into B and C, a single marker are used: (A, (C, C)) We use a single marker to distinguish the "split" case from the "divergence" case. If two independent operations rewrite the same changeset A in to A' and A'', we have an error case: divergent rewriting. We can detect it because two markers will be created independently: (A, (B,)) and (A, (C,)) Format ------ Markers are stored in an append-only file stored in '.hg/store/obsstore'. The file starts with a version header: - 1 unsigned byte: version number, starting at zero. The header is followed by the markers. Marker format depend of the version. See comment associated with each format for details. """ from __future__ import absolute_import import errno import struct from .i18n import _ from . import ( base85, error, node, parsers, phases, util, ) _pack = struct.pack _unpack = struct.unpack _calcsize = struct.calcsize propertycache = util.propertycache # the obsolete feature is not mature enough to be enabled by default. # you have to rely on third party extension extension to enable this. 
_enabled = False # Options for obsolescence createmarkersopt = 'createmarkers' allowunstableopt = 'allowunstable' exchangeopt = 'exchange' ### obsolescence marker flag ## bumpedfix flag # # When a changeset A' succeed to a changeset A which became public, we call A' # "bumped" because it's a successors of a public changesets # # o A' (bumped) # |`: # | o A # |/ # o Z # # The way to solve this situation is to create a new changeset Ad as children # of A. This changeset have the same content than A'. So the diff from A to A' # is the same than the diff from A to Ad. Ad is marked as a successors of A' # # o Ad # |`: # | x A' # |'| # o | A # |/ # o Z # # But by transitivity Ad is also a successors of A. To avoid having Ad marked # as bumped too, we add the `bumpedfix` flag to the marker. . # This flag mean that the successors express the changes between the public and # bumped version and fix the situation, breaking the transitivity of # "bumped" here. bumpedfix = 1 usingsha256 = 2 ## Parsing and writing of version "0" # # The header is followed by the markers. Each marker is made of: # # - 1 uint8 : number of new changesets "N", can be zero. # # - 1 uint32: metadata size "M" in bytes. # # - 1 byte: a bit field. It is reserved for flags used in common # obsolete marker operations, to avoid repeated decoding of metadata # entries. # # - 20 bytes: obsoleted changeset identifier. # # - N*20 bytes: new changesets identifiers. # # - M bytes: metadata as a sequence of nul-terminated strings. Each # string contains a key and a value, separated by a colon ':', without # additional encoding. Keys cannot contain '\0' or ':' and values # cannot contain '\0'. 
_fm0version = 0 _fm0fixed = '>BIB20s' _fm0node = '20s' _fm0fsize = _calcsize(_fm0fixed) _fm0fnodesize = _calcsize(_fm0node) def _fm0readmarkers(data, off): # Loop on markers l = len(data) while off + _fm0fsize <= l: # read fixed part cur = data[off:off + _fm0fsize] off += _fm0fsize numsuc, mdsize, flags, pre = _unpack(_fm0fixed, cur) # read replacement sucs = () if numsuc: s = (_fm0fnodesize * numsuc) cur = data[off:off + s] sucs = _unpack(_fm0node * numsuc, cur) off += s # read metadata # (metadata will be decoded on demand) metadata = data[off:off + mdsize] if len(metadata) != mdsize: raise error.Abort(_('parsing obsolete marker: metadata is too ' 'short, %d bytes expected, got %d') % (mdsize, len(metadata))) off += mdsize metadata = _fm0decodemeta(metadata) try: when, offset = metadata.pop('date', '0 0').split(' ') date = float(when), int(offset) except ValueError: date = (0., 0) parents = None if 'p2' in metadata: parents = (metadata.pop('p1', None), metadata.pop('p2', None)) elif 'p1' in metadata: parents = (metadata.pop('p1', None),) elif 'p0' in metadata: parents = () if parents is not None: try: parents = tuple(node.bin(p) for p in parents) # if parent content is not a nodeid, drop the data for p in parents: if len(p) != 20: parents = None break except TypeError: # if content cannot be translated to nodeid drop the data. 
parents = None metadata = tuple(sorted(metadata.iteritems())) yield (pre, sucs, flags, metadata, date, parents) def _fm0encodeonemarker(marker): pre, sucs, flags, metadata, date, parents = marker if flags & usingsha256: raise error.Abort(_('cannot handle sha256 with old obsstore format')) metadata = dict(metadata) time, tz = date metadata['date'] = '%r %i' % (time, tz) if parents is not None: if not parents: # mark that we explicitly recorded no parents metadata['p0'] = '' for i, p in enumerate(parents): metadata['p%i' % (i + 1)] = node.hex(p) metadata = _fm0encodemeta(metadata) numsuc = len(sucs) format = _fm0fixed + (_fm0node * numsuc) data = [numsuc, len(metadata), flags, pre] data.extend(sucs) return _pack(format, *data) + metadata def _fm0encodemeta(meta): """Return encoded metadata string to string mapping. Assume no ':' in key and no '\0' in both key and value.""" for key, value in meta.iteritems(): if ':' in key or '\0' in key: raise ValueError("':' and '\0' are forbidden in metadata key'") if '\0' in value: raise ValueError("':' is forbidden in metadata value'") return '\0'.join(['%s:%s' % (k, meta[k]) for k in sorted(meta)]) def _fm0decodemeta(data): """Return string to string dictionary from encoded version.""" d = {} for l in data.split('\0'): if l: key, value = l.split(':') d[key] = value return d ## Parsing and writing of version "1" # # The header is followed by the markers. Each marker is made of: # # - uint32: total size of the marker (including this field) # # - float64: date in seconds since epoch # # - int16: timezone offset in minutes # # - uint16: a bit field. It is reserved for flags used in common # obsolete marker operations, to avoid repeated decoding of metadata # entries. # # - uint8: number of successors "N", can be zero. # # - uint8: number of parents "P", can be zero. 
# # 0: parents data stored but no parent, # 1: one parent stored, # 2: two parents stored, # 3: no parent data stored # # - uint8: number of metadata entries M # # - 20 or 32 bytes: precursor changeset identifier. # # - N*(20 or 32) bytes: successors changesets identifiers. # # - P*(20 or 32) bytes: parents of the precursors changesets. # # - M*(uint8, uint8): size of all metadata entries (key and value) # # - remaining bytes: the metadata, each (key, value) pair after the other. _fm1version = 1 _fm1fixed = '>IdhHBBB20s' _fm1nodesha1 = '20s' _fm1nodesha256 = '32s' _fm1nodesha1size = _calcsize(_fm1nodesha1) _fm1nodesha256size = _calcsize(_fm1nodesha256) _fm1fsize = _calcsize(_fm1fixed) _fm1parentnone = 3 _fm1parentshift = 14 _fm1parentmask = (_fm1parentnone << _fm1parentshift) _fm1metapair = 'BB' _fm1metapairsize = _calcsize('BB') def _fm1purereadmarkers(data, off): # make some global constants local for performance noneflag = _fm1parentnone sha2flag = usingsha256 sha1size = _fm1nodesha1size sha2size = _fm1nodesha256size sha1fmt = _fm1nodesha1 sha2fmt = _fm1nodesha256 metasize = _fm1metapairsize metafmt = _fm1metapair fsize = _fm1fsize unpack = _unpack # Loop on markers stop = len(data) - _fm1fsize ufixed = struct.Struct(_fm1fixed).unpack while off <= stop: # read fixed part o1 = off + fsize t, secs, tz, flags, numsuc, numpar, nummeta, prec = ufixed(data[off:o1]) if flags & sha2flag: # FIXME: prec was read as a SHA1, needs to be amended # read 0 or more successors if numsuc == 1: o2 = o1 + sha2size sucs = (data[o1:o2],) else: o2 = o1 + sha2size * numsuc sucs = unpack(sha2fmt * numsuc, data[o1:o2]) # read parents if numpar == noneflag: o3 = o2 parents = None elif numpar == 1: o3 = o2 + sha2size parents = (data[o2:o3],) else: o3 = o2 + sha2size * numpar parents = unpack(sha2fmt * numpar, data[o2:o3]) else: # read 0 or more successors if numsuc == 1: o2 = o1 + sha1size sucs = (data[o1:o2],) else: o2 = o1 + sha1size * numsuc sucs = unpack(sha1fmt * numsuc, data[o1:o2]) 
# read parents if numpar == noneflag: o3 = o2 parents = None elif numpar == 1: o3 = o2 + sha1size parents = (data[o2:o3],) else: o3 = o2 + sha1size * numpar parents = unpack(sha1fmt * numpar, data[o2:o3]) # read metadata off = o3 + metasize * nummeta metapairsize = unpack('>' + (metafmt * nummeta), data[o3:off]) metadata = [] for idx in xrange(0, len(metapairsize), 2): o1 = off + metapairsize[idx] o2 = o1 + metapairsize[idx + 1] metadata.append((data[off:o1], data[o1:o2])) off = o2 yield (prec, sucs, flags, tuple(metadata), (secs, tz * 60), parents) def _fm1encodeonemarker(marker): pre, sucs, flags, metadata, date, parents = marker # determine node size _fm1node = _fm1nodesha1 if flags & usingsha256: _fm1node = _fm1nodesha256 numsuc = len(sucs) numextranodes = numsuc if parents is None: numpar = _fm1parentnone else: numpar = len(parents) numextranodes += numpar formatnodes = _fm1node * numextranodes formatmeta = _fm1metapair * len(metadata) format = _fm1fixed + formatnodes + formatmeta # tz is stored in minutes so we divide by 60 tz = date[1]//60 data = [None, date[0], tz, flags, numsuc, numpar, len(metadata), pre] data.extend(sucs) if parents is not None: data.extend(parents) totalsize = _calcsize(format) for key, value in metadata: lk = len(key) lv = len(value) data.append(lk) data.append(lv) totalsize += lk + lv data[0] = totalsize data = [_pack(format, *data)] for key, value in metadata: data.append(key) data.append(value) return ''.join(data) def _fm1readmarkers(data, off): native = getattr(parsers, 'fm1readmarkers', None) if not native: return _fm1purereadmarkers(data, off) stop = len(data) - _fm1fsize return native(data, off, stop) # mapping to read/write various marker formats # -> (decoder, encoder) formats = {_fm0version: (_fm0readmarkers, _fm0encodeonemarker), _fm1version: (_fm1readmarkers, _fm1encodeonemarker)} @util.nogc def _readmarkers(data): """Read and enumerate markers from raw data""" off = 0 diskversion = _unpack('>B', data[off:off + 1])[0] off 
+= 1 if diskversion not in formats: raise error.Abort(_('parsing obsolete marker: unknown version %r') % diskversion) return diskversion, formats[diskversion][0](data, off) def encodemarkers(markers, addheader=False, version=_fm0version): # Kept separate from flushmarkers(), it will be reused for # markers exchange. encodeone = formats[version][1] if addheader: yield _pack('>B', version) for marker in markers: yield encodeone(marker) class marker(object): """Wrap obsolete marker raw data""" def __init__(self, repo, data): # the repo argument will be used to create changectx in later version self._repo = repo self._data = data self._decodedmeta = None def __hash__(self): return hash(self._data) def __eq__(self, other): if type(other) != type(self): return False return self._data == other._data def precnode(self): """Precursor changeset node identifier""" return self._data[0] def succnodes(self): """List of successor changesets node identifiers""" return self._data[1] def parentnodes(self): """Parents of the precursors (None if not recorded)""" return self._data[5] def metadata(self): """Decoded metadata dictionary""" return dict(self._data[3]) def date(self): """Creation date as (unixtime, offset)""" return self._data[4] def flags(self): """The flags field of the marker""" return self._data[2] @util.nogc def _addsuccessors(successors, markers): for mark in markers: successors.setdefault(mark[0], set()).add(mark) @util.nogc def _addprecursors(precursors, markers): for mark in markers: for suc in mark[1]: precursors.setdefault(suc, set()).add(mark) @util.nogc def _addchildren(children, markers): for mark in markers: parents = mark[5] if parents is not None: for p in parents: children.setdefault(p, set()).add(mark) def _checkinvalidmarkers(markers): """search for marker with invalid data and raise error if needed Exist as a separated function to allow the evolve extension for a more subtle handling. 
""" for mark in markers: if node.nullid in mark[1]: raise error.Abort(_('bad obsolescence marker detected: ' 'invalid successors nullid')) class obsstore(object): """Store obsolete markers Markers can be accessed with two mappings: - precursors[x] -> set(markers on precursors edges of x) - successors[x] -> set(markers on successors edges of x) - children[x] -> set(markers on precursors edges of children(x) """ fields = ('prec', 'succs', 'flag', 'meta', 'date', 'parents') # prec: nodeid, precursor changesets # succs: tuple of nodeid, successor changesets (0-N length) # flag: integer, flag field carrying modifier for the markers (see doc) # meta: binary blob, encoded metadata dictionary # date: (float, int) tuple, date of marker creation # parents: (tuple of nodeid) or None, parents of precursors # None is used when no data has been recorded def __init__(self, svfs, defaultformat=_fm1version, readonly=False): # caches for various obsolescence related cache self.caches = {} self.svfs = svfs self._version = defaultformat self._readonly = readonly def __iter__(self): return iter(self._all) def __len__(self): return len(self._all) def __nonzero__(self): if not self._cached('_all'): try: return self.svfs.stat('obsstore').st_size > 1 except OSError as inst: if inst.errno != errno.ENOENT: raise # just build an empty _all list if no obsstore exists, which # avoids further stat() syscalls pass return bool(self._all) @property def readonly(self): """True if marker creation is disabled Remove me in the future when obsolete marker is always on.""" return self._readonly def create(self, transaction, prec, succs=(), flag=0, parents=None, date=None, metadata=None): """obsolete: add a new obsolete marker * ensuring it is hashable * check mandatory metadata * encode metadata If you are a human writing code creating marker you want to use the `createmarkers` function in this module instead. return True if a new marker have been added, False if the markers already existed (no op). 
""" if metadata is None: metadata = {} if date is None: if 'date' in metadata: # as a courtesy for out-of-tree extensions date = util.parsedate(metadata.pop('date')) else: date = util.makedate() if len(prec) != 20: raise ValueError(prec) for succ in succs: if len(succ) != 20: raise ValueError(succ) if prec in succs: raise ValueError(_('in-marker cycle with %s') % node.hex(prec)) metadata = tuple(sorted(metadata.iteritems())) marker = (str(prec), tuple(succs), int(flag), metadata, date, parents) return bool(self.add(transaction, [marker])) def add(self, transaction, markers): """Add new markers to the store Take care of filtering duplicate. Return the number of new marker.""" if self._readonly: raise error.Abort('creating obsolete markers is not enabled on ' 'this repo') known = set(self._all) new = [] for m in markers: if m not in known: known.add(m) new.append(m) if new: f = self.svfs('obsstore', 'ab') try: offset = f.tell() transaction.add('obsstore', offset) # offset == 0: new file - add the version header for bytes in encodemarkers(new, offset == 0, self._version): f.write(bytes) finally: # XXX: f.close() == filecache invalidation == obsstore rebuilt. # call 'filecacheentry.refresh()' here f.close() self._addmarkers(new) # new marker *may* have changed several set. invalidate the cache. 
self.caches.clear() # records the number of new markers for the transaction hooks previous = int(transaction.hookargs.get('new_obsmarkers', '0')) transaction.hookargs['new_obsmarkers'] = str(previous + len(new)) return len(new) def mergemarkers(self, transaction, data): """merge a binary stream of markers inside the obsstore Returns the number of new markers added.""" version, markers = _readmarkers(data) return self.add(transaction, markers) @propertycache def _all(self): data = self.svfs.tryread('obsstore') if not data: return [] self._version, markers = _readmarkers(data) markers = list(markers) _checkinvalidmarkers(markers) return markers @propertycache def successors(self): successors = {} _addsuccessors(successors, self._all) return successors @propertycache def precursors(self): precursors = {} _addprecursors(precursors, self._all) return precursors @propertycache def children(self): children = {} _addchildren(children, self._all) return children def _cached(self, attr): return attr in self.__dict__ def _addmarkers(self, markers): markers = list(markers) # to allow repeated iteration self._all.extend(markers) if self._cached('successors'): _addsuccessors(self.successors, markers) if self._cached('precursors'): _addprecursors(self.precursors, markers) if self._cached('children'): _addchildren(self.children, markers) _checkinvalidmarkers(markers) def relevantmarkers(self, nodes): """return a set of all obsolescence markers relevant to a set of nodes. 
"relevant" to a set of nodes mean: - marker that use this changeset as successor - prune marker of direct children on this changeset - recursive application of the two rules on precursors of these markers It is a set so you cannot rely on order.""" pendingnodes = set(nodes) seenmarkers = set() seennodes = set(pendingnodes) precursorsmarkers = self.precursors children = self.children while pendingnodes: direct = set() for current in pendingnodes: direct.update(precursorsmarkers.get(current, ())) pruned = [m for m in children.get(current, ()) if not m[1]] direct.update(pruned) direct -= seenmarkers pendingnodes = set([m[0] for m in direct]) seenmarkers |= direct pendingnodes -= seennodes seennodes |= pendingnodes return seenmarkers def commonversion(versions): """Return the newest version listed in both versions and our local formats. Returns None if no common version exists. """ versions.sort(reverse=True) # search for highest version known on both side for v in versions: if v in formats: return v return None # arbitrary picked to fit into 8K limit from HTTP server # you have to take in account: # - the version header # - the base85 encoding _maxpayload = 5300 def _pushkeyescape(markers): """encode markers into a dict suitable for pushkey exchange - binary data is base85 encoded - split in chunks smaller than 5300 bytes""" keys = {} parts = [] currentlen = _maxpayload * 2 # ensure we create a new part for marker in markers: nextdata = _fm0encodeonemarker(marker) if (len(nextdata) + currentlen > _maxpayload): currentpart = [] currentlen = 0 parts.append(currentpart) currentpart.append(nextdata) currentlen += len(nextdata) for idx, part in enumerate(reversed(parts)): data = ''.join([_pack('>B', _fm0version)] + part) keys['dump%i' % idx] = base85.b85encode(data) return keys def listmarkers(repo): """List markers over pushkey""" if not repo.obsstore: return {} return _pushkeyescape(sorted(repo.obsstore)) def pushmarker(repo, key, old, new): """Push markers over 
pushkey""" if not key.startswith('dump'): repo.ui.warn(_('unknown key: %r') % key) return 0 if old: repo.ui.warn(_('unexpected old value for %r') % key) return 0 data = base85.b85decode(new) lock = repo.lock() try: tr = repo.transaction('pushkey: obsolete markers') try: repo.obsstore.mergemarkers(tr, data) tr.close() return 1 finally: tr.release() finally: lock.release() def getmarkers(repo, nodes=None): """returns markers known in a repository If is specified, only markers "relevant" to those nodes are are returned""" if nodes is None: rawmarkers = repo.obsstore else: rawmarkers = repo.obsstore.relevantmarkers(nodes) for markerdata in rawmarkers: yield marker(repo, markerdata) def relevantmarkers(repo, node): """all obsolete markers relevant to some revision""" for markerdata in repo.obsstore.relevantmarkers(node): yield marker(repo, markerdata) def precursormarkers(ctx): """obsolete marker marking this changeset as a successors""" for data in ctx.repo().obsstore.precursors.get(ctx.node(), ()): yield marker(ctx.repo(), data) def successormarkers(ctx): """obsolete marker making this changeset obsolete""" for data in ctx.repo().obsstore.successors.get(ctx.node(), ()): yield marker(ctx.repo(), data) def allsuccessors(obsstore, nodes, ignoreflags=0): """Yield node for every successor of . Some successors may be unknown locally. This is a linear yield unsuited to detecting split changesets. It includes initial nodes too.""" remaining = set(nodes) seen = set(remaining) while remaining: current = remaining.pop() yield current for mark in obsstore.successors.get(current, ()): # ignore marker flagged with specified flag if mark[2] & ignoreflags: continue for suc in mark[1]: if suc not in seen: seen.add(suc) remaining.add(suc) def allprecursors(obsstore, nodes, ignoreflags=0): """Yield node for every precursors of . Some precursors may be unknown locally. This is a linear yield unsuited to detecting folded changesets. 
It includes initial nodes too.""" remaining = set(nodes) seen = set(remaining) while remaining: current = remaining.pop() yield current for mark in obsstore.precursors.get(current, ()): # ignore marker flagged with specified flag if mark[2] & ignoreflags: continue suc = mark[0] if suc not in seen: seen.add(suc) remaining.add(suc) def foreground(repo, nodes): """return all nodes in the "foreground" of other node The foreground of a revision is anything reachable using parent -> children or precursor -> successor relation. It is very similar to "descendant" but augmented with obsolescence information. Beware that possible obsolescence cycle may result if complex situation. """ repo = repo.unfiltered() foreground = set(repo.set('%ln::', nodes)) if repo.obsstore: # We only need this complicated logic if there is obsolescence # XXX will probably deserve an optimised revset. nm = repo.changelog.nodemap plen = -1 # compute the whole set of successors or descendants while len(foreground) != plen: plen = len(foreground) succs = set(c.node() for c in foreground) mutable = [c.node() for c in foreground if c.mutable()] succs.update(allsuccessors(repo.obsstore, mutable)) known = (n for n in succs if n in nm) foreground = set(repo.set('%ln::', known)) return set(c.node() for c in foreground) def successorssets(repo, initialnode, cache=None): """Return set of all latest successors of initial nodes The successors set of a changeset A are the group of revisions that succeed A. It succeeds A as a consistent whole, each revision being only a partial replacement. The successors set contains non-obsolete changesets only. This function returns the full list of successor sets which is why it returns a list of tuples and not just a single tuple. Each tuple is a valid successors set. Note that (A,) may be a valid successors set for changeset A (see below). In most cases, a changeset A will have a single element (e.g. the changeset A is replaced by A') in its successors set. 
Though, it is also common for a changeset A to have no elements in its successor set (e.g. the changeset has been pruned). Therefore, the returned list of successors sets will be [(A',)] or [], respectively. When a changeset A is split into A' and B', however, it will result in a successors set containing more than a single element, i.e. [(A',B')]. Divergent changesets will result in multiple successors sets, i.e. [(A',), (A'')]. If a changeset A is not obsolete, then it will conceptually have no successors set. To distinguish this from a pruned changeset, the successor set will contain itself only, i.e. [(A,)]. Finally, successors unknown locally are considered to be pruned (obsoleted without any successors). The optional `cache` parameter is a dictionary that may contain precomputed successors sets. It is meant to reuse the computation of a previous call to `successorssets` when multiple calls are made at the same time. The cache dictionary is updated in place. The caller is responsible for its life span. Code that makes multiple calls to `successorssets` *must* use this cache mechanism or suffer terrible performance. """ succmarkers = repo.obsstore.successors # Stack of nodes we search successors sets for toproceed = [initialnode] # set version of above list for fast loop detection # element added to "toproceed" must be added here stackedset = set(toproceed) if cache is None: cache = {} # This while loop is the flattened version of a recursive search for # successors sets # # def successorssets(x): # successors = directsuccessors(x) # ss = [[]] # for succ in directsuccessors(x): # # product as in itertools cartesian product # ss = product(ss, successorssets(succ)) # return ss # # But we can not use plain recursive calls here: # - that would blow the python call stack # - obsolescence markers may have cycles, we need to handle them. # # The `toproceed` list act as our call stack. Every node we search # successors set for are stacked there. 
# # The `stackedset` is set version of this stack used to check if a node is # already stacked. This check is used to detect cycles and prevent infinite # loop. # # successors set of all nodes are stored in the `cache` dictionary. # # After this while loop ends we use the cache to return the successors sets # for the node requested by the caller. while toproceed: # Every iteration tries to compute the successors sets of the topmost # node of the stack: CURRENT. # # There are four possible outcomes: # # 1) We already know the successors sets of CURRENT: # -> mission accomplished, pop it from the stack. # 2) Node is not obsolete: # -> the node is its own successors sets. Add it to the cache. # 3) We do not know successors set of direct successors of CURRENT: # -> We add those successors to the stack. # 4) We know successors sets of all direct successors of CURRENT: # -> We can compute CURRENT successors set and add it to the # cache. # current = toproceed[-1] if current in cache: # case (1): We already know the successors sets stackedset.remove(toproceed.pop()) elif current not in succmarkers: # case (2): The node is not obsolete. if current in repo: # We have a valid last successors. cache[current] = [(current,)] else: # Final obsolete version is unknown locally. # Do not count that as a valid successors cache[current] = [] else: # cases (3) and (4) # # We proceed in two phases. Phase 1 aims to distinguish case (3) # from case (4): # # For each direct successors of CURRENT, we check whether its # successors sets are known. If they are not, we stack the # unknown node and proceed to the next iteration of the while # loop. (case 3) # # During this step, we may detect obsolescence cycles: a node # with unknown successors sets but already in the call stack. # In such a situation, we arbitrary set the successors sets of # the node to nothing (node pruned) to break the cycle. # # If no break was encountered we proceed to phase 2. 
# # Phase 2 computes successors sets of CURRENT (case 4); see details # in phase 2 itself. # # Note the two levels of iteration in each phase. # - The first one handles obsolescence markers using CURRENT as # precursor (successors markers of CURRENT). # # Having multiple entry here means divergence. # # - The second one handles successors defined in each marker. # # Having none means pruned node, multiple successors means split, # single successors are standard replacement. # for mark in sorted(succmarkers[current]): for suc in mark[1]: if suc not in cache: if suc in stackedset: # cycle breaking cache[suc] = [] else: # case (3) If we have not computed successors sets # of one of those successors we add it to the # `toproceed` stack and stop all work for this # iteration. toproceed.append(suc) stackedset.add(suc) break else: continue break else: # case (4): we know all successors sets of all direct # successors # # Successors set contributed by each marker depends on the # successors sets of all its "successors" node. # # Each different marker is a divergence in the obsolescence # history. It contributes successors sets distinct from other # markers. # # Within a marker, a successor may have divergent successors # sets. In such a case, the marker will contribute multiple # divergent successors sets. If multiple successors have # divergent successors sets, a Cartesian product is used. # # At the end we post-process successors sets to remove # duplicated entry and successors set that are strict subset of # another one. succssets = [] for mark in sorted(succmarkers[current]): # successors sets contributed by this marker markss = [[]] for suc in mark[1]: # cardinal product with previous successors productresult = [] for prefix in markss: for suffix in cache[suc]: newss = list(prefix) for part in suffix: # do not duplicated entry in successors set # first entry wins. 
if part not in newss: newss.append(part) productresult.append(newss) markss = productresult succssets.extend(markss) # remove duplicated and subset seen = [] final = [] candidate = sorted(((set(s), s) for s in succssets if s), key=lambda x: len(x[1]), reverse=True) for setversion, listversion in candidate: for seenset in seen: if setversion.issubset(seenset): break else: final.append(listversion) seen.append(setversion) final.reverse() # put small successors set first cache[current] = final return cache[initialnode] # mapping of 'set-name' -> cachefuncs = {} def cachefor(name): """Decorator to register a function as computing the cache for a set""" def decorator(func): assert name not in cachefuncs cachefuncs[name] = func return func return decorator def getrevs(repo, name): """Return the set of revision that belong to the set Such access may compute the set and cache it for future use""" repo = repo.unfiltered() if not repo.obsstore: return frozenset() if name not in repo.obsstore.caches: repo.obsstore.caches[name] = cachefuncs[name](repo) return repo.obsstore.caches[name] # To be simple we need to invalidate obsolescence cache when: # # - new changeset is added: # - public phase is changed # - obsolescence marker are added # - strip is used a repo def clearobscaches(repo): """Remove all obsolescence related cache from a repo This remove all cache in obsstore is the obsstore already exist on the repo. 
(We could be smarter here given the exact event that trigger the cache clearing)""" # only clear cache is there is obsstore data in this repo if 'obsstore' in repo._filecache: repo.obsstore.caches.clear() @cachefor('obsolete') def _computeobsoleteset(repo): """the set of obsolete revisions""" obs = set() getnode = repo.changelog.node notpublic = repo.revs("not public()") for r in notpublic: if getnode(r) in repo.obsstore.successors: obs.add(r) return obs @cachefor('unstable') def _computeunstableset(repo): """the set of non obsolete revisions with obsolete parents""" revs = [(ctx.rev(), ctx) for ctx in repo.set('(not public()) and (not obsolete())')] revs.sort(key=lambda x:x[0]) unstable = set() for rev, ctx in revs: # A rev is unstable if one of its parent is obsolete or unstable # this works since we traverse following growing rev order if any((x.obsolete() or (x.rev() in unstable)) for x in ctx.parents()): unstable.add(rev) return unstable @cachefor('suspended') def _computesuspendedset(repo): """the set of obsolete parents with non obsolete descendants""" suspended = repo.changelog.ancestors(getrevs(repo, 'unstable')) return set(r for r in getrevs(repo, 'obsolete') if r in suspended) @cachefor('extinct') def _computeextinctset(repo): """the set of obsolete parents without non obsolete descendants""" return getrevs(repo, 'obsolete') - getrevs(repo, 'suspended') @cachefor('bumped') def _computebumpedset(repo): """the set of revs trying to obsolete public revisions""" bumped = set() # util function (avoid attribute lookup in the loop) phase = repo._phasecache.phase # would be faster to grab the full list public = phases.public cl = repo.changelog torev = cl.nodemap.get for ctx in repo.set('(not public()) and (not obsolete())'): rev = ctx.rev() # We only evaluate mutable, non-obsolete revision node = ctx.node() # (future) A cache of precursors may worth if split is very common for pnode in allprecursors(repo.obsstore, [node], ignoreflags=bumpedfix): prev = 
torev(pnode) # unfiltered! but so is phasecache if (prev is not None) and (phase(repo, prev) <= public): # we have a public precursors bumped.add(rev) break # Next draft! return bumped @cachefor('divergent') def _computedivergentset(repo): """the set of rev that compete to be the final successors of some revision. """ divergent = set() obsstore = repo.obsstore newermap = {} for ctx in repo.set('(not public()) - obsolete()'): mark = obsstore.precursors.get(ctx.node(), ()) toprocess = set(mark) seen = set() while toprocess: prec = toprocess.pop()[0] if prec in seen: continue # emergency cycle hanging prevention seen.add(prec) if prec not in newermap: successorssets(repo, prec, newermap) newer = [n for n in newermap[prec] if n] if len(newer) > 1: divergent.add(ctx.rev()) break toprocess.update(obsstore.precursors.get(prec, ())) return divergent def createmarkers(repo, relations, flag=0, date=None, metadata=None): """Add obsolete markers between changesets in a repo must be an iterable of (, (, ...)[,{metadata}]) tuple. `old` and `news` are changectx. metadata is an optional dictionary containing metadata for this marker only. It is merged with the global metadata specified through the `metadata` argument of this function, Trying to obsolete a public changeset will raise an exception. Current user and date are used except if specified otherwise in the metadata attribute. This function operates within a transaction of its own, but does not take any lock on the repo. 
""" # prepare metadata if metadata is None: metadata = {} if 'user' not in metadata: metadata['user'] = repo.ui.username() tr = repo.transaction('add-obsolescence-marker') try: markerargs = [] for rel in relations: prec = rel[0] sucs = rel[1] localmetadata = metadata.copy() if 2 < len(rel): localmetadata.update(rel[2]) if not prec.mutable(): raise error.Abort("cannot obsolete public changeset: %s" % prec, hint='see "hg help phases" for details') nprec = prec.node() nsucs = tuple(s.node() for s in sucs) npare = None if not nsucs: npare = tuple(p.node() for p in prec.parents()) if nprec in nsucs: raise error.Abort("changeset %s cannot obsolete itself" % prec) # Creating the marker causes the hidden cache to become invalid, # which causes recomputation when we ask for prec.parents() above. # Resulting in n^2 behavior. So let's prepare all of the args # first, then create the markers. markerargs.append((nprec, nsucs, npare, localmetadata)) for args in markerargs: nprec, nsucs, npare, localmetadata = args repo.obsstore.create(tr, nprec, nsucs, flag, parents=npare, date=date, metadata=localmetadata) repo.filteredrevcache.clear() tr.close() finally: tr.release() def isenabled(repo, option): """Returns True if the given repository has the given obsolete option enabled. """ result = set(repo.ui.configlist('experimental', 'evolution')) if 'all' in result: return True # For migration purposes, temporarily return true if the config hasn't been # set but _enabled is true. 
if len(result) == 0 and _enabled: return True # createmarkers must be enabled if other options are enabled if ((allowunstableopt in result or exchangeopt in result) and not createmarkersopt in result): raise error.Abort(_("'createmarkers' obsolete option must be enabled " "if other obsolete options are enabled")) return option in result mercurial-3.7.3/mercurial/windows.py0000644000175000017500000003422312676531525017162 0ustar mpmmpm00000000000000# windows.py - Windows utility function implementations for Mercurial # # Copyright 2005-2009 Matt Mackall and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import _winreg import errno import msvcrt import os import re import stat import sys from .i18n import _ from . import ( encoding, osutil, win32, ) executablepath = win32.executablepath getuser = win32.getuser hidewindow = win32.hidewindow makedir = win32.makedir nlinks = win32.nlinks oslink = win32.oslink samedevice = win32.samedevice samefile = win32.samefile setsignalhandler = win32.setsignalhandler spawndetached = win32.spawndetached split = os.path.split termwidth = win32.termwidth testpid = win32.testpid unlink = win32.unlink umask = 0o022 class mixedfilemodewrapper(object): """Wraps a file handle when it is opened in read/write mode. fopen() and fdopen() on Windows have a specific-to-Windows requirement that files opened with mode r+, w+, or a+ make a call to a file positioning function when switching between reads and writes. Without this extra call, Python will raise a not very intuitive "IOError: [Errno 0] Error." This class wraps posixfile instances when the file is opened in read/write mode and automatically adds checks or inserts appropriate file positioning calls when necessary. 
""" OPNONE = 0 OPREAD = 1 OPWRITE = 2 def __init__(self, fp): object.__setattr__(self, '_fp', fp) object.__setattr__(self, '_lastop', 0) def __getattr__(self, name): return getattr(self._fp, name) def __setattr__(self, name, value): return self._fp.__setattr__(name, value) def _noopseek(self): self._fp.seek(0, os.SEEK_CUR) def seek(self, *args, **kwargs): object.__setattr__(self, '_lastop', self.OPNONE) return self._fp.seek(*args, **kwargs) def write(self, d): if self._lastop == self.OPREAD: self._noopseek() object.__setattr__(self, '_lastop', self.OPWRITE) return self._fp.write(d) def writelines(self, *args, **kwargs): if self._lastop == self.OPREAD: self._noopeseek() object.__setattr__(self, '_lastop', self.OPWRITE) return self._fp.writelines(*args, **kwargs) def read(self, *args, **kwargs): if self._lastop == self.OPWRITE: self._noopseek() object.__setattr__(self, '_lastop', self.OPREAD) return self._fp.read(*args, **kwargs) def readline(self, *args, **kwargs): if self._lastop == self.OPWRITE: self._noopseek() object.__setattr__(self, '_lastop', self.OPREAD) return self._fp.readline(*args, **kwargs) def readlines(self, *args, **kwargs): if self._lastop == self.OPWRITE: self._noopseek() object.__setattr__(self, '_lastop', self.OPREAD) return self._fp.readlines(*args, **kwargs) def posixfile(name, mode='r', buffering=-1): '''Open a file with even more POSIX-like semantics''' try: fp = osutil.posixfile(name, mode, buffering) # may raise WindowsError # The position when opening in append mode is implementation defined, so # make it consistent with other platforms, which position at EOF. 
if 'a' in mode: fp.seek(0, os.SEEK_END) if '+' in mode: return mixedfilemodewrapper(fp) return fp except WindowsError as err: # convert to a friendlier exception raise IOError(err.errno, '%s: %s' % (name, err.strerror)) class winstdout(object): '''stdout on windows misbehaves if sent through a pipe''' def __init__(self, fp): self.fp = fp def __getattr__(self, key): return getattr(self.fp, key) def close(self): try: self.fp.close() except IOError: pass def write(self, s): try: # This is workaround for "Not enough space" error on # writing large size of data to console. limit = 16000 l = len(s) start = 0 self.softspace = 0 while start < l: end = start + limit self.fp.write(s[start:end]) start = end except IOError as inst: if inst.errno != 0: raise self.close() raise IOError(errno.EPIPE, 'Broken pipe') def flush(self): try: return self.fp.flush() except IOError as inst: if inst.errno != errno.EINVAL: raise self.close() raise IOError(errno.EPIPE, 'Broken pipe') sys.__stdout__ = sys.stdout = winstdout(sys.stdout) def _is_win_9x(): '''return true if run on windows 95, 98 or me.''' try: return sys.getwindowsversion()[3] == 1 except AttributeError: return 'command' in os.environ.get('comspec', '') def openhardlinks(): return not _is_win_9x() def parsepatchoutput(output_line): """parses the output produced by patch and returns the filename""" pf = output_line[14:] if pf[0] == '`': pf = pf[1:-1] # Remove the quotes return pf def sshargs(sshcmd, host, user, port): '''Build argument list for ssh or Plink''' pflag = 'plink' in sshcmd.lower() and '-P' or '-p' args = user and ("%s@%s" % (user, host)) or host return port and ("%s %s %s" % (args, pflag, port)) or args def setflags(f, l, x): pass def copymode(src, dst, mode=None): pass def checkexec(path): return False def checklink(path): return False def setbinary(fd): # When run without console, pipes may expose invalid # fileno(), usually set to -1. 
fno = getattr(fd, 'fileno', None) if fno is not None and fno() >= 0: msvcrt.setmode(fno(), os.O_BINARY) def pconvert(path): return path.replace(os.sep, '/') def localpath(path): return path.replace('/', '\\') def normpath(path): return pconvert(os.path.normpath(path)) def normcase(path): return encoding.upper(path) # NTFS compares via upper() # see posix.py for definitions normcasespec = encoding.normcasespecs.upper normcasefallback = encoding.upperfallback def samestat(s1, s2): return False # A sequence of backslashes is special iff it precedes a double quote: # - if there's an even number of backslashes, the double quote is not # quoted (i.e. it ends the quoted region) # - if there's an odd number of backslashes, the double quote is quoted # - in both cases, every pair of backslashes is unquoted into a single # backslash # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx ) # So, to quote a string, we must surround it in double quotes, double # the number of backslashes that precede double quotes and add another # backslash before every double quote (being careful with the double # quote we've appended to the end) _quotere = None _needsshellquote = None def shellquote(s): r""" >>> shellquote(r'C:\Users\xyz') '"C:\\Users\\xyz"' >>> shellquote(r'C:\Users\xyz/mixed') '"C:\\Users\\xyz/mixed"' >>> # Would be safe not to quote too, since it is all double backslashes >>> shellquote(r'C:\\Users\\xyz') '"C:\\\\Users\\\\xyz"' >>> # But this must be quoted >>> shellquote(r'C:\\Users\\xyz/abc') '"C:\\\\Users\\\\xyz/abc"' """ global _quotere if _quotere is None: _quotere = re.compile(r'(\\*)("|\\$)') global _needsshellquote if _needsshellquote is None: # ":" is also treated as "safe character", because it is used as a part # of path name on Windows. "\" is also part of a path name, but isn't # safe because shlex.split() (kind of) treats it as an escape char and # drops it. It will leave the next character, even if it is another # "\". 
_needsshellquote = re.compile(r'[^a-zA-Z0-9._:/-]').search if s and not _needsshellquote(s) and not _quotere.search(s): # "s" shouldn't have to be quoted return s return '"%s"' % _quotere.sub(r'\1\1\\\2', s) def quotecommand(cmd): """Build a command string suitable for os.popen* calls.""" if sys.version_info < (2, 7, 1): # Python versions since 2.7.1 do this extra quoting themselves return '"' + cmd + '"' return cmd def popen(command, mode='r'): # Work around "popen spawned process may not write to stdout # under windows" # http://bugs.python.org/issue1366 command += " 2> %s" % os.devnull return os.popen(quotecommand(command), mode) def explainexit(code): return _("exited with status %d") % code, code # if you change this stub into a real check, please try to implement the # username and groupname functions above, too. def isowner(st): return True def findexe(command): '''Find executable for command searching like cmd.exe does. If command is a basename then PATH is searched for command. PATH isn't searched if command is an absolute or relative path. An extension from PATHEXT is found and added if not present. If command isn't found None is returned.''' pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD') pathexts = [ext for ext in pathext.lower().split(os.pathsep)] if os.path.splitext(command)[1].lower() in pathexts: pathexts = [''] def findexisting(pathcommand): 'Will append extension (if needed) and return existing file' for ext in pathexts: executable = pathcommand + ext if os.path.exists(executable): return executable return None if os.sep in command: return findexisting(command) for path in os.environ.get('PATH', '').split(os.pathsep): executable = findexisting(os.path.join(path, command)) if executable is not None: return executable return findexisting(os.path.expanduser(os.path.expandvars(command))) _wantedkinds = set([stat.S_IFREG, stat.S_IFLNK]) def statfiles(files): '''Stat each file in files. 
Yield each stat, or None if a file does not exist or has a type we don't care about. Cluster and cache stat per directory to minimize number of OS stat calls.''' dircache = {} # dirname -> filename -> status | None if file does not exist getkind = stat.S_IFMT for nf in files: nf = normcase(nf) dir, base = os.path.split(nf) if not dir: dir = '.' cache = dircache.get(dir, None) if cache is None: try: dmap = dict([(normcase(n), s) for n, k, s in osutil.listdir(dir, True) if getkind(s.st_mode) in _wantedkinds]) except OSError as err: # Python >= 2.5 returns ENOENT and adds winerror field # EINVAL is raised if dir is not a directory. if err.errno not in (errno.ENOENT, errno.EINVAL, errno.ENOTDIR): raise dmap = {} cache = dircache.setdefault(dir, dmap) yield cache.get(base, None) def username(uid=None): """Return the name of the user with the given uid. If uid is None, return the name of the current user.""" return None def groupname(gid=None): """Return the name of the group with the given gid. 
If gid is None, return the name of the current group.""" return None def removedirs(name): """special version of os.removedirs that does not remove symlinked directories or junction points if they actually contain files""" if osutil.listdir(name): return os.rmdir(name) head, tail = os.path.split(name) if not tail: head, tail = os.path.split(head) while head and tail: try: if osutil.listdir(head): return os.rmdir(head) except (ValueError, OSError): break head, tail = os.path.split(head) def unlinkpath(f, ignoremissing=False): """unlink and remove the directory if it is empty""" try: unlink(f) except OSError as e: if not (ignoremissing and e.errno == errno.ENOENT): raise # try removing directories that might now be empty try: removedirs(os.path.dirname(f)) except OSError: pass def rename(src, dst): '''atomically rename file src to dst, replacing dst if it exists''' try: os.rename(src, dst) except OSError as e: if e.errno != errno.EEXIST: raise unlink(dst) os.rename(src, dst) def gethgcmd(): return [sys.executable] + sys.argv[:1] def groupmembers(name): # Don't support groups on Windows for now raise KeyError def isexec(f): return False class cachestat(object): def __init__(self, path): pass def cacheable(self): return False def lookupreg(key, valname=None, scope=None): ''' Look up a key/value name in the Windows registry. valname: value name. If unspecified, the default value for the key is used. scope: optionally specify scope for registry lookup, this can be a sequence of scopes to look up in order. Default (CURRENT_USER, LOCAL_MACHINE). 
''' if scope is None: scope = (_winreg.HKEY_CURRENT_USER, _winreg.HKEY_LOCAL_MACHINE) elif not isinstance(scope, (list, tuple)): scope = (scope,) for s in scope: try: val = _winreg.QueryValueEx(_winreg.OpenKey(s, key), valname)[0] # never let a Unicode string escape into the wild return encoding.tolocal(val.encode('UTF-8')) except EnvironmentError: pass expandglobs = True def statislink(st): '''check whether a stat result is a symlink''' return False def statisexec(st): '''check whether a stat result is an executable file''' return False def poll(fds): # see posix.py for description raise NotImplementedError() def readpipe(pipe): """Read all available data from a pipe.""" chunks = [] while True: size = win32.peekpipe(pipe) if not size: break s = pipe.read(size) if not s: break chunks.append(s) return ''.join(chunks) mercurial-3.7.3/mercurial/hook.py0000644000175000017500000002026212676531525016426 0ustar mpmmpm00000000000000# hook.py - hook support for mercurial # # Copyright 2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import os import sys import time from .i18n import _ from . import ( demandimport, error, extensions, util, ) def _pythonhook(ui, repo, name, hname, funcname, args, throw): '''call python hook. hook is callable object, looked up as name in python module. if callable returns "true", hook fails, else passes. if hook raises exception, treated as hook failure. exception propagates if throw is "true". reason for "true" meaning "hook failed" is so that unmodified commands (e.g. mercurial.commands.update) can be run as hooks without wrappers to convert return values.''' if callable(funcname): obj = funcname funcname = obj.__module__ + "." 
+ obj.__name__ else: d = funcname.rfind('.') if d == -1: raise error.HookLoadError( _('%s hook is invalid ("%s" not in a module)') % (hname, funcname)) modname = funcname[:d] oldpaths = sys.path if util.mainfrozen(): # binary installs require sys.path manipulation modpath, modfile = os.path.split(modname) if modpath and modfile: sys.path = sys.path[:] + [modpath] modname = modfile with demandimport.deactivated(): try: obj = __import__(modname) except ImportError: e1 = sys.exc_type, sys.exc_value, sys.exc_traceback try: # extensions are loaded with hgext_ prefix obj = __import__("hgext_%s" % modname) except ImportError: e2 = sys.exc_type, sys.exc_value, sys.exc_traceback if ui.tracebackflag: ui.warn(_('exception from first failed import ' 'attempt:\n')) ui.traceback(e1) if ui.tracebackflag: ui.warn(_('exception from second failed import ' 'attempt:\n')) ui.traceback(e2) raise error.HookLoadError( _('%s hook is invalid (import of "%s" failed)') % (hname, modname)) sys.path = oldpaths try: for p in funcname.split('.')[1:]: obj = getattr(obj, p) except AttributeError: raise error.HookLoadError( _('%s hook is invalid ("%s" is not defined)') % (hname, funcname)) if not callable(obj): raise error.HookLoadError( _('%s hook is invalid ("%s" is not callable)') % (hname, funcname)) ui.note(_("calling hook %s: %s\n") % (hname, funcname)) starttime = time.time() try: # redirect IO descriptors to the ui descriptors so hooks # that write directly to these don't mess up the command # protocol when running through the command server old = sys.stdout, sys.stderr, sys.stdin sys.stdout, sys.stderr, sys.stdin = ui.fout, ui.ferr, ui.fin r = obj(ui=ui, repo=repo, hooktype=name, **args) except Exception as exc: if isinstance(exc, error.Abort): ui.warn(_('error: %s hook failed: %s\n') % (hname, exc.args[0])) else: ui.warn(_('error: %s hook raised an exception: ' '%s\n') % (hname, exc)) if throw: raise ui.traceback() return True, True finally: sys.stdout, sys.stderr, sys.stdin = old 
duration = time.time() - starttime ui.log('pythonhook', 'pythonhook-%s: %s finished in %0.2f seconds\n', name, funcname, duration) if r: if throw: raise error.HookAbort(_('%s hook failed') % hname) ui.warn(_('warning: %s hook failed\n') % hname) return r, False def _exthook(ui, repo, name, cmd, args, throw): ui.note(_("running hook %s: %s\n") % (name, cmd)) starttime = time.time() env = {} # make in-memory changes visible to external process if repo is not None: tr = repo.currenttransaction() repo.dirstate.write(tr) if tr and tr.writepending(): env['HG_PENDING'] = repo.root for k, v in args.iteritems(): if callable(v): v = v() if isinstance(v, dict): # make the dictionary element order stable across Python # implementations v = ('{' + ', '.join('%r: %r' % i for i in sorted(v.iteritems())) + '}') env['HG_' + k.upper()] = v if repo: cwd = repo.root else: cwd = os.getcwd() r = ui.system(cmd, environ=env, cwd=cwd) duration = time.time() - starttime ui.log('exthook', 'exthook-%s: %s finished in %0.2f seconds\n', name, cmd, duration) if r: desc, r = util.explainexit(r) if throw: raise error.HookAbort(_('%s hook %s') % (name, desc)) ui.warn(_('warning: %s hook %s\n') % (name, desc)) return r def _allhooks(ui): hooks = [] for name, cmd in ui.configitems('hooks'): if not name.startswith('priority'): priority = ui.configint('hooks', 'priority.%s' % name, 0) hooks.append((-priority, len(hooks), name, cmd)) return [(k, v) for p, o, k, v in sorted(hooks)] _redirect = False def redirect(state): global _redirect _redirect = state def hook(ui, repo, name, throw=False, **args): if not ui.callhooks: return False hooks = [] for hname, cmd in _allhooks(ui): if hname.split('.')[0] == name and cmd: hooks.append((hname, cmd)) res = runhooks(ui, repo, name, hooks, throw=throw, **args) r = False for hname, cmd in hooks: r = res[hname][0] or r return r def runhooks(ui, repo, name, hooks, throw=False, **args): res = {} oldstdout = -1 try: for hname, cmd in hooks: if oldstdout == -1 and 
_redirect: try: stdoutno = sys.__stdout__.fileno() stderrno = sys.__stderr__.fileno() # temporarily redirect stdout to stderr, if possible if stdoutno >= 0 and stderrno >= 0: sys.__stdout__.flush() oldstdout = os.dup(stdoutno) os.dup2(stderrno, stdoutno) except (OSError, AttributeError): # files seem to be bogus, give up on redirecting (WSGI, etc) pass if callable(cmd): r, raised = _pythonhook(ui, repo, name, hname, cmd, args, throw) elif cmd.startswith('python:'): if cmd.count(':') >= 2: path, cmd = cmd[7:].rsplit(':', 1) path = util.expandpath(path) if repo: path = os.path.join(repo.root, path) try: mod = extensions.loadpath(path, 'hghook.%s' % hname) except Exception: ui.write(_("loading %s hook failed:\n") % hname) raise hookfn = getattr(mod, cmd) else: hookfn = cmd[7:].strip() r, raised = _pythonhook(ui, repo, name, hname, hookfn, args, throw) else: r = _exthook(ui, repo, hname, cmd, args, throw) raised = False res[hname] = r, raised # The stderr is fully buffered on Windows when connected to a pipe. # A forcible flush is required to make small stderr data in the # remote side available to the client immediately. sys.stderr.flush() finally: if _redirect and oldstdout >= 0: os.dup2(oldstdout, stdoutno) os.close(oldstdout) return res mercurial-3.7.3/mercurial/pathencode.c0000644000175000017500000004062612676531524017377 0ustar mpmmpm00000000000000/* pathencode.c - efficient path name encoding Copyright 2012 Facebook This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. */ /* * An implementation of the name encoding scheme used by the fncache * store. The common case is of a path < 120 bytes long, which is * handled either in a single pass with no allocations or two passes * with a single allocation. For longer paths, multiple passes are * required. 
*/ #define PY_SSIZE_T_CLEAN #include #include #include #include #include #include "util.h" /* state machine for the fast path */ enum path_state { START, /* first byte of a path component */ A, /* "AUX" */ AU, THIRD, /* third of a 3-byte sequence, e.g. "AUX", "NUL" */ C, /* "CON" or "COMn" */ CO, COMLPT, /* "COM" or "LPT" */ COMLPTn, L, LP, N, NU, P, /* "PRN" */ PR, LDOT, /* leading '.' */ DOT, /* '.' in a non-leading position */ H, /* ".h" */ HGDI, /* ".hg", ".d", or ".i" */ SPACE, DEFAULT /* byte of a path component after the first */ }; /* state machine for dir-encoding */ enum dir_state { DDOT, DH, DHGDI, DDEFAULT }; static inline int inset(const uint32_t bitset[], char c) { return bitset[((uint8_t)c) >> 5] & (1 << (((uint8_t)c) & 31)); } static inline void charcopy(char *dest, Py_ssize_t *destlen, size_t destsize, char c) { if (dest) { assert(*destlen < destsize); dest[*destlen] = c; } (*destlen)++; } static inline void memcopy(char *dest, Py_ssize_t *destlen, size_t destsize, const void *src, Py_ssize_t len) { if (dest) { assert(*destlen + len < destsize); memcpy((void *)&dest[*destlen], src, len); } *destlen += len; } static inline void hexencode(char *dest, Py_ssize_t *destlen, size_t destsize, uint8_t c) { static const char hexdigit[] = "0123456789abcdef"; charcopy(dest, destlen, destsize, hexdigit[c >> 4]); charcopy(dest, destlen, destsize, hexdigit[c & 15]); } /* 3-byte escape: tilde followed by two hex digits */ static inline void escape3(char *dest, Py_ssize_t *destlen, size_t destsize, char c) { charcopy(dest, destlen, destsize, '~'); hexencode(dest, destlen, destsize, c); } static Py_ssize_t _encodedir(char *dest, size_t destsize, const char *src, Py_ssize_t len) { enum dir_state state = DDEFAULT; Py_ssize_t i = 0, destlen = 0; while (i < len) { switch (state) { case DDOT: switch (src[i]) { case 'd': case 'i': state = DHGDI; charcopy(dest, &destlen, destsize, src[i++]); break; case 'h': state = DH; charcopy(dest, &destlen, destsize, src[i++]); break; 
default: state = DDEFAULT; break; } break; case DH: if (src[i] == 'g') { state = DHGDI; charcopy(dest, &destlen, destsize, src[i++]); } else state = DDEFAULT; break; case DHGDI: if (src[i] == '/') { memcopy(dest, &destlen, destsize, ".hg", 3); charcopy(dest, &destlen, destsize, src[i++]); } state = DDEFAULT; break; case DDEFAULT: if (src[i] == '.') state = DDOT; charcopy(dest, &destlen, destsize, src[i++]); break; } } return destlen; } PyObject *encodedir(PyObject *self, PyObject *args) { Py_ssize_t len, newlen; PyObject *pathobj, *newobj; char *path; if (!PyArg_ParseTuple(args, "O:encodedir", &pathobj)) return NULL; if (PyString_AsStringAndSize(pathobj, &path, &len) == -1) { PyErr_SetString(PyExc_TypeError, "expected a string"); return NULL; } newlen = len ? _encodedir(NULL, 0, path, len + 1) : 1; if (newlen == len + 1) { Py_INCREF(pathobj); return pathobj; } newobj = PyString_FromStringAndSize(NULL, newlen); if (newobj) { PyString_GET_SIZE(newobj)--; _encodedir(PyString_AS_STRING(newobj), newlen, path, len + 1); } return newobj; } static Py_ssize_t _encode(const uint32_t twobytes[8], const uint32_t onebyte[8], char *dest, Py_ssize_t destlen, size_t destsize, const char *src, Py_ssize_t len, int encodedir) { enum path_state state = START; Py_ssize_t i = 0; /* * Python strings end with a zero byte, which we use as a * terminal token as they are not valid inside path names. 
*/ while (i < len) { switch (state) { case START: switch (src[i]) { case '/': charcopy(dest, &destlen, destsize, src[i++]); break; case '.': state = LDOT; escape3(dest, &destlen, destsize, src[i++]); break; case ' ': state = DEFAULT; escape3(dest, &destlen, destsize, src[i++]); break; case 'a': state = A; charcopy(dest, &destlen, destsize, src[i++]); break; case 'c': state = C; charcopy(dest, &destlen, destsize, src[i++]); break; case 'l': state = L; charcopy(dest, &destlen, destsize, src[i++]); break; case 'n': state = N; charcopy(dest, &destlen, destsize, src[i++]); break; case 'p': state = P; charcopy(dest, &destlen, destsize, src[i++]); break; default: state = DEFAULT; break; } break; case A: if (src[i] == 'u') { state = AU; charcopy(dest, &destlen, destsize, src[i++]); } else state = DEFAULT; break; case AU: if (src[i] == 'x') { state = THIRD; i++; } else state = DEFAULT; break; case THIRD: state = DEFAULT; switch (src[i]) { case '.': case '/': case '\0': escape3(dest, &destlen, destsize, src[i - 1]); break; default: i--; break; } break; case C: if (src[i] == 'o') { state = CO; charcopy(dest, &destlen, destsize, src[i++]); } else state = DEFAULT; break; case CO: if (src[i] == 'm') { state = COMLPT; i++; } else if (src[i] == 'n') { state = THIRD; i++; } else state = DEFAULT; break; case COMLPT: switch (src[i]) { case '1': case '2': case '3': case '4': case '5': case '6': case '7': case '8': case '9': state = COMLPTn; i++; break; default: state = DEFAULT; charcopy(dest, &destlen, destsize, src[i - 1]); break; } break; case COMLPTn: state = DEFAULT; switch (src[i]) { case '.': case '/': case '\0': escape3(dest, &destlen, destsize, src[i - 2]); charcopy(dest, &destlen, destsize, src[i - 1]); break; default: memcopy(dest, &destlen, destsize, &src[i - 2], 2); break; } break; case L: if (src[i] == 'p') { state = LP; charcopy(dest, &destlen, destsize, src[i++]); } else state = DEFAULT; break; case LP: if (src[i] == 't') { state = COMLPT; i++; } else state = DEFAULT; 
break; case N: if (src[i] == 'u') { state = NU; charcopy(dest, &destlen, destsize, src[i++]); } else state = DEFAULT; break; case NU: if (src[i] == 'l') { state = THIRD; i++; } else state = DEFAULT; break; case P: if (src[i] == 'r') { state = PR; charcopy(dest, &destlen, destsize, src[i++]); } else state = DEFAULT; break; case PR: if (src[i] == 'n') { state = THIRD; i++; } else state = DEFAULT; break; case LDOT: switch (src[i]) { case 'd': case 'i': state = HGDI; charcopy(dest, &destlen, destsize, src[i++]); break; case 'h': state = H; charcopy(dest, &destlen, destsize, src[i++]); break; default: state = DEFAULT; break; } break; case DOT: switch (src[i]) { case '/': case '\0': state = START; memcopy(dest, &destlen, destsize, "~2e", 3); charcopy(dest, &destlen, destsize, src[i++]); break; case 'd': case 'i': state = HGDI; charcopy(dest, &destlen, destsize, '.'); charcopy(dest, &destlen, destsize, src[i++]); break; case 'h': state = H; memcopy(dest, &destlen, destsize, ".h", 2); i++; break; default: state = DEFAULT; charcopy(dest, &destlen, destsize, '.'); break; } break; case H: if (src[i] == 'g') { state = HGDI; charcopy(dest, &destlen, destsize, src[i++]); } else state = DEFAULT; break; case HGDI: if (src[i] == '/') { state = START; if (encodedir) memcopy(dest, &destlen, destsize, ".hg", 3); charcopy(dest, &destlen, destsize, src[i++]); } else state = DEFAULT; break; case SPACE: switch (src[i]) { case '/': case '\0': state = START; memcopy(dest, &destlen, destsize, "~20", 3); charcopy(dest, &destlen, destsize, src[i++]); break; default: state = DEFAULT; charcopy(dest, &destlen, destsize, ' '); break; } break; case DEFAULT: while (inset(onebyte, src[i])) { charcopy(dest, &destlen, destsize, src[i++]); if (i == len) goto done; } switch (src[i]) { case '.': state = DOT; i++; break; case ' ': state = SPACE; i++; break; case '/': state = START; charcopy(dest, &destlen, destsize, '/'); i++; break; default: if (inset(onebyte, src[i])) { do { charcopy(dest, &destlen, 
destsize, src[i++]); } while (i < len && inset(onebyte, src[i])); } else if (inset(twobytes, src[i])) { char c = src[i++]; charcopy(dest, &destlen, destsize, '_'); charcopy(dest, &destlen, destsize, c == '_' ? '_' : c + 32); } else escape3(dest, &destlen, destsize, src[i++]); break; } break; } } done: return destlen; } static Py_ssize_t basicencode(char *dest, size_t destsize, const char *src, Py_ssize_t len) { static const uint32_t twobytes[8] = { 0, 0, 0x87fffffe }; static const uint32_t onebyte[8] = { 1, 0x2bff3bfa, 0x68000001, 0x2fffffff, }; Py_ssize_t destlen = 0; return _encode(twobytes, onebyte, dest, destlen, destsize, src, len, 1); } static const Py_ssize_t maxstorepathlen = 120; static Py_ssize_t _lowerencode(char *dest, size_t destsize, const char *src, Py_ssize_t len) { static const uint32_t onebyte[8] = { 1, 0x2bfffbfb, 0xe8000001, 0x2fffffff }; static const uint32_t lower[8] = { 0, 0, 0x7fffffe }; Py_ssize_t i, destlen = 0; for (i = 0; i < len; i++) { if (inset(onebyte, src[i])) charcopy(dest, &destlen, destsize, src[i]); else if (inset(lower, src[i])) charcopy(dest, &destlen, destsize, src[i] + 32); else escape3(dest, &destlen, destsize, src[i]); } return destlen; } PyObject *lowerencode(PyObject *self, PyObject *args) { char *path; Py_ssize_t len, newlen; PyObject *ret; if (!PyArg_ParseTuple(args, "s#:lowerencode", &path, &len)) return NULL; newlen = _lowerencode(NULL, 0, path, len); ret = PyString_FromStringAndSize(NULL, newlen); if (ret) _lowerencode(PyString_AS_STRING(ret), newlen, path, len); return ret; } /* See store.py:_auxencode for a description. 
*/ static Py_ssize_t auxencode(char *dest, size_t destsize, const char *src, Py_ssize_t len) { static const uint32_t twobytes[8]; static const uint32_t onebyte[8] = { ~0U, 0xffff3ffe, ~0U, ~0U, ~0U, ~0U, ~0U, ~0U, }; return _encode(twobytes, onebyte, dest, 0, destsize, src, len, 0); } static PyObject *hashmangle(const char *src, Py_ssize_t len, const char sha[20]) { static const Py_ssize_t dirprefixlen = 8; static const Py_ssize_t maxshortdirslen = 68; char *dest; PyObject *ret; Py_ssize_t i, d, p, lastslash = len - 1, lastdot = -1; Py_ssize_t destsize, destlen = 0, slop, used; while (lastslash >= 0 && src[lastslash] != '/') { if (src[lastslash] == '.' && lastdot == -1) lastdot = lastslash; lastslash--; } #if 0 /* All paths should end in a suffix of ".i" or ".d". Unfortunately, the file names in test-hybridencode.py violate this rule. */ if (lastdot != len - 3) { PyErr_SetString(PyExc_ValueError, "suffix missing or wrong length"); return NULL; } #endif /* If src contains a suffix, we will append it to the end of the new string, so make room. */ destsize = 120; if (lastdot >= 0) destsize += len - lastdot - 1; ret = PyString_FromStringAndSize(NULL, destsize); if (ret == NULL) return NULL; dest = PyString_AS_STRING(ret); memcopy(dest, &destlen, destsize, "dh/", 3); /* Copy up to dirprefixlen bytes of each path component, up to a limit of maxshortdirslen bytes. */ for (i = d = p = 0; i < lastslash; i++, p++) { if (src[i] == '/') { char d = dest[destlen - 1]; /* After truncation, a directory name may end in a space or dot, which are unportable. */ if (d == '.' || d == ' ') dest[destlen - 1] = '_'; /* The + 3 is to account for "dh/" in the beginning */ if (destlen > maxshortdirslen + 3) break; charcopy(dest, &destlen, destsize, src[i]); p = -1; } else if (p < dirprefixlen) charcopy(dest, &destlen, destsize, src[i]); } /* Rewind to just before the last slash copied. 
*/ if (destlen > maxshortdirslen + 3) do { destlen--; } while (destlen > 0 && dest[destlen] != '/'); if (destlen > 3) { if (lastslash > 0) { char d = dest[destlen - 1]; /* The last directory component may be truncated, so make it safe. */ if (d == '.' || d == ' ') dest[destlen - 1] = '_'; } charcopy(dest, &destlen, destsize, '/'); } /* Add a prefix of the original file's name. Its length depends on the number of bytes left after accounting for hash and suffix. */ used = destlen + 40; if (lastdot >= 0) used += len - lastdot - 1; slop = maxstorepathlen - used; if (slop > 0) { Py_ssize_t basenamelen = lastslash >= 0 ? len - lastslash - 2 : len - 1; if (basenamelen > slop) basenamelen = slop; if (basenamelen > 0) memcopy(dest, &destlen, destsize, &src[lastslash + 1], basenamelen); } /* Add hash and suffix. */ for (i = 0; i < 20; i++) hexencode(dest, &destlen, destsize, sha[i]); if (lastdot >= 0) memcopy(dest, &destlen, destsize, &src[lastdot], len - lastdot - 1); PyString_GET_SIZE(ret) = destlen; return ret; } /* * Avoiding a trip through Python would improve performance by 50%, * but we don't encounter enough long names to be worth the code. 
*/ static int sha1hash(char hash[20], const char *str, Py_ssize_t len) { static PyObject *shafunc; PyObject *shaobj, *hashobj; if (shafunc == NULL) { PyObject *util, *name = PyString_FromString("mercurial.util"); if (name == NULL) return -1; util = PyImport_Import(name); Py_DECREF(name); if (util == NULL) { PyErr_SetString(PyExc_ImportError, "mercurial.util"); return -1; } shafunc = PyObject_GetAttrString(util, "sha1"); Py_DECREF(util); if (shafunc == NULL) { PyErr_SetString(PyExc_AttributeError, "module 'mercurial.util' has no " "attribute 'sha1'"); return -1; } } shaobj = PyObject_CallFunction(shafunc, "s#", str, len); if (shaobj == NULL) return -1; hashobj = PyObject_CallMethod(shaobj, "digest", ""); Py_DECREF(shaobj); if (hashobj == NULL) return -1; if (!PyString_Check(hashobj) || PyString_GET_SIZE(hashobj) != 20) { PyErr_SetString(PyExc_TypeError, "result of digest is not a 20-byte hash"); Py_DECREF(hashobj); return -1; } memcpy(hash, PyString_AS_STRING(hashobj), 20); Py_DECREF(hashobj); return 0; } #define MAXENCODE 4096 * 4 static PyObject *hashencode(const char *src, Py_ssize_t len) { char dired[MAXENCODE]; char lowered[MAXENCODE]; char auxed[MAXENCODE]; Py_ssize_t dirlen, lowerlen, auxlen, baselen; char sha[20]; baselen = (len - 5) * 3; if (baselen >= MAXENCODE) { PyErr_SetString(PyExc_ValueError, "string too long"); return NULL; } dirlen = _encodedir(dired, baselen, src, len); if (sha1hash(sha, dired, dirlen - 1) == -1) return NULL; lowerlen = _lowerencode(lowered, baselen, dired + 5, dirlen - 5); auxlen = auxencode(auxed, baselen, lowered, lowerlen); return hashmangle(auxed, auxlen, sha); } PyObject *pathencode(PyObject *self, PyObject *args) { Py_ssize_t len, newlen; PyObject *pathobj, *newobj; char *path; if (!PyArg_ParseTuple(args, "O:pathencode", &pathobj)) return NULL; if (PyString_AsStringAndSize(pathobj, &path, &len) == -1) { PyErr_SetString(PyExc_TypeError, "expected a string"); return NULL; } if (len > maxstorepathlen) newlen = maxstorepathlen + 
2; else newlen = len ? basicencode(NULL, 0, path, len + 1) : 1; if (newlen <= maxstorepathlen + 1) { if (newlen == len + 1) { Py_INCREF(pathobj); return pathobj; } newobj = PyString_FromStringAndSize(NULL, newlen); if (newobj) { PyString_GET_SIZE(newobj)--; basicencode(PyString_AS_STRING(newobj), newlen, path, len + 1); } } else newobj = hashencode(path, len + 1); return newobj; } mercurial-3.7.3/mercurial/transaction.py0000644000175000017500000005022412676531525020014 0ustar mpmmpm00000000000000# transaction.py - simple journaling scheme for mercurial # # This transaction scheme is intended to gracefully handle program # errors and interruptions. More serious failures like system crashes # can be recovered with an fsck-like tool. As the whole repository is # effectively log-structured, this should amount to simply truncating # anything that isn't referenced in the changelog. # # Copyright 2005, 2006 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import errno from .i18n import _ from . 
import ( error, util, ) version = 2 def active(func): def _active(self, *args, **kwds): if self.count == 0: raise error.Abort(_( 'cannot use transaction when it is already committed/aborted')) return func(self, *args, **kwds) return _active def _playback(journal, report, opener, vfsmap, entries, backupentries, unlink=True): for f, o, _ignore in entries: if o or not unlink: try: fp = opener(f, 'a') fp.truncate(o) fp.close() except IOError: report(_("failed to truncate %s\n") % f) raise else: try: opener.unlink(f) except (IOError, OSError) as inst: if inst.errno != errno.ENOENT: raise backupfiles = [] for l, f, b, c in backupentries: if l not in vfsmap and c: report("couldn't handle %s: unknown cache location %s\n" % (b, l)) vfs = vfsmap[l] try: if f and b: filepath = vfs.join(f) backuppath = vfs.join(b) try: util.copyfile(backuppath, filepath) backupfiles.append(b) except IOError: report(_("failed to recover %s\n") % f) else: target = f or b try: vfs.unlink(target) except (IOError, OSError) as inst: if inst.errno != errno.ENOENT: raise except (IOError, OSError, error.Abort) as inst: if not c: raise backuppath = "%s.backupfiles" % journal if opener.exists(backuppath): opener.unlink(backuppath) opener.unlink(journal) try: for f in backupfiles: if opener.exists(f): opener.unlink(f) except (IOError, OSError, error.Abort) as inst: # only pure backup file remains, it is sage to ignore any error pass class transaction(object): def __init__(self, report, opener, vfsmap, journalname, undoname=None, after=None, createmode=None, validator=None, releasefn=None): """Begin a new transaction Begins a new transaction that allows rolling back writes in the event of an exception. 
* `after`: called after the transaction has been committed * `createmode`: the mode of the journal file that will be created * `releasefn`: called after releasing (with transaction and result) """ self.count = 1 self.usages = 1 self.report = report # a vfs to the store content self.opener = opener # a map to access file in various {location -> vfs} vfsmap = vfsmap.copy() vfsmap[''] = opener # set default value self._vfsmap = vfsmap self.after = after self.entries = [] self.map = {} self.journal = journalname self.undoname = undoname self._queue = [] # A callback to validate transaction content before closing it. # should raise exception is anything is wrong. # target user is repository hooks. if validator is None: validator = lambda tr: None self.validator = validator # A callback to do something just after releasing transaction. if releasefn is None: releasefn = lambda tr, success: None self.releasefn = releasefn # a dict of arguments to be passed to hooks self.hookargs = {} self.file = opener.open(self.journal, "w") # a list of ('location', 'path', 'backuppath', cache) entries. # - if 'backuppath' is empty, no file existed at backup time # - if 'path' is empty, this is a temporary transaction file # - if 'location' is not empty, the path is outside main opener reach. 
# use 'location' value as a key in a vfsmap to find the right 'vfs' # (cache is currently unused) self._backupentries = [] self._backupmap = {} self._backupjournal = "%s.backupfiles" % self.journal self._backupsfile = opener.open(self._backupjournal, 'w') self._backupsfile.write('%d\n' % version) if createmode is not None: opener.chmod(self.journal, createmode & 0o666) opener.chmod(self._backupjournal, createmode & 0o666) # hold file generations to be performed on commit self._filegenerators = {} # hold callback to write pending data for hooks self._pendingcallback = {} # True is any pending data have been written ever self._anypending = False # holds callback to call when writing the transaction self._finalizecallback = {} # hold callback for post transaction close self._postclosecallback = {} # holds callbacks to call during abort self._abortcallback = {} def __del__(self): if self.journal: self._abort() @active def startgroup(self): """delay registration of file entry This is used by strip to delay vision of strip offset. The transaction sees either none or all of the strip actions to be done.""" self._queue.append([]) @active def endgroup(self): """apply delayed registration of file entry. This is used by strip to delay vision of strip offset. 
The transaction sees either none or all of the strip actions to be done.""" q = self._queue.pop() for f, o, data in q: self._addentry(f, o, data) @active def add(self, file, offset, data=None): """record the state of an append-only file before update""" if file in self.map or file in self._backupmap: return if self._queue: self._queue[-1].append((file, offset, data)) return self._addentry(file, offset, data) def _addentry(self, file, offset, data): """add a append-only entry to memory and on-disk state""" if file in self.map or file in self._backupmap: return self.entries.append((file, offset, data)) self.map[file] = len(self.entries) - 1 # add enough data to the journal to do the truncate self.file.write("%s\0%d\n" % (file, offset)) self.file.flush() @active def addbackup(self, file, hardlink=True, location=''): """Adds a backup of the file to the transaction Calling addbackup() creates a hardlink backup of the specified file that is used to recover the file in the event of the transaction aborting. 
* `file`: the file path, relative to .hg/store * `hardlink`: use a hardlink to quickly create the backup """ if self._queue: msg = 'cannot use transaction.addbackup inside "group"' raise RuntimeError(msg) if file in self.map or file in self._backupmap: return vfs = self._vfsmap[location] dirname, filename = vfs.split(file) backupfilename = "%s.backup.%s" % (self.journal, filename) backupfile = vfs.reljoin(dirname, backupfilename) if vfs.exists(file): filepath = vfs.join(file) backuppath = vfs.join(backupfile) util.copyfile(filepath, backuppath, hardlink=hardlink) else: backupfile = '' self._addbackupentry((location, file, backupfile, False)) def _addbackupentry(self, entry): """register a new backup entry and write it to disk""" self._backupentries.append(entry) self._backupmap[entry[1]] = len(self._backupentries) - 1 self._backupsfile.write("%s\0%s\0%s\0%d\n" % entry) self._backupsfile.flush() @active def registertmp(self, tmpfile, location=''): """register a temporary transaction file Such files will be deleted when the transaction exits (on both failure and success). """ self._addbackupentry((location, '', tmpfile, False)) @active def addfilegenerator(self, genid, filenames, genfunc, order=0, location=''): """add a function to generates some files at transaction commit The `genfunc` argument is a function capable of generating proper content of each entry in the `filename` tuple. At transaction close time, `genfunc` will be called with one file object argument per entries in `filenames`. The transaction itself is responsible for the backup, creation and final write of such file. The `genid` argument is used to ensure the same set of file is only generated once. Call to `addfilegenerator` for a `genid` already present will overwrite the old entry. The `order` argument may be used to control the order in which multiple generator will be executed. 
The `location` arguments may be used to indicate the files are located outside of the the standard directory for transaction. It should match one of the key of the `transaction.vfsmap` dictionary. """ # For now, we are unable to do proper backup and restore of custom vfs # but for bookmarks that are handled outside this mechanism. self._filegenerators[genid] = (order, filenames, genfunc, location) def _generatefiles(self, suffix=''): # write files registered for generation any = False for entry in sorted(self._filegenerators.values()): any = True order, filenames, genfunc, location = entry vfs = self._vfsmap[location] files = [] try: for name in filenames: name += suffix if suffix: self.registertmp(name, location=location) else: self.addbackup(name, location=location) files.append(vfs(name, 'w', atomictemp=True)) genfunc(*files) finally: for f in files: f.close() return any @active def find(self, file): if file in self.map: return self.entries[self.map[file]] if file in self._backupmap: return self._backupentries[self._backupmap[file]] return None @active def replace(self, file, offset, data=None): ''' replace can only replace already committed entries that are not pending in the queue ''' if file not in self.map: raise KeyError(file) index = self.map[file] self.entries[index] = (file, offset, data) self.file.write("%s\0%d\n" % (file, offset)) self.file.flush() @active def nest(self): self.count += 1 self.usages += 1 return self def release(self): if self.count > 0: self.usages -= 1 # if the transaction scopes are left without being closed, fail if self.count > 0 and self.usages == 0: self._abort() def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): try: if exc_type is None: self.close() finally: self.release() def running(self): return self.count > 0 def addpending(self, category, callback): """add a callback to be called when the transaction is pending The transaction will be given as callback's first argument. 
Category is a unique identifier to allow overwriting an old callback with a newer callback. """ self._pendingcallback[category] = callback @active def writepending(self): '''write pending file to temporary version This is used to allow hooks to view a transaction before commit''' categories = sorted(self._pendingcallback) for cat in categories: # remove callback since the data will have been flushed any = self._pendingcallback.pop(cat)(self) self._anypending = self._anypending or any self._anypending |= self._generatefiles(suffix='.pending') return self._anypending @active def addfinalize(self, category, callback): """add a callback to be called when the transaction is closed The transaction will be given as callback's first argument. Category is a unique identifier to allow overwriting old callbacks with newer callbacks. """ self._finalizecallback[category] = callback @active def addpostclose(self, category, callback): """add a callback to be called after the transaction is closed The transaction will be given as callback's first argument. Category is a unique identifier to allow overwriting an old callback with a newer callback. """ self._postclosecallback[category] = callback @active def addabort(self, category, callback): """add a callback to be called when the transaction is aborted. The transaction will be given as the first argument to the callback. Category is a unique identifier to allow overwriting an old callback with a newer callback. 
""" self._abortcallback[category] = callback @active def close(self): '''commit the transaction''' if self.count == 1: self.validator(self) # will raise exception if needed self._generatefiles() categories = sorted(self._finalizecallback) for cat in categories: self._finalizecallback[cat](self) self.count -= 1 if self.count != 0: return self.file.close() self._backupsfile.close() # cleanup temporary files for l, f, b, c in self._backupentries: if l not in self._vfsmap and c: self.report("couldn't remove %s: unknown cache location %s\n" % (b, l)) continue vfs = self._vfsmap[l] if not f and b and vfs.exists(b): try: vfs.unlink(b) except (IOError, OSError, error.Abort) as inst: if not c: raise # Abort may be raise by read only opener self.report("couldn't remove %s: %s\n" % (vfs.join(b), inst)) self.entries = [] self._writeundo() if self.after: self.after() if self.opener.isfile(self._backupjournal): self.opener.unlink(self._backupjournal) if self.opener.isfile(self.journal): self.opener.unlink(self.journal) for l, _f, b, c in self._backupentries: if l not in self._vfsmap and c: self.report("couldn't remove %s: unknown cache location" "%s\n" % (b, l)) continue vfs = self._vfsmap[l] if b and vfs.exists(b): try: vfs.unlink(b) except (IOError, OSError, error.Abort) as inst: if not c: raise # Abort may be raise by read only opener self.report("couldn't remove %s: %s\n" % (vfs.join(b), inst)) self._backupentries = [] self.journal = None self.releasefn(self, True) # notify success of closing transaction # run post close action categories = sorted(self._postclosecallback) for cat in categories: self._postclosecallback[cat](self) @active def abort(self): '''abort the transaction (generally called on error, or when the transaction is not explicitly committed before going out of scope)''' self._abort() def _writeundo(self): """write transaction data for possible future undo call""" if self.undoname is None: return undobackupfile = self.opener.open("%s.backupfiles" % 
self.undoname, 'w') undobackupfile.write('%d\n' % version) for l, f, b, c in self._backupentries: if not f: # temporary file continue if not b: u = '' else: if l not in self._vfsmap and c: self.report("couldn't remove %s: unknown cache location" "%s\n" % (b, l)) continue vfs = self._vfsmap[l] base, name = vfs.split(b) assert name.startswith(self.journal), name uname = name.replace(self.journal, self.undoname, 1) u = vfs.reljoin(base, uname) util.copyfile(vfs.join(b), vfs.join(u), hardlink=True) undobackupfile.write("%s\0%s\0%s\0%d\n" % (l, f, u, c)) undobackupfile.close() def _abort(self): self.count = 0 self.usages = 0 self.file.close() self._backupsfile.close() try: if not self.entries and not self._backupentries: if self._backupjournal: self.opener.unlink(self._backupjournal) if self.journal: self.opener.unlink(self.journal) return self.report(_("transaction abort!\n")) try: for cat in sorted(self._abortcallback): self._abortcallback[cat](self) _playback(self.journal, self.report, self.opener, self._vfsmap, self.entries, self._backupentries, False) self.report(_("rollback completed\n")) except BaseException: self.report(_("rollback failed - please run hg recover\n")) finally: self.journal = None self.releasefn(self, False) # notify failure of transaction def rollback(opener, vfsmap, file, report): """Rolls back the transaction contained in the given file Reads the entries in the specified file, and the corresponding '*.backupfiles' file, to recover from an incomplete transaction. * `file`: a file containing a list of entries, specifying where to truncate each file. The file should contain a list of file\0offset pairs, delimited by newlines. The corresponding '*.backupfiles' file should contain a list of file\0backupfile pairs, delimited by \0. 
""" entries = [] backupentries = [] fp = opener.open(file) lines = fp.readlines() fp.close() for l in lines: try: f, o = l.split('\0') entries.append((f, int(o), None)) except ValueError: report(_("couldn't read journal entry %r!\n") % l) backupjournal = "%s.backupfiles" % file if opener.exists(backupjournal): fp = opener.open(backupjournal) lines = fp.readlines() if lines: ver = lines[0][:-1] if ver == str(version): for line in lines[1:]: if line: # Shave off the trailing newline line = line[:-1] l, f, b, c = line.split('\0') backupentries.append((l, f, b, bool(c))) else: report(_("journal was created by a different version of " "Mercurial\n")) _playback(file, report, opener, vfsmap, entries, backupentries) mercurial-3.7.3/mercurial/ancestor.py0000644000175000017500000003017312676531525017306 0ustar mpmmpm00000000000000# ancestor.py - generic DAG ancestor algorithm for mercurial # # Copyright 2006 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import collections import heapq from .node import nullrev def commonancestorsheads(pfunc, *nodes): """Returns a set with the heads of all common ancestors of all nodes, heads(::nodes[0] and ::nodes[1] and ...) . pfunc must return a list of parent vertices for a given vertex. 
""" if not isinstance(nodes, set): nodes = set(nodes) if nullrev in nodes: return set() if len(nodes) <= 1: return nodes allseen = (1 << len(nodes)) - 1 seen = [0] * (max(nodes) + 1) for i, n in enumerate(nodes): seen[n] = 1 << i poison = 1 << (i + 1) gca = set() interesting = len(nodes) nv = len(seen) - 1 while nv >= 0 and interesting: v = nv nv -= 1 if not seen[v]: continue sv = seen[v] if sv < poison: interesting -= 1 if sv == allseen: gca.add(v) sv |= poison if v in nodes: # history is linear return set([v]) if sv < poison: for p in pfunc(v): sp = seen[p] if p == nullrev: continue if sp == 0: seen[p] = sv interesting += 1 elif sp != sv: seen[p] |= sv else: for p in pfunc(v): if p == nullrev: continue sp = seen[p] if sp and sp < poison: interesting -= 1 seen[p] = sv return gca def ancestors(pfunc, *orignodes): """ Returns the common ancestors of a and b that are furthest from a root (as measured by longest path). pfunc must return a list of parent vertices for a given vertex. """ def deepest(nodes): interesting = {} count = max(nodes) + 1 depth = [0] * count seen = [0] * count mapping = [] for (i, n) in enumerate(sorted(nodes)): depth[n] = 1 b = 1 << i seen[n] = b interesting[b] = 1 mapping.append((b, n)) nv = count - 1 while nv >= 0 and len(interesting) > 1: v = nv nv -= 1 dv = depth[v] if dv == 0: continue sv = seen[v] for p in pfunc(v): if p == nullrev: continue dp = depth[p] nsp = sp = seen[p] if dp <= dv: depth[p] = dv + 1 if sp != sv: interesting[sv] += 1 nsp = seen[p] = sv if sp: interesting[sp] -= 1 if interesting[sp] == 0: del interesting[sp] elif dv == dp - 1: nsp = sp | sv if nsp == sp: continue seen[p] = nsp interesting.setdefault(nsp, 0) interesting[nsp] += 1 interesting[sp] -= 1 if interesting[sp] == 0: del interesting[sp] interesting[sv] -= 1 if interesting[sv] == 0: del interesting[sv] if len(interesting) != 1: return [] k = 0 for i in interesting: k |= i return set(n for (i, n) in mapping if k & i) gca = commonancestorsheads(pfunc, *orignodes) 
if len(gca) <= 1: return gca return deepest(gca) class incrementalmissingancestors(object): '''persistent state used to calculate missing ancestors incrementally Although similar in spirit to lazyancestors below, this is a separate class because trying to support contains and missingancestors operations with the same internal data structures adds needless complexity.''' def __init__(self, pfunc, bases): self.bases = set(bases) if not self.bases: self.bases.add(nullrev) self.pfunc = pfunc def hasbases(self): '''whether the common set has any non-trivial bases''' return self.bases and self.bases != set([nullrev]) def addbases(self, newbases): '''grow the ancestor set by adding new bases''' self.bases.update(newbases) def removeancestorsfrom(self, revs): '''remove all ancestors of bases from the set revs (in place)''' bases = self.bases pfunc = self.pfunc revs.difference_update(bases) # nullrev is always an ancestor revs.discard(nullrev) if not revs: return # anything in revs > start is definitely not an ancestor of bases # revs <= start needs to be investigated start = max(bases) keepcount = sum(1 for r in revs if r > start) if len(revs) == keepcount: # no revs to consider return for curr in xrange(start, min(revs) - 1, -1): if curr not in bases: continue revs.discard(curr) bases.update(pfunc(curr)) if len(revs) == keepcount: # no more potential revs to discard break def missingancestors(self, revs): '''return all the ancestors of revs that are not ancestors of self.bases This may include elements from revs. Equivalent to the revset (::revs - ::self.bases). 
Revs are returned in revision number order, which is a topological order.''' revsvisit = set(revs) basesvisit = self.bases pfunc = self.pfunc bothvisit = revsvisit.intersection(basesvisit) revsvisit.difference_update(bothvisit) if not revsvisit: return [] start = max(max(revsvisit), max(basesvisit)) # At this point, we hold the invariants that: # - revsvisit is the set of nodes we know are an ancestor of at least # one of the nodes in revs # - basesvisit is the same for bases # - bothvisit is the set of nodes we know are ancestors of at least one # of the nodes in revs and one of the nodes in bases. bothvisit and # revsvisit are mutually exclusive, but bothvisit is a subset of # basesvisit. # Now we walk down in reverse topo order, adding parents of nodes # already visited to the sets while maintaining the invariants. When a # node is found in both revsvisit and basesvisit, it is removed from # revsvisit and added to bothvisit. When revsvisit becomes empty, there # are no more ancestors of revs that aren't also ancestors of bases, so # exit. missing = [] for curr in xrange(start, nullrev, -1): if not revsvisit: break if curr in bothvisit: bothvisit.remove(curr) # curr's parents might have made it into revsvisit through # another path for p in pfunc(curr): revsvisit.discard(p) basesvisit.add(p) bothvisit.add(p) continue if curr in revsvisit: missing.append(curr) revsvisit.remove(curr) thisvisit = revsvisit othervisit = basesvisit elif curr in basesvisit: thisvisit = basesvisit othervisit = revsvisit else: # not an ancestor of revs or bases: ignore continue for p in pfunc(curr): if p == nullrev: pass elif p in othervisit or p in bothvisit: # p is implicitly in thisvisit. 
This means p is or should be # in bothvisit revsvisit.discard(p) basesvisit.add(p) bothvisit.add(p) else: # visit later thisvisit.add(p) missing.reverse() return missing class lazyancestors(object): def __init__(self, pfunc, revs, stoprev=0, inclusive=False): """Create a new object generating ancestors for the given revs. Does not generate revs lower than stoprev. This is computed lazily starting from revs. The object supports iteration and membership. cl should be a changelog and revs should be an iterable. inclusive is a boolean that indicates whether revs should be included. Revs lower than stoprev will not be generated. Result does not include the null revision.""" self._parentrevs = pfunc self._initrevs = revs self._stoprev = stoprev self._inclusive = inclusive # Initialize data structures for __contains__. # For __contains__, we use a heap rather than a deque because # (a) it minimizes the number of parentrevs calls made # (b) it makes the loop termination condition obvious # Python's heap is a min-heap. Multiply all values by -1 to convert it # into a max-heap. self._containsvisit = [-rev for rev in revs] heapq.heapify(self._containsvisit) if inclusive: self._containsseen = set(revs) else: self._containsseen = set() def __nonzero__(self): """False if the set is empty, True otherwise.""" try: iter(self).next() return True except StopIteration: return False def __iter__(self): """Generate the ancestors of _initrevs in reverse topological order. If inclusive is False, yield a sequence of revision numbers starting with the parents of each revision in revs, i.e., each revision is *not* considered an ancestor of itself. Results are in breadth-first order: parents of each rev in revs, then parents of those, etc. If inclusive is True, yield all the revs first (ignoring stoprev), then yield all the ancestors of revs as when inclusive is False. 
If an element in revs is an ancestor of a different rev it is not yielded again.""" seen = set() revs = self._initrevs if self._inclusive: for rev in revs: yield rev seen.update(revs) parentrevs = self._parentrevs stoprev = self._stoprev visit = collections.deque(revs) see = seen.add schedule = visit.append while visit: for parent in parentrevs(visit.popleft()): if parent >= stoprev and parent not in seen: schedule(parent) see(parent) yield parent def __contains__(self, target): """Test whether target is an ancestor of self._initrevs.""" # Trying to do both __iter__ and __contains__ using the same visit # heap and seen set is complex enough that it slows down both. Keep # them separate. seen = self._containsseen if target in seen: return True parentrevs = self._parentrevs visit = self._containsvisit stoprev = self._stoprev heappop = heapq.heappop heappush = heapq.heappush see = seen.add targetseen = False while visit and -visit[0] > target and not targetseen: for parent in parentrevs(-heappop(visit)): if parent < stoprev or parent in seen: continue # We need to make sure we push all parents into the heap so # that we leave it in a consistent state for future calls. heappush(visit, -parent) see(parent) if parent == target: targetseen = True return targetseen mercurial-3.7.3/mercurial/sshpeer.py0000644000175000017500000002522112676531525017137 0ustar mpmmpm00000000000000# sshpeer.py - ssh repository proxy class for mercurial # # Copyright 2005, 2006 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import re from .i18n import _ from . 
import ( error, util, wireproto, ) class remotelock(object): def __init__(self, repo): self.repo = repo def release(self): self.repo.unlock() self.repo = None def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): if self.repo: self.release() def __del__(self): if self.repo: self.release() def _serverquote(s): if not s: return s '''quote a string for the remote shell ... which we assume is sh''' if re.match('[a-zA-Z0-9@%_+=:,./-]*$', s): return s return "'%s'" % s.replace("'", "'\\''") def _forwardoutput(ui, pipe): """display all data currently available on pipe as remote output. This is non blocking.""" s = util.readpipe(pipe) if s: for l in s.splitlines(): ui.status(_("remote: "), l, '\n') class doublepipe(object): """Operate a side-channel pipe in addition of a main one The side-channel pipe contains server output to be forwarded to the user input. The double pipe will behave as the "main" pipe, but will ensure the content of the "side" pipe is properly processed while we wait for blocking call on the "main" pipe. If large amounts of data are read from "main", the forward will cease after the first bytes start to appear. This simplifies the implementation without affecting actual output of sshpeer too much as we rarely issue large read for data not yet emitted by the server. The main pipe is expected to be a 'bufferedinputpipe' from the util module that handle all the os specific bites. This class lives in this module because it focus on behavior specific to the ssh protocol.""" def __init__(self, ui, main, side): self._ui = ui self._main = main self._side = side def _wait(self): """wait until some data are available on main or side return a pair of boolean (ismainready, issideready) (This will only wait for data if the setup is supported by `util.poll`) """ if getattr(self._main, 'hasbuffer', False): # getattr for classic pipe return (True, True) # main has data, assume side is worth poking at. 
fds = [self._main.fileno(), self._side.fileno()] try: act = util.poll(fds) except NotImplementedError: # non supported yet case, assume all have data. act = fds return (self._main.fileno() in act, self._side.fileno() in act) def write(self, data): return self._call('write', data) def read(self, size): return self._call('read', size) def readline(self): return self._call('readline') def _call(self, methname, data=None): """call on "main", forward output of "side" while blocking """ # data can be '' or 0 if (data is not None and not data) or self._main.closed: _forwardoutput(self._ui, self._side) return '' while True: mainready, sideready = self._wait() if sideready: _forwardoutput(self._ui, self._side) if mainready: meth = getattr(self._main, methname) if data is None: return meth() else: return meth(data) def close(self): return self._main.close() def flush(self): return self._main.flush() class sshpeer(wireproto.wirepeer): def __init__(self, ui, path, create=False): self._url = path self.ui = ui self.pipeo = self.pipei = self.pipee = None u = util.url(path, parsequery=False, parsefragment=False) if u.scheme != 'ssh' or not u.host or u.path is None: self._abort(error.RepoError(_("couldn't parse location %s") % path)) self.user = u.user if u.passwd is not None: self._abort(error.RepoError(_("password in URL not supported"))) self.host = u.host self.port = u.port self.path = u.path or "." 
sshcmd = self.ui.config("ui", "ssh", "ssh") remotecmd = self.ui.config("ui", "remotecmd", "hg") args = util.sshargs(sshcmd, _serverquote(self.host), _serverquote(self.user), _serverquote(self.port)) if create: cmd = '%s %s %s' % (sshcmd, args, util.shellquote("%s init %s" % (_serverquote(remotecmd), _serverquote(self.path)))) ui.debug('running %s\n' % cmd) res = ui.system(cmd) if res != 0: self._abort(error.RepoError(_("could not create remote repo"))) self._validaterepo(sshcmd, args, remotecmd) def url(self): return self._url def _validaterepo(self, sshcmd, args, remotecmd): # cleanup up previous run self.cleanup() cmd = '%s %s %s' % (sshcmd, args, util.shellquote("%s -R %s serve --stdio" % (_serverquote(remotecmd), _serverquote(self.path)))) self.ui.debug('running %s\n' % cmd) cmd = util.quotecommand(cmd) # while self.subprocess isn't used, having it allows the subprocess to # to clean up correctly later # # no buffer allow the use of 'select' # feel free to remove buffering and select usage when we ultimately # move to threading. 
sub = util.popen4(cmd, bufsize=0) self.pipeo, self.pipei, self.pipee, self.subprocess = sub self.pipei = util.bufferedinputpipe(self.pipei) self.pipei = doublepipe(self.ui, self.pipei, self.pipee) self.pipeo = doublepipe(self.ui, self.pipeo, self.pipee) # skip any noise generated by remote shell self._callstream("hello") r = self._callstream("between", pairs=("%s-%s" % ("0"*40, "0"*40))) lines = ["", "dummy"] max_noise = 500 while lines[-1] and max_noise: l = r.readline() self.readerr() if lines[-1] == "1\n" and l == "\n": break if l: self.ui.debug("remote: ", l) lines.append(l) max_noise -= 1 else: self._abort(error.RepoError(_('no suitable response from ' 'remote hg'))) self._caps = set() for l in reversed(lines): if l.startswith("capabilities:"): self._caps.update(l[:-1].split(":")[1].split()) break def _capabilities(self): return self._caps def readerr(self): _forwardoutput(self.ui, self.pipee) def _abort(self, exception): self.cleanup() raise exception def cleanup(self): if self.pipeo is None: return self.pipeo.close() self.pipei.close() try: # read the error descriptor until EOF for l in self.pipee: self.ui.status(_("remote: "), l) except (IOError, ValueError): pass self.pipee.close() __del__ = cleanup def _callstream(self, cmd, **args): self.ui.debug("sending %s command\n" % cmd) self.pipeo.write("%s\n" % cmd) _func, names = wireproto.commands[cmd] keys = names.split() wireargs = {} for k in keys: if k == '*': wireargs['*'] = args break else: wireargs[k] = args[k] del args[k] for k, v in sorted(wireargs.iteritems()): self.pipeo.write("%s %d\n" % (k, len(v))) if isinstance(v, dict): for dk, dv in v.iteritems(): self.pipeo.write("%s %d\n" % (dk, len(dv))) self.pipeo.write(dv) else: self.pipeo.write(v) self.pipeo.flush() return self.pipei def _callcompressable(self, cmd, **args): return self._callstream(cmd, **args) def _call(self, cmd, **args): self._callstream(cmd, **args) return self._recv() def _callpush(self, cmd, fp, **args): r = self._call(cmd, **args) 
if r: return '', r while True: d = fp.read(4096) if not d: break self._send(d) self._send("", flush=True) r = self._recv() if r: return '', r return self._recv(), '' def _calltwowaystream(self, cmd, fp, **args): r = self._call(cmd, **args) if r: # XXX needs to be made better raise error.Abort('unexpected remote reply: %s' % r) while True: d = fp.read(4096) if not d: break self._send(d) self._send("", flush=True) return self.pipei def _recv(self): l = self.pipei.readline() if l == '\n': self.readerr() msg = _('check previous remote output') self._abort(error.OutOfBandError(hint=msg)) self.readerr() try: l = int(l) except ValueError: self._abort(error.ResponseError(_("unexpected response:"), l)) return self.pipei.read(l) def _send(self, data, flush=False): self.pipeo.write("%d\n" % len(data)) if data: self.pipeo.write(data) if flush: self.pipeo.flush() self.readerr() def lock(self): self._call("lock") return remotelock(self) def unlock(self): self._call("unlock") def addchangegroup(self, cg, source, url, lock=None): '''Send a changegroup to the remote server. Return an integer similar to unbundle(). 
DEPRECATED, since it requires locking the remote.''' d = self._call("addchangegroup") if d: self._abort(error.RepoError(_("push refused: %s") % d)) while True: d = cg.read(4096) if not d: break self.pipeo.write(d) self.readerr() self.pipeo.flush() self.readerr() r = self._recv() if not r: return 1 try: return int(r) except ValueError: self._abort(error.ResponseError(_("unexpected response:"), r)) instance = sshpeer mercurial-3.7.3/mercurial/__version__.py0000644000175000017500000000007312676531541017743 0ustar mpmmpm00000000000000# this file is autogenerated by setup.py version = "3.7.3" mercurial-3.7.3/mercurial/phases.py0000644000175000017500000004122512676531525016753 0ustar mpmmpm00000000000000""" Mercurial phases support code --- Copyright 2011 Pierre-Yves David Logilab SA Augie Fackler This software may be used and distributed according to the terms of the GNU General Public License version 2 or any later version. --- This module implements most phase logic in mercurial. Basic Concept ============= A 'changeset phase' is an indicator that tells us how a changeset is manipulated and communicated. The details of each phase is described below, here we describe the properties they have in common. Like bookmarks, phases are not stored in history and thus are not permanent and leave no audit trail. First, no changeset can be in two phases at once. Phases are ordered, so they can be considered from lowest to highest. The default, lowest phase is 'public' - this is the normal phase of existing changesets. A child changeset can not be in a lower phase than its parents. These phases share a hierarchy of traits: immutable shared public: X X draft: X secret: Local commits are draft by default. Phase Movement and Exchange =========================== Phase data is exchanged by pushkey on pull and push. Some servers have a publish option set, we call such a server a "publishing server". Pushing a draft changeset to a publishing server changes the phase to public. 
A small list of fact/rules define the exchange of phase: * old client never changes server states * pull never changes server states * publish and old server changesets are seen as public by client * any secret changeset seen in another repository is lowered to at least draft Here is the final table summing up the 49 possible use cases of phase exchange: server old publish non-publish N X N D P N D P old client pull N - X/X - X/D X/P - X/D X/P X - X/X - X/D X/P - X/D X/P push X X/X X/X X/P X/P X/P X/D X/D X/P new client pull N - P/X - P/D P/P - D/D P/P D - P/X - P/D P/P - D/D P/P P - P/X - P/D P/P - P/D P/P push D P/X P/X P/P P/P P/P D/D D/D P/P P P/X P/X P/P P/P P/P P/P P/P P/P Legend: A/B = final state on client / state on server * N = new/not present, * P = public, * D = draft, * X = not tracked (i.e., the old client or server has no internal way of recording the phase.) passive = only pushes A cell here can be read like this: "When a new client pushes a draft changeset (D) to a publishing server where it's not present (N), it's marked public on both sides (P/P)." Note: old client behave as a publishing server with draft only content - other people see it as public - content is pushed as draft """ from __future__ import absolute_import import errno import os from .i18n import _ from .node import ( bin, hex, nullid, nullrev, short, ) from . import ( error, ) allphases = public, draft, secret = range(3) trackedphases = allphases[1:] phasenames = ['public', 'draft', 'secret'] def _readroots(repo, phasedefaults=None): """Read phase roots from disk phasedefaults is a list of fn(repo, roots) callable, which are executed if the phase roots file does not exist. When phases are being initialized on an existing repository, this could be used to set selected changesets phase to something else than public. Return (roots, dirty) where dirty is true if roots differ from what is being stored. 
""" repo = repo.unfiltered() dirty = False roots = [set() for i in allphases] try: f = None if 'HG_PENDING' in os.environ: try: f = repo.svfs('phaseroots.pending') except IOError as inst: if inst.errno != errno.ENOENT: raise if f is None: f = repo.svfs('phaseroots') try: for line in f: phase, nh = line.split() roots[int(phase)].add(bin(nh)) finally: f.close() except IOError as inst: if inst.errno != errno.ENOENT: raise if phasedefaults: for f in phasedefaults: roots = f(repo, roots) dirty = True return roots, dirty class phasecache(object): def __init__(self, repo, phasedefaults, _load=True): if _load: # Cheap trick to allow shallow-copy without copy module self.phaseroots, self.dirty = _readroots(repo, phasedefaults) self._phaserevs = None self._phasesets = None self.filterunknown(repo) self.opener = repo.svfs def copy(self): # Shallow copy meant to ensure isolation in # advance/retractboundary(), nothing more. ph = self.__class__(None, None, _load=False) ph.phaseroots = self.phaseroots[:] ph.dirty = self.dirty ph.opener = self.opener ph._phaserevs = self._phaserevs ph._phasesets = self._phasesets return ph def replace(self, phcache): """replace all values in 'self' with content of phcache""" for a in ('phaseroots', 'dirty', 'opener', '_phaserevs', '_phasesets'): setattr(self, a, getattr(phcache, a)) def _getphaserevsnative(self, repo): repo = repo.unfiltered() nativeroots = [] for phase in trackedphases: nativeroots.append(map(repo.changelog.rev, self.phaseroots[phase])) return repo.changelog.computephases(nativeroots) def _computephaserevspure(self, repo): repo = repo.unfiltered() revs = [public] * len(repo.changelog) self._phaserevs = revs self._populatephaseroots(repo) for phase in trackedphases: roots = map(repo.changelog.rev, self.phaseroots[phase]) if roots: for rev in roots: revs[rev] = phase for rev in repo.changelog.descendants(roots): revs[rev] = phase def loadphaserevs(self, repo): """ensure phase information is loaded in the object""" if 
self._phaserevs is None: try: if repo.ui.configbool('experimental', 'nativephaseskillswitch'): self._computephaserevspure(repo) else: res = self._getphaserevsnative(repo) self._phaserevs, self._phasesets = res except AttributeError: self._computephaserevspure(repo) def invalidate(self): self._phaserevs = None self._phasesets = None def _populatephaseroots(self, repo): """Fills the _phaserevs cache with phases for the roots. """ cl = repo.changelog phaserevs = self._phaserevs for phase in trackedphases: roots = map(cl.rev, self.phaseroots[phase]) for root in roots: phaserevs[root] = phase def phase(self, repo, rev): # We need a repo argument here to be able to build _phaserevs # if necessary. The repository instance is not stored in # phasecache to avoid reference cycles. The changelog instance # is not stored because it is a filecache() property and can # be replaced without us being notified. if rev == nullrev: return public if rev < nullrev: raise ValueError(_('cannot lookup negative revision')) if self._phaserevs is None or rev >= len(self._phaserevs): self.invalidate() self.loadphaserevs(repo) return self._phaserevs[rev] def write(self): if not self.dirty: return f = self.opener('phaseroots', 'w', atomictemp=True) try: self._write(f) finally: f.close() def _write(self, fp): for phase, roots in enumerate(self.phaseroots): for h in roots: fp.write('%i %s\n' % (phase, hex(h))) self.dirty = False def _updateroots(self, phase, newroots, tr): self.phaseroots[phase] = newroots self.invalidate() self.dirty = True tr.addfilegenerator('phase', ('phaseroots',), self._write) tr.hookargs['phases_moved'] = '1' def advanceboundary(self, repo, tr, targetphase, nodes): # Be careful to preserve shallow-copied values: do not update # phaseroots values, replace them. 
repo = repo.unfiltered() delroots = [] # set of root deleted by this path for phase in xrange(targetphase + 1, len(allphases)): # filter nodes that are not in a compatible phase already nodes = [n for n in nodes if self.phase(repo, repo[n].rev()) >= phase] if not nodes: break # no roots to move anymore olds = self.phaseroots[phase] roots = set(ctx.node() for ctx in repo.set( 'roots((%ln::) - (%ln::%ln))', olds, olds, nodes)) if olds != roots: self._updateroots(phase, roots, tr) # some roots may need to be declared for lower phases delroots.extend(olds - roots) # declare deleted root in the target phase if targetphase != 0: self.retractboundary(repo, tr, targetphase, delroots) repo.invalidatevolatilesets() def retractboundary(self, repo, tr, targetphase, nodes): # Be careful to preserve shallow-copied values: do not update # phaseroots values, replace them. repo = repo.unfiltered() currentroots = self.phaseroots[targetphase] newroots = [n for n in nodes if self.phase(repo, repo[n].rev()) < targetphase] if newroots: if nullid in newroots: raise error.Abort(_('cannot change null revision phase')) currentroots = currentroots.copy() currentroots.update(newroots) # Only compute new roots for revs above the roots that are being # retracted. minnewroot = min(repo[n].rev() for n in newroots) aboveroots = [n for n in currentroots if repo[n].rev() >= minnewroot] updatedroots = repo.set('roots(%ln::)', aboveroots) finalroots = set(n for n in currentroots if repo[n].rev() < minnewroot) finalroots.update(ctx.node() for ctx in updatedroots) self._updateroots(targetphase, finalroots, tr) repo.invalidatevolatilesets() def filterunknown(self, repo): """remove unknown nodes from the phase boundary Nothing is lost as unknown nodes only hold data for their descendants. 
""" filtered = False nodemap = repo.changelog.nodemap # to filter unknown nodes for phase, nodes in enumerate(self.phaseroots): missing = sorted(node for node in nodes if node not in nodemap) if missing: for mnode in missing: repo.ui.debug( 'removing unknown node %s from %i-phase boundary\n' % (short(mnode), phase)) nodes.symmetric_difference_update(missing) filtered = True if filtered: self.dirty = True # filterunknown is called by repo.destroyed, we may have no changes in # root but phaserevs contents is certainly invalid (or at least we # have not proper way to check that). related to issue 3858. # # The other caller is __init__ that have no _phaserevs initialized # anyway. If this change we should consider adding a dedicated # "destroyed" function to phasecache or a proper cache key mechanism # (see branchmap one) self.invalidate() def advanceboundary(repo, tr, targetphase, nodes): """Add nodes to a phase changing other nodes phases if necessary. This function move boundary *forward* this means that all nodes are set in the target phase or kept in a *lower* phase. Simplify boundary to contains phase roots only.""" phcache = repo._phasecache.copy() phcache.advanceboundary(repo, tr, targetphase, nodes) repo._phasecache.replace(phcache) def retractboundary(repo, tr, targetphase, nodes): """Set nodes back to a phase changing other nodes phases if necessary. This function move boundary *backward* this means that all nodes are set in the target phase or kept in a *higher* phase. Simplify boundary to contains phase roots only.""" phcache = repo._phasecache.copy() phcache.retractboundary(repo, tr, targetphase, nodes) repo._phasecache.replace(phcache) def listphases(repo): """List phases root for serialization over pushkey""" keys = {} value = '%i' % draft for root in repo._phasecache.phaseroots[draft]: keys[hex(root)] = value if repo.publishing(): # Add an extra data to let remote know we are a publishing # repo. Publishing repo can't just pretend they are old repo. 
# When pushing to a publishing repo, the client still need to # push phase boundary # # Push do not only push changeset. It also push phase data. # New phase data may apply to common changeset which won't be # push (as they are common). Here is a very simple example: # # 1) repo A push changeset X as draft to repo B # 2) repo B make changeset X public # 3) repo B push to repo A. X is not pushed but the data that # X as now public should # # The server can't handle it on it's own as it has no idea of # client phase data. keys['publishing'] = 'True' return keys def pushphase(repo, nhex, oldphasestr, newphasestr): """List phases root for serialization over pushkey""" repo = repo.unfiltered() with repo.lock(): currentphase = repo[nhex].phase() newphase = abs(int(newphasestr)) # let's avoid negative index surprise oldphase = abs(int(oldphasestr)) # let's avoid negative index surprise if currentphase == oldphase and newphase < oldphase: with repo.transaction('pushkey-phase') as tr: advanceboundary(repo, tr, newphase, [bin(nhex)]) return 1 elif currentphase == newphase: # raced, but got correct result return 1 else: return 0 def analyzeremotephases(repo, subset, roots): """Compute phases heads and root in a subset of node from root dict * subset is heads of the subset * roots is { => phase} mapping. key and value are string. 
Accept unknown element input """ repo = repo.unfiltered() # build list from dictionary draftroots = [] nodemap = repo.changelog.nodemap # to filter unknown nodes for nhex, phase in roots.iteritems(): if nhex == 'publishing': # ignore data related to publish option continue node = bin(nhex) phase = int(phase) if phase == 0: if node != nullid: repo.ui.warn(_('ignoring inconsistent public root' ' from remote: %s\n') % nhex) elif phase == 1: if node in nodemap: draftroots.append(node) else: repo.ui.warn(_('ignoring unexpected root from remote: %i %s\n') % (phase, nhex)) # compute heads publicheads = newheads(repo, subset, draftroots) return publicheads, draftroots def newheads(repo, heads, roots): """compute new head of a subset minus another * `heads`: define the first subset * `roots`: define the second we subtract from the first""" repo = repo.unfiltered() revset = repo.set('heads((%ln + parents(%ln)) - (%ln::%ln))', heads, roots, roots, heads) return [c.node() for c in revset] def newcommitphase(ui): """helper to get the target phase of new commit Handle all possible values for the phases.new-commit options. """ v = ui.config('phases', 'new-commit', draft) try: return phasenames.index(v) except ValueError: try: return int(v) except ValueError: msg = _("phases.new-commit: not a valid phase name ('%s')") raise error.ConfigError(msg % v) def hassecret(repo): """utility function that check if a repo have any secret changeset.""" return bool(repo._phasecache.phaseroots[2]) mercurial-3.7.3/mercurial/changelog.py0000644000175000017500000003451512676531524017422 0ustar mpmmpm00000000000000# changelog.py - changelog class for mercurial # # Copyright 2005-2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import from .i18n import _ from .node import ( bin, hex, nullid, ) from . 
import ( encoding, error, revlog, util, ) _defaultextra = {'branch': 'default'} def _string_escape(text): """ >>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)} >>> s = "ab%(nl)scd%(bs)s%(bs)sn%(nul)sab%(cr)scd%(bs)s%(nl)s" % d >>> s 'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n' >>> res = _string_escape(s) >>> s == res.decode('string_escape') True """ # subset of the string_escape codec text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r') return text.replace('\0', '\\0') def decodeextra(text): """ >>> sorted(decodeextra(encodeextra({'foo': 'bar', 'baz': chr(0) + '2'}) ... ).iteritems()) [('baz', '\\x002'), ('branch', 'default'), ('foo', 'bar')] >>> sorted(decodeextra(encodeextra({'foo': 'bar', ... 'baz': chr(92) + chr(0) + '2'}) ... ).iteritems()) [('baz', '\\\\\\x002'), ('branch', 'default'), ('foo', 'bar')] """ extra = _defaultextra.copy() for l in text.split('\0'): if l: if '\\0' in l: # fix up \0 without getting into trouble with \\0 l = l.replace('\\\\', '\\\\\n') l = l.replace('\\0', '\0') l = l.replace('\n', '') k, v = l.decode('string_escape').split(':', 1) extra[k] = v return extra def encodeextra(d): # keys must be sorted to produce a deterministic changelog entry items = [_string_escape('%s:%s' % (k, d[k])) for k in sorted(d)] return "\0".join(items) def stripdesc(desc): """strip trailing whitespace and leading and trailing empty lines""" return '\n'.join([l.rstrip() for l in desc.splitlines()]).strip('\n') class appender(object): '''the changelog index must be updated last on disk, so we use this class to delay writes to it''' def __init__(self, vfs, name, mode, buf): self.data = buf fp = vfs(name, mode) self.fp = fp self.offset = fp.tell() self.size = vfs.fstat(fp).st_size def end(self): return self.size + len("".join(self.data)) def tell(self): return self.offset def flush(self): pass def close(self): self.fp.close() def seek(self, offset, whence=0): '''virtual file offset spans real file and data''' if whence == 0: 
self.offset = offset elif whence == 1: self.offset += offset elif whence == 2: self.offset = self.end() + offset if self.offset < self.size: self.fp.seek(self.offset) def read(self, count=-1): '''only trick here is reads that span real file and data''' ret = "" if self.offset < self.size: s = self.fp.read(count) ret = s self.offset += len(s) if count > 0: count -= len(s) if count != 0: doff = self.offset - self.size self.data.insert(0, "".join(self.data)) del self.data[1:] s = self.data[0][doff:doff + count] self.offset += len(s) ret += s return ret def write(self, s): self.data.append(str(s)) self.offset += len(s) def _divertopener(opener, target): """build an opener that writes in 'target.a' instead of 'target'""" def _divert(name, mode='r'): if name != target: return opener(name, mode) return opener(name + ".a", mode) return _divert def _delayopener(opener, target, buf): """build an opener that stores chunks in 'buf' instead of 'target'""" def _delay(name, mode='r'): if name != target: return opener(name, mode) return appender(opener, name, mode, buf) return _delay class changelog(revlog.revlog): def __init__(self, opener): revlog.revlog.__init__(self, opener, "00changelog.i") if self._initempty: # changelogs don't benefit from generaldelta self.version &= ~revlog.REVLOGGENERALDELTA self._generaldelta = False self._realopener = opener self._delayed = False self._delaybuf = None self._divert = False self.filteredrevs = frozenset() def tip(self): """filtered version of revlog.tip""" for i in xrange(len(self) -1, -2, -1): if i not in self.filteredrevs: return self.node(i) def __contains__(self, rev): """filtered version of revlog.__contains__""" return (0 <= rev < len(self) and rev not in self.filteredrevs) def __iter__(self): """filtered version of revlog.__iter__""" if len(self.filteredrevs) == 0: return revlog.revlog.__iter__(self) def filterediter(): for i in xrange(len(self)): if i not in self.filteredrevs: yield i return filterediter() def revs(self, start=0, 
stop=None): """filtered version of revlog.revs""" for i in super(changelog, self).revs(start, stop): if i not in self.filteredrevs: yield i @util.propertycache def nodemap(self): # XXX need filtering too self.rev(self.node(0)) return self._nodecache def reachableroots(self, minroot, heads, roots, includepath=False): return self.index.reachableroots2(minroot, heads, roots, includepath) def headrevs(self): if self.filteredrevs: try: return self.index.headrevsfiltered(self.filteredrevs) # AttributeError covers non-c-extension environments and # old c extensions without filter handling. except AttributeError: return self._headrevs() return super(changelog, self).headrevs() def strip(self, *args, **kwargs): # XXX make something better than assert # We can't expect proper strip behavior if we are filtered. assert not self.filteredrevs super(changelog, self).strip(*args, **kwargs) def rev(self, node): """filtered version of revlog.rev""" r = super(changelog, self).rev(node) if r in self.filteredrevs: raise error.FilteredLookupError(hex(node), self.indexfile, _('filtered node')) return r def node(self, rev): """filtered version of revlog.node""" if rev in self.filteredrevs: raise error.FilteredIndexError(rev) return super(changelog, self).node(rev) def linkrev(self, rev): """filtered version of revlog.linkrev""" if rev in self.filteredrevs: raise error.FilteredIndexError(rev) return super(changelog, self).linkrev(rev) def parentrevs(self, rev): """filtered version of revlog.parentrevs""" if rev in self.filteredrevs: raise error.FilteredIndexError(rev) return super(changelog, self).parentrevs(rev) def flags(self, rev): """filtered version of revlog.flags""" if rev in self.filteredrevs: raise error.FilteredIndexError(rev) return super(changelog, self).flags(rev) def delayupdate(self, tr): "delay visibility of index updates to other readers" if not self._delayed: if len(self) == 0: self._divert = True if self._realopener.exists(self.indexfile + '.a'): 
self._realopener.unlink(self.indexfile + '.a') self.opener = _divertopener(self._realopener, self.indexfile) else: self._delaybuf = [] self.opener = _delayopener(self._realopener, self.indexfile, self._delaybuf) self._delayed = True tr.addpending('cl-%i' % id(self), self._writepending) tr.addfinalize('cl-%i' % id(self), self._finalize) def _finalize(self, tr): "finalize index updates" self._delayed = False self.opener = self._realopener # move redirected index data back into place if self._divert: assert not self._delaybuf tmpname = self.indexfile + ".a" nfile = self.opener.open(tmpname) nfile.close() self.opener.rename(tmpname, self.indexfile) elif self._delaybuf: fp = self.opener(self.indexfile, 'a') fp.write("".join(self._delaybuf)) fp.close() self._delaybuf = None self._divert = False # split when we're done self.checkinlinesize(tr) def readpending(self, file): """read index data from a "pending" file During a transaction, the actual changeset data is already stored in the main file, but not yet finalized in the on-disk index. Instead, a "pending" index is written by the transaction logic. If this function is running, we are likely in a subprocess invoked in a hook. The subprocess is informed that it is within a transaction and needs to access its content. 
This function will read all the index data out of the pending file and overwrite the main index.""" if not self.opener.exists(file): return # no pending data for changelog r = revlog.revlog(self.opener, file) self.index = r.index self.nodemap = r.nodemap self._nodecache = r._nodecache self._chunkcache = r._chunkcache def _writepending(self, tr): "create a file containing the unfinalized state for pretxnchangegroup" if self._delaybuf: # make a temporary copy of the index fp1 = self._realopener(self.indexfile) pendingfilename = self.indexfile + ".a" # register as a temp file to ensure cleanup on failure tr.registertmp(pendingfilename) # write existing data fp2 = self._realopener(pendingfilename, "w") fp2.write(fp1.read()) # add pending data fp2.write("".join(self._delaybuf)) fp2.close() # switch modes so finalize can simply rename self._delaybuf = None self._divert = True self.opener = _divertopener(self._realopener, self.indexfile) if self._divert: return True return False def checkinlinesize(self, tr, fp=None): if not self._delayed: revlog.revlog.checkinlinesize(self, tr, fp) def read(self, node): """ format used: nodeid\n : manifest node in ascii user\n : user, no \n or \r allowed time tz extra\n : date (time is int or float, timezone is int) : extra is metadata, encoded and separated by '\0' : older versions ignore it files\n\n : files modified by the cset, no \n or \r allowed (.*) : comment (free text, ideally utf-8) changelog v0 doesn't use extra """ text = self.revision(node) if not text: return (nullid, "", (0, 0), [], "", _defaultextra) last = text.index("\n\n") desc = encoding.tolocal(text[last + 2:]) l = text[:last].split('\n') manifest = bin(l[0]) user = encoding.tolocal(l[1]) tdata = l[2].split(' ', 2) if len(tdata) != 3: time = float(tdata[0]) try: # various tools did silly things with the time zone field. 
timezone = int(tdata[1]) except ValueError: timezone = 0 extra = _defaultextra else: time, timezone = float(tdata[0]), int(tdata[1]) extra = decodeextra(tdata[2]) files = l[3:] return (manifest, user, (time, timezone), files, desc, extra) def readfiles(self, node): """ short version of read that only returns the files modified by the cset """ text = self.revision(node) if not text: return [] last = text.index("\n\n") l = text[:last].split('\n') return l[3:] def add(self, manifest, files, desc, transaction, p1, p2, user, date=None, extra=None): # Convert to UTF-8 encoded bytestrings as the very first # thing: calling any method on a localstr object will turn it # into a str object and the cached UTF-8 string is thus lost. user, desc = encoding.fromlocal(user), encoding.fromlocal(desc) user = user.strip() # An empty username or a username with a "\n" will make the # revision text contain two "\n\n" sequences -> corrupt # repository since read cannot unpack the revision. if not user: raise error.RevlogError(_("empty username")) if "\n" in user: raise error.RevlogError(_("username %s contains a newline") % repr(user)) desc = stripdesc(desc) if date: parseddate = "%d %d" % util.parsedate(date) else: parseddate = "%d %d" % util.makedate() if extra: branch = extra.get("branch") if branch in ("default", ""): del extra["branch"] elif branch in (".", "null", "tip"): raise error.RevlogError(_('the name \'%s\' is reserved') % branch) if extra: extra = encodeextra(extra) parseddate = "%s %s" % (parseddate, extra) l = [hex(manifest), user, parseddate] + sorted(files) + ["", desc] text = "\n".join(l) return self.addrevision(text, transaction, len(self), p1, p2) def branchinfo(self, rev): """return the branch name and open/close state of a revision This function exists because creating a changectx object just to access this is costly.""" extra = self.read(rev)[5] return encoding.tolocal(extra.get("branch")), 'close' in extra 
mercurial-3.7.3/mercurial/fancyopts.py0000644000175000017500000000703412676531524017475 0ustar mpmmpm00000000000000# fancyopts.py - better command line parsing # # Copyright 2005-2009 Matt Mackall and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import getopt from .i18n import _ from . import error def gnugetopt(args, options, longoptions): """Parse options mostly like getopt.gnu_getopt. This is different from getopt.gnu_getopt in that an argument of - will become an argument of - instead of vanishing completely. """ extraargs = [] if '--' in args: stopindex = args.index('--') extraargs = args[stopindex + 1:] args = args[:stopindex] opts, parseargs = getopt.getopt(args, options, longoptions) args = [] while parseargs: arg = parseargs.pop(0) if arg and arg[0] == '-' and len(arg) > 1: parseargs.insert(0, arg) topts, newparseargs = getopt.getopt(parseargs, options, longoptions) opts = opts + topts parseargs = newparseargs else: args.append(arg) args.extend(extraargs) return opts, args def fancyopts(args, options, state, gnu=False): """ read args, parse options, and store options in state each option is a tuple of: short option or '' long option default value description option value label(optional) option types include: boolean or none - option sets variable in state to true string - parameter string is stored in state list - parameter string is added to a list integer - parameter strings is stored as int function - call function with parameter non-option args are returned """ namelist = [] shortlist = '' argmap = {} defmap = {} for option in options: if len(option) == 5: short, name, default, comment, dummy = option else: short, name, default, comment = option # convert opts to getopt format oname = name name = name.replace('-', '_') argmap['-' + short] = argmap['--' + oname] = name defmap[name] = default # copy defaults to state 
if isinstance(default, list): state[name] = default[:] elif callable(default): state[name] = None else: state[name] = default # does it take a parameter? if not (default is None or default is True or default is False): if short: short += ':' if oname: oname += '=' if short: shortlist += short if name: namelist.append(oname) # parse arguments if gnu: parse = gnugetopt else: parse = getopt.getopt opts, args = parse(args, shortlist, namelist) # transfer result to state for opt, val in opts: name = argmap[opt] obj = defmap[name] t = type(obj) if callable(obj): state[name] = defmap[name](val) elif t is type(1): try: state[name] = int(val) except ValueError: raise error.Abort(_('invalid value %r for option %s, ' 'expected int') % (val, opt)) elif t is type(''): state[name] = val elif t is type([]): state[name].append(val) elif t is type(None) or t is type(False): state[name] = True # return unparsed args return args mercurial-3.7.3/mercurial/worker.py0000644000175000017500000001070712676531525017002 0ustar mpmmpm00000000000000# worker.py - master-slave parallelism support # # Copyright 2013 Facebook, Inc. # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import errno import os import signal import sys import threading from .i18n import _ from . 
import error def countcpus(): '''try to count the number of CPUs on the system''' # posix try: n = int(os.sysconf('SC_NPROCESSORS_ONLN')) if n > 0: return n except (AttributeError, ValueError): pass # windows try: n = int(os.environ['NUMBER_OF_PROCESSORS']) if n > 0: return n except (KeyError, ValueError): pass return 1 def _numworkers(ui): s = ui.config('worker', 'numcpus') if s: try: n = int(s) if n >= 1: return n except ValueError: raise error.Abort(_('number of cpus must be an integer')) return min(max(countcpus(), 4), 32) if os.name == 'posix': _startupcost = 0.01 else: _startupcost = 1e30 def worthwhile(ui, costperop, nops): '''try to determine whether the benefit of multiple processes can outweigh the cost of starting them''' linear = costperop * nops workers = _numworkers(ui) benefit = linear - (_startupcost * workers + linear / workers) return benefit >= 0.15 def worker(ui, costperarg, func, staticargs, args): '''run a function, possibly in parallel in multiple worker processes. 
returns a progress iterator costperarg - cost of a single task func - function to run staticargs - arguments to pass to every invocation of the function args - arguments to split into chunks, to pass to individual workers ''' if worthwhile(ui, costperarg, len(args)): return _platformworker(ui, func, staticargs, args) return func(*staticargs + (args,)) def _posixworker(ui, func, staticargs, args): rfd, wfd = os.pipe() workers = _numworkers(ui) oldhandler = signal.getsignal(signal.SIGINT) signal.signal(signal.SIGINT, signal.SIG_IGN) pids, problem = [], [0] for pargs in partition(args, workers): pid = os.fork() if pid == 0: signal.signal(signal.SIGINT, oldhandler) try: os.close(rfd) for i, item in func(*(staticargs + (pargs,))): os.write(wfd, '%d %s\n' % (i, item)) os._exit(0) except KeyboardInterrupt: os._exit(255) # other exceptions are allowed to propagate, we rely # on lock.py's pid checks to avoid release callbacks pids.append(pid) pids.reverse() os.close(wfd) fp = os.fdopen(rfd, 'rb', 0) def killworkers(): # if one worker bails, there's no good reason to wait for the rest for p in pids: try: os.kill(p, signal.SIGTERM) except OSError as err: if err.errno != errno.ESRCH: raise def waitforworkers(): for _pid in pids: st = _exitstatus(os.wait()[1]) if st and not problem[0]: problem[0] = st killworkers() t = threading.Thread(target=waitforworkers) t.start() def cleanup(): signal.signal(signal.SIGINT, oldhandler) t.join() status = problem[0] if status: if status < 0: os.kill(os.getpid(), -status) sys.exit(status) try: for line in fp: l = line.split(' ', 1) yield int(l[0]), l[1][:-1] except: # re-raises killworkers() cleanup() raise cleanup() def _posixexitstatus(code): '''convert a posix exit status into the same form returned by os.spawnv returns None if the process was stopped instead of exiting''' if os.WIFEXITED(code): return os.WEXITSTATUS(code) elif os.WIFSIGNALED(code): return -os.WTERMSIG(code) if os.name != 'nt': _platformworker = _posixworker _exitstatus = 
_posixexitstatus def partition(lst, nslices): '''partition a list into N slices of equal size''' n = len(lst) chunk, slop = n / nslices, n % nslices end = 0 for i in xrange(nslices): start = end end = start + chunk if slop: end += 1 slop -= 1 yield lst[start:end] mercurial-3.7.3/mercurial/hgweb/0000755000175000017500000000000012676531544016207 5ustar mpmmpm00000000000000mercurial-3.7.3/mercurial/hgweb/common.py0000644000175000017500000001465312676531524020060 0ustar mpmmpm00000000000000# hgweb/common.py - Utility functions needed by hgweb_mod and hgwebdir_mod # # Copyright 21 May 2005 - (c) 2005 Jake Edge # Copyright 2005, 2006 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import BaseHTTPServer import errno import mimetypes import os HTTP_OK = 200 HTTP_NOT_MODIFIED = 304 HTTP_BAD_REQUEST = 400 HTTP_UNAUTHORIZED = 401 HTTP_FORBIDDEN = 403 HTTP_NOT_FOUND = 404 HTTP_METHOD_NOT_ALLOWED = 405 HTTP_SERVER_ERROR = 500 def ismember(ui, username, userlist): """Check if username is a member of userlist. If userlist has a single '*' member, all users are considered members. Can be overridden by extensions to provide more complex authorization schemes. """ return userlist == ['*'] or username in userlist def checkauthz(hgweb, req, op): '''Check permission for operation based on request data (including authentication info). 
Return if op allowed, else raise an ErrorResponse exception.''' user = req.env.get('REMOTE_USER') deny_read = hgweb.configlist('web', 'deny_read') if deny_read and (not user or ismember(hgweb.repo.ui, user, deny_read)): raise ErrorResponse(HTTP_UNAUTHORIZED, 'read not authorized') allow_read = hgweb.configlist('web', 'allow_read') if allow_read and (not ismember(hgweb.repo.ui, user, allow_read)): raise ErrorResponse(HTTP_UNAUTHORIZED, 'read not authorized') if op == 'pull' and not hgweb.allowpull: raise ErrorResponse(HTTP_UNAUTHORIZED, 'pull not authorized') elif op == 'pull' or op is None: # op is None for interface requests return # enforce that you can only push using POST requests if req.env['REQUEST_METHOD'] != 'POST': msg = 'push requires POST request' raise ErrorResponse(HTTP_METHOD_NOT_ALLOWED, msg) # require ssl by default for pushing, auth info cannot be sniffed # and replayed scheme = req.env.get('wsgi.url_scheme') if hgweb.configbool('web', 'push_ssl', True) and scheme != 'https': raise ErrorResponse(HTTP_FORBIDDEN, 'ssl required') deny = hgweb.configlist('web', 'deny_push') if deny and (not user or ismember(hgweb.repo.ui, user, deny)): raise ErrorResponse(HTTP_UNAUTHORIZED, 'push not authorized') allow = hgweb.configlist('web', 'allow_push') if not (allow and ismember(hgweb.repo.ui, user, allow)): raise ErrorResponse(HTTP_UNAUTHORIZED, 'push not authorized') # Hooks for hgweb permission checks; extensions can add hooks here. # Each hook is invoked like this: hook(hgweb, request, operation), # where operation is either read, pull or push. Hooks should either # raise an ErrorResponse exception, or just return. # # It is possible to do both authentication and authorization through # this. 
permhooks = [checkauthz] class ErrorResponse(Exception): def __init__(self, code, message=None, headers=[]): if message is None: message = _statusmessage(code) Exception.__init__(self, message) self.code = code self.headers = headers class continuereader(object): def __init__(self, f, write): self.f = f self._write = write self.continued = False def read(self, amt=-1): if not self.continued: self.continued = True self._write('HTTP/1.1 100 Continue\r\n\r\n') return self.f.read(amt) def __getattr__(self, attr): if attr in ('close', 'readline', 'readlines', '__iter__'): return getattr(self.f, attr) raise AttributeError def _statusmessage(code): responses = BaseHTTPServer.BaseHTTPRequestHandler.responses return responses.get(code, ('Error', 'Unknown error'))[0] def statusmessage(code, message=None): return '%d %s' % (code, message or _statusmessage(code)) def get_stat(spath, fn): """stat fn if it exists, spath otherwise""" cl_path = os.path.join(spath, fn) if os.path.exists(cl_path): return os.stat(cl_path) else: return os.stat(spath) def get_mtime(spath): return get_stat(spath, "00changelog.i").st_mtime def staticfile(directory, fname, req): """return a file inside directory with guessed Content-Type header fname always uses '/' as directory separator and isn't allowed to contain unusual path components. Content-Type is guessed using the mimetypes module. Return an empty string if fname is illegal or file not found. 
""" parts = fname.split('/') for part in parts: if (part in ('', os.curdir, os.pardir) or os.sep in part or os.altsep is not None and os.altsep in part): return fpath = os.path.join(*parts) if isinstance(directory, str): directory = [directory] for d in directory: path = os.path.join(d, fpath) if os.path.exists(path): break try: os.stat(path) ct = mimetypes.guess_type(path)[0] or "text/plain" fp = open(path, 'rb') data = fp.read() fp.close() req.respond(HTTP_OK, ct, body=data) except TypeError: raise ErrorResponse(HTTP_SERVER_ERROR, 'illegal filename') except OSError as err: if err.errno == errno.ENOENT: raise ErrorResponse(HTTP_NOT_FOUND) else: raise ErrorResponse(HTTP_SERVER_ERROR, err.strerror) def paritygen(stripecount, offset=0): """count parity of horizontal stripes for easier reading""" if stripecount and offset: # account for offset, e.g. due to building the list in reverse count = (stripecount + offset) % stripecount parity = (stripecount + offset) / stripecount & 1 else: count = 0 parity = 0 while True: yield parity count += 1 if stripecount and count >= stripecount: parity = 1 - parity count = 0 def get_contact(config): """Return repo contact information or empty string. web.contact is the primary source, but if that is not set, try ui.username or $EMAIL as a fallback to display something useful. """ return (config("web", "contact") or config("ui", "username") or os.environ.get("EMAIL") or "") def caching(web, req): tag = str(web.mtime) if req.env.get('HTTP_IF_NONE_MATCH') == tag: raise ErrorResponse(HTTP_NOT_MODIFIED) req.headers.append(('ETag', tag)) mercurial-3.7.3/mercurial/hgweb/protocol.py0000644000175000017500000000630612676531525020426 0ustar mpmmpm00000000000000# # Copyright 21 May 2005 - (c) 2005 Jake Edge # Copyright 2005-2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
from __future__ import absolute_import import cStringIO import cgi import urllib import zlib from .common import ( HTTP_OK, ) from .. import ( util, wireproto, ) HGTYPE = 'application/mercurial-0.1' HGERRTYPE = 'application/hg-error' class webproto(wireproto.abstractserverproto): def __init__(self, req, ui): self.req = req self.response = '' self.ui = ui def getargs(self, args): knownargs = self._args() data = {} keys = args.split() for k in keys: if k == '*': star = {} for key in knownargs.keys(): if key != 'cmd' and key not in keys: star[key] = knownargs[key][0] data['*'] = star else: data[k] = knownargs[k][0] return [data[k] for k in keys] def _args(self): args = self.req.form.copy() chunks = [] i = 1 while True: h = self.req.env.get('HTTP_X_HGARG_' + str(i)) if h is None: break chunks += [h] i += 1 args.update(cgi.parse_qs(''.join(chunks), keep_blank_values=True)) return args def getfile(self, fp): length = int(self.req.env['CONTENT_LENGTH']) for s in util.filechunkiter(self.req, limit=length): fp.write(s) def redirect(self): self.oldio = self.ui.fout, self.ui.ferr self.ui.ferr = self.ui.fout = cStringIO.StringIO() def restore(self): val = self.ui.fout.getvalue() self.ui.ferr, self.ui.fout = self.oldio return val def groupchunks(self, cg): z = zlib.compressobj() while True: chunk = cg.read(4096) if not chunk: break yield z.compress(chunk) yield z.flush() def _client(self): return 'remote:%s:%s:%s' % ( self.req.env.get('wsgi.url_scheme') or 'http', urllib.quote(self.req.env.get('REMOTE_HOST', '')), urllib.quote(self.req.env.get('REMOTE_USER', ''))) def iscmd(cmd): return cmd in wireproto.commands def call(repo, req, cmd): p = webproto(req, repo.ui) rsp = wireproto.dispatch(repo, p, cmd) if isinstance(rsp, str): req.respond(HTTP_OK, HGTYPE, body=rsp) return [] elif isinstance(rsp, wireproto.streamres): req.respond(HTTP_OK, HGTYPE) return rsp.gen elif isinstance(rsp, wireproto.pushres): val = p.restore() rsp = '%d\n%s' % (rsp.res, val) req.respond(HTTP_OK, HGTYPE, 
body=rsp) return [] elif isinstance(rsp, wireproto.pusherr): # drain the incoming bundle req.drain() p.restore() rsp = '0\n%s\n' % rsp.res req.respond(HTTP_OK, HGTYPE, body=rsp) return [] elif isinstance(rsp, wireproto.ooberror): rsp = rsp.message req.respond(HTTP_OK, HGERRTYPE, body=rsp) return [] mercurial-3.7.3/mercurial/hgweb/hgwebdir_mod.py0000644000175000017500000004476712676531525021234 0ustar mpmmpm00000000000000# hgweb/hgwebdir_mod.py - Web interface for a directory of repositories. # # Copyright 21 May 2005 - (c) 2005 Jake Edge # Copyright 2005, 2006 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import os import re import time from ..i18n import _ from .common import ( ErrorResponse, HTTP_NOT_FOUND, HTTP_OK, HTTP_SERVER_ERROR, get_contact, get_mtime, ismember, paritygen, staticfile, ) from .request import wsgirequest from .. import ( encoding, error, hg, scmutil, templater, ui as uimod, util, ) from . import ( hgweb_mod, webutil, wsgicgi, ) def cleannames(items): return [(util.pconvert(name).strip('/'), path) for name, path in items] def findrepos(paths): repos = [] for prefix, root in cleannames(paths): roothead, roottail = os.path.split(root) # "foo = /bar/*" or "foo = /bar/**" lets every repo /bar/N in or below # /bar/ be served as as foo/N . # '*' will not search inside dirs with .hg (except .hg/patches), # '**' will search inside dirs with .hg (and thus also find subrepos). 
try: recurse = {'*': False, '**': True}[roottail] except KeyError: repos.append((prefix, root)) continue roothead = os.path.normpath(os.path.abspath(roothead)) paths = scmutil.walkrepos(roothead, followsym=True, recurse=recurse) repos.extend(urlrepos(prefix, roothead, paths)) return repos def urlrepos(prefix, roothead, paths): """yield url paths and filesystem paths from a list of repo paths >>> conv = lambda seq: [(v, util.pconvert(p)) for v,p in seq] >>> conv(urlrepos('hg', '/opt', ['/opt/r', '/opt/r/r', '/opt'])) [('hg/r', '/opt/r'), ('hg/r/r', '/opt/r/r'), ('hg', '/opt')] >>> conv(urlrepos('', '/opt', ['/opt/r', '/opt/r/r', '/opt'])) [('r', '/opt/r'), ('r/r', '/opt/r/r'), ('', '/opt')] """ for path in paths: path = os.path.normpath(path) yield (prefix + '/' + util.pconvert(path[len(roothead):]).lstrip('/')).strip('/'), path def geturlcgivars(baseurl, port): """ Extract CGI variables from baseurl >>> geturlcgivars("http://host.org/base", "80") ('host.org', '80', '/base') >>> geturlcgivars("http://host.org:8000/base", "80") ('host.org', '8000', '/base') >>> geturlcgivars('/base', 8000) ('', '8000', '/base') >>> geturlcgivars("base", '8000') ('', '8000', '/base') >>> geturlcgivars("http://host", '8000') ('host', '8000', '/') >>> geturlcgivars("http://host/", '8000') ('host', '8000', '/') """ u = util.url(baseurl) name = u.host or '' if u.port: port = u.port path = u.path or "" if not path.startswith('/'): path = '/' + path return name, str(port), path class hgwebdir(object): """HTTP server for multiple repositories. Given a configuration, different repositories will be served depending on the request path. Instances are typically used as WSGI applications. 
""" def __init__(self, conf, baseui=None): self.conf = conf self.baseui = baseui self.ui = None self.lastrefresh = 0 self.motd = None self.refresh() def refresh(self): refreshinterval = 20 if self.ui: refreshinterval = self.ui.configint('web', 'refreshinterval', refreshinterval) # refreshinterval <= 0 means to always refresh. if (refreshinterval > 0 and self.lastrefresh + refreshinterval > time.time()): return if self.baseui: u = self.baseui.copy() else: u = uimod.ui() u.setconfig('ui', 'report_untrusted', 'off', 'hgwebdir') u.setconfig('ui', 'nontty', 'true', 'hgwebdir') # displaying bundling progress bar while serving feels wrong and may # break some wsgi implementations. u.setconfig('progress', 'disable', 'true', 'hgweb') if not isinstance(self.conf, (dict, list, tuple)): map = {'paths': 'hgweb-paths'} if not os.path.exists(self.conf): raise error.Abort(_('config file %s not found!') % self.conf) u.readconfig(self.conf, remap=map, trust=True) paths = [] for name, ignored in u.configitems('hgweb-paths'): for path in u.configlist('hgweb-paths', name): paths.append((name, path)) elif isinstance(self.conf, (list, tuple)): paths = self.conf elif isinstance(self.conf, dict): paths = self.conf.items() repos = findrepos(paths) for prefix, root in u.configitems('collections'): prefix = util.pconvert(prefix) for path in scmutil.walkrepos(root, followsym=True): repo = os.path.normpath(path) name = util.pconvert(repo) if name.startswith(prefix): name = name[len(prefix):] repos.append((name.lstrip('/'), repo)) self.repos = repos self.ui = u encoding.encoding = self.ui.config('web', 'encoding', encoding.encoding) self.style = self.ui.config('web', 'style', 'paper') self.templatepath = self.ui.config('web', 'templates', None) self.stripecount = self.ui.config('web', 'stripes', 1) if self.stripecount: self.stripecount = int(self.stripecount) self._baseurl = self.ui.config('web', 'baseurl') prefix = self.ui.config('web', 'prefix', '') if prefix.startswith('/'): prefix = 
prefix[1:] if prefix.endswith('/'): prefix = prefix[:-1] self.prefix = prefix self.lastrefresh = time.time() def run(self): if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."): raise RuntimeError("This function is only intended to be " "called while running as a CGI script.") wsgicgi.launch(self) def __call__(self, env, respond): req = wsgirequest(env, respond) return self.run_wsgi(req) def read_allowed(self, ui, req): """Check allow_read and deny_read config options of a repo's ui object to determine user permissions. By default, with neither option set (or both empty), allow all users to read the repo. There are two ways a user can be denied read access: (1) deny_read is not empty, and the user is unauthenticated or deny_read contains user (or *), and (2) allow_read is not empty and the user is not in allow_read. Return True if user is allowed to read the repo, else return False.""" user = req.env.get('REMOTE_USER') deny_read = ui.configlist('web', 'deny_read', untrusted=True) if deny_read and (not user or ismember(ui, user, deny_read)): return False allow_read = ui.configlist('web', 'allow_read', untrusted=True) # by default, allow reading if no allow_read option has been set if (not allow_read) or ismember(ui, user, allow_read): return True return False def run_wsgi(self, req): try: self.refresh() virtual = req.env.get("PATH_INFO", "").strip('/') tmpl = self.templater(req) ctype = tmpl('mimetype', encoding=encoding.encoding) ctype = templater.stringify(ctype) # a static file if virtual.startswith('static/') or 'static' in req.form: if virtual.startswith('static/'): fname = virtual[7:] else: fname = req.form['static'][0] static = self.ui.config("web", "static", None, untrusted=False) if not static: tp = self.templatepath or templater.templatepaths() if isinstance(tp, str): tp = [tp] static = [os.path.join(p, 'static') for p in tp] staticfile(static, fname, req) return [] # top-level index elif not virtual: req.respond(HTTP_OK, ctype) return 
self.makeindex(req, tmpl) # nested indexes and hgwebs repos = dict(self.repos) virtualrepo = virtual while virtualrepo: real = repos.get(virtualrepo) if real: req.env['REPO_NAME'] = virtualrepo try: # ensure caller gets private copy of ui repo = hg.repository(self.ui.copy(), real) return hgweb_mod.hgweb(repo).run_wsgi(req) except IOError as inst: msg = inst.strerror raise ErrorResponse(HTTP_SERVER_ERROR, msg) except error.RepoError as inst: raise ErrorResponse(HTTP_SERVER_ERROR, str(inst)) up = virtualrepo.rfind('/') if up < 0: break virtualrepo = virtualrepo[:up] # browse subdirectories subdir = virtual + '/' if [r for r in repos if r.startswith(subdir)]: req.respond(HTTP_OK, ctype) return self.makeindex(req, tmpl, subdir) # prefixes not found req.respond(HTTP_NOT_FOUND, ctype) return tmpl("notfound", repo=virtual) except ErrorResponse as err: req.respond(err, ctype) return tmpl('error', error=err.message or '') finally: tmpl = None def makeindex(self, req, tmpl, subdir=""): def archivelist(ui, nodeid, url): allowed = ui.configlist("web", "allow_archive", untrusted=True) archives = [] for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]: if i[0] in allowed or ui.configbool("web", "allow" + i[0], untrusted=True): archives.append({"type" : i[0], "extension": i[1], "node": nodeid, "url": url}) return archives def rawentries(subdir="", **map): descend = self.ui.configbool('web', 'descend', True) collapse = self.ui.configbool('web', 'collapse', False) seenrepos = set() seendirs = set() for name, path in self.repos: if not name.startswith(subdir): continue name = name[len(subdir):] directory = False if '/' in name: if not descend: continue nameparts = name.split('/') rootname = nameparts[0] if not collapse: pass elif rootname in seendirs: continue elif rootname in seenrepos: pass else: directory = True name = rootname # redefine the path to refer to the directory discarded = '/'.join(nameparts[1:]) # remove name parts plus accompanying slash path = 
path[:-len(discarded) - 1] try: r = hg.repository(self.ui, path) directory = False except (IOError, error.RepoError): pass parts = [name] if 'PATH_INFO' in req.env: parts.insert(0, req.env['PATH_INFO'].rstrip('/')) if req.env['SCRIPT_NAME']: parts.insert(0, req.env['SCRIPT_NAME']) url = re.sub(r'/+', '/', '/'.join(parts) + '/') # show either a directory entry or a repository if directory: # get the directory's time information try: d = (get_mtime(path), util.makedate()[1]) except OSError: continue # add '/' to the name to make it obvious that # the entry is a directory, not a regular repository row = {'contact': "", 'contact_sort': "", 'name': name + '/', 'name_sort': name, 'url': url, 'description': "", 'description_sort': "", 'lastchange': d, 'lastchange_sort': d[1]-d[0], 'archives': [], 'isdirectory': True} seendirs.add(name) yield row continue u = self.ui.copy() try: u.readconfig(os.path.join(path, '.hg', 'hgrc')) except Exception as e: u.warn(_('error reading %s/.hg/hgrc: %s\n') % (path, e)) continue def get(section, name, default=None): return u.config(section, name, default, untrusted=True) if u.configbool("web", "hidden", untrusted=True): continue if not self.read_allowed(u, req): continue # update time with local timezone try: r = hg.repository(self.ui, path) except IOError: u.warn(_('error accessing repository at %s\n') % path) continue except error.RepoError: u.warn(_('error accessing repository at %s\n') % path) continue try: d = (get_mtime(r.spath), util.makedate()[1]) except OSError: continue contact = get_contact(get) description = get("web", "description", "") seenrepos.add(name) name = get("web", "name", name) row = {'contact': contact or "unknown", 'contact_sort': contact.upper() or "unknown", 'name': name, 'name_sort': name, 'url': url, 'description': description or "unknown", 'description_sort': description.upper() or "unknown", 'lastchange': d, 'lastchange_sort': d[1]-d[0], 'archives': archivelist(u, "tip", url), 'isdirectory': None, } yield 
row sortdefault = None, False def entries(sortcolumn="", descending=False, subdir="", **map): rows = rawentries(subdir=subdir, **map) if sortcolumn and sortdefault != (sortcolumn, descending): sortkey = '%s_sort' % sortcolumn rows = sorted(rows, key=lambda x: x[sortkey], reverse=descending) for row, parity in zip(rows, paritygen(self.stripecount)): row['parity'] = parity yield row self.refresh() sortable = ["name", "description", "contact", "lastchange"] sortcolumn, descending = sortdefault if 'sort' in req.form: sortcolumn = req.form['sort'][0] descending = sortcolumn.startswith('-') if descending: sortcolumn = sortcolumn[1:] if sortcolumn not in sortable: sortcolumn = "" sort = [("sort_%s" % column, "%s%s" % ((not descending and column == sortcolumn) and "-" or "", column)) for column in sortable] self.refresh() self.updatereqenv(req.env) return tmpl("index", entries=entries, subdir=subdir, pathdef=hgweb_mod.makebreadcrumb('/' + subdir, self.prefix), sortcolumn=sortcolumn, descending=descending, **dict(sort)) def templater(self, req): def motd(**map): if self.motd is not None: yield self.motd else: yield config('web', 'motd', '') def config(section, name, default=None, untrusted=True): return self.ui.config(section, name, default, untrusted) self.updatereqenv(req.env) url = req.env.get('SCRIPT_NAME', '') if not url.endswith('/'): url += '/' vars = {} styles = ( req.form.get('style', [None])[0], config('web', 'style'), 'paper' ) style, mapfile = templater.stylemap(styles, self.templatepath) if style == styles[0]: vars['style'] = style start = url[-1] == '?' and '&' or '?' 
sessionvars = webutil.sessionvars(vars, start) logourl = config('web', 'logourl', 'https://mercurial-scm.org/') logoimg = config('web', 'logoimg', 'hglogo.png') staticurl = config('web', 'staticurl') or url + 'static/' if not staticurl.endswith('/'): staticurl += '/' tmpl = templater.templater(mapfile, defaults={"encoding": encoding.encoding, "motd": motd, "url": url, "logourl": logourl, "logoimg": logoimg, "staticurl": staticurl, "sessionvars": sessionvars, "style": style, }) return tmpl def updatereqenv(self, env): if self._baseurl is not None: name, port, path = geturlcgivars(self._baseurl, env['SERVER_PORT']) env['SERVER_NAME'] = name env['SERVER_PORT'] = port env['SCRIPT_NAME'] = path mercurial-3.7.3/mercurial/hgweb/hgweb_mod.py0000644000175000017500000004103312676531525020514 0ustar mpmmpm00000000000000# hgweb/hgweb_mod.py - Web interface for a repository. # # Copyright 21 May 2005 - (c) 2005 Jake Edge # Copyright 2005-2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import contextlib import os from .common import ( ErrorResponse, HTTP_BAD_REQUEST, HTTP_NOT_FOUND, HTTP_NOT_MODIFIED, HTTP_OK, HTTP_SERVER_ERROR, caching, permhooks, ) from .request import wsgirequest from .. import ( encoding, error, hg, hook, repoview, templatefilters, templater, ui as uimod, util, ) from . import ( protocol, webcommands, webutil, wsgicgi, ) perms = { 'changegroup': 'pull', 'changegroupsubset': 'pull', 'getbundle': 'pull', 'stream_out': 'pull', 'listkeys': 'pull', 'unbundle': 'push', 'pushkey': 'push', } def makebreadcrumb(url, prefix=''): '''Return a 'URL breadcrumb' list A 'URL breadcrumb' is a list of URL-name pairs, corresponding to each of the path items on a URL. This can be used to create path navigation entries. 
''' if url.endswith('/'): url = url[:-1] if prefix: url = '/' + prefix + url relpath = url if relpath.startswith('/'): relpath = relpath[1:] breadcrumb = [] urlel = url pathitems = [''] + relpath.split('/') for pathel in reversed(pathitems): if not pathel or not urlel: break breadcrumb.append({'url': urlel, 'name': pathel}) urlel = os.path.dirname(urlel) return reversed(breadcrumb) class requestcontext(object): """Holds state/context for an individual request. Servers can be multi-threaded. Holding state on the WSGI application is prone to race conditions. Instances of this class exist to hold mutable and race-free state for requests. """ def __init__(self, app, repo): self.repo = repo self.reponame = app.reponame self.archives = ('zip', 'gz', 'bz2') self.maxchanges = self.configint('web', 'maxchanges', 10) self.stripecount = self.configint('web', 'stripes', 1) self.maxshortchanges = self.configint('web', 'maxshortchanges', 60) self.maxfiles = self.configint('web', 'maxfiles', 10) self.allowpull = self.configbool('web', 'allowpull', True) # we use untrusted=False to prevent a repo owner from using # web.templates in .hg/hgrc to get access to any file readable # by the user running the CGI script self.templatepath = self.config('web', 'templates', untrusted=False) # This object is more expensive to build than simple config values. # It is shared across requests. The app will replace the object # if it is updated. Since this is a reference and nothing should # modify the underlying object, it should be constant for the lifetime # of the request. self.websubtable = app.websubtable # Trust the settings from the .hg/hgrc files by default. 
def config(self, section, name, default=None, untrusted=True): return self.repo.ui.config(section, name, default, untrusted=untrusted) def configbool(self, section, name, default=False, untrusted=True): return self.repo.ui.configbool(section, name, default, untrusted=untrusted) def configint(self, section, name, default=None, untrusted=True): return self.repo.ui.configint(section, name, default, untrusted=untrusted) def configlist(self, section, name, default=None, untrusted=True): return self.repo.ui.configlist(section, name, default, untrusted=untrusted) archivespecs = { 'bz2': ('application/x-bzip2', 'tbz2', '.tar.bz2', None), 'gz': ('application/x-gzip', 'tgz', '.tar.gz', None), 'zip': ('application/zip', 'zip', '.zip', None), } def archivelist(self, nodeid): allowed = self.configlist('web', 'allow_archive') for typ, spec in self.archivespecs.iteritems(): if typ in allowed or self.configbool('web', 'allow%s' % typ): yield {'type': typ, 'extension': spec[2], 'node': nodeid} def templater(self, req): # determine scheme, port and server name # this is needed to create absolute urls proto = req.env.get('wsgi.url_scheme') if proto == 'https': proto = 'https' default_port = '443' else: proto = 'http' default_port = '80' port = req.env['SERVER_PORT'] port = port != default_port and (':' + port) or '' urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port) logourl = self.config('web', 'logourl', 'https://mercurial-scm.org/') logoimg = self.config('web', 'logoimg', 'hglogo.png') staticurl = self.config('web', 'staticurl') or req.url + 'static/' if not staticurl.endswith('/'): staticurl += '/' # some functions for the templater def motd(**map): yield self.config('web', 'motd', '') # figure out which style to use vars = {} styles = ( req.form.get('style', [None])[0], self.config('web', 'style'), 'paper', ) style, mapfile = templater.stylemap(styles, self.templatepath) if style == styles[0]: vars['style'] = style start = req.url[-1] == '?' and '&' or '?' 
sessionvars = webutil.sessionvars(vars, start) if not self.reponame: self.reponame = (self.config('web', 'name') or req.env.get('REPO_NAME') or req.url.strip('/') or self.repo.root) def websubfilter(text): return templatefilters.websub(text, self.websubtable) # create the templater tmpl = templater.templater(mapfile, filters={'websub': websubfilter}, defaults={'url': req.url, 'logourl': logourl, 'logoimg': logoimg, 'staticurl': staticurl, 'urlbase': urlbase, 'repo': self.reponame, 'encoding': encoding.encoding, 'motd': motd, 'sessionvars': sessionvars, 'pathdef': makebreadcrumb(req.url), 'style': style, }) return tmpl class hgweb(object): """HTTP server for individual repositories. Instances of this class serve HTTP responses for a particular repository. Instances are typically used as WSGI applications. Some servers are multi-threaded. On these servers, there may be multiple active threads inside __call__. """ def __init__(self, repo, name=None, baseui=None): if isinstance(repo, str): if baseui: u = baseui.copy() else: u = uimod.ui() r = hg.repository(u, repo) else: # we trust caller to give us a private copy r = repo r.ui.setconfig('ui', 'report_untrusted', 'off', 'hgweb') r.baseui.setconfig('ui', 'report_untrusted', 'off', 'hgweb') r.ui.setconfig('ui', 'nontty', 'true', 'hgweb') r.baseui.setconfig('ui', 'nontty', 'true', 'hgweb') # resolve file patterns relative to repo root r.ui.setconfig('ui', 'forcecwd', r.root, 'hgweb') r.baseui.setconfig('ui', 'forcecwd', r.root, 'hgweb') # displaying bundling progress bar while serving feel wrong and may # break some wsgi implementation. 
r.ui.setconfig('progress', 'disable', 'true', 'hgweb') r.baseui.setconfig('progress', 'disable', 'true', 'hgweb') self._repos = [hg.cachedlocalrepo(self._webifyrepo(r))] self._lastrepo = self._repos[0] hook.redirect(True) self.reponame = name def _webifyrepo(self, repo): repo = getwebview(repo) self.websubtable = webutil.getwebsubs(repo) return repo @contextlib.contextmanager def _obtainrepo(self): """Obtain a repo unique to the caller. Internally we maintain a stack of cachedlocalrepo instances to be handed out. If one is available, we pop it and return it, ensuring it is up to date in the process. If one is not available, we clone the most recently used repo instance and return it. It is currently possible for the stack to grow without bounds if the server allows infinite threads. However, servers should have a thread limit, thus establishing our limit. """ if self._repos: cached = self._repos.pop() r, created = cached.fetch() else: cached = self._lastrepo.copy() r, created = cached.fetch() if created: r = self._webifyrepo(r) self._lastrepo = cached self.mtime = cached.mtime try: yield r finally: self._repos.append(cached) def run(self): """Start a server from CGI environment. Modern servers should be using WSGI and should avoid this method, if possible. """ if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."): raise RuntimeError("This function is only intended to be " "called while running as a CGI script.") wsgicgi.launch(self) def __call__(self, env, respond): """Run the WSGI application. This may be called by multiple threads. """ req = wsgirequest(env, respond) return self.run_wsgi(req) def run_wsgi(self, req): """Internal method to run the WSGI application. This is typically only called by Mercurial. External consumers should be using instances of this class as the WSGI application. 
""" with self._obtainrepo() as repo: for r in self._runwsgi(req, repo): yield r def _runwsgi(self, req, repo): rctx = requestcontext(self, repo) # This state is global across all threads. encoding.encoding = rctx.config('web', 'encoding', encoding.encoding) rctx.repo.ui.environ = req.env # work with CGI variables to create coherent structure # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME req.url = req.env['SCRIPT_NAME'] if not req.url.endswith('/'): req.url += '/' if 'REPO_NAME' in req.env: req.url += req.env['REPO_NAME'] + '/' if 'PATH_INFO' in req.env: parts = req.env['PATH_INFO'].strip('/').split('/') repo_parts = req.env.get('REPO_NAME', '').split('/') if parts[:len(repo_parts)] == repo_parts: parts = parts[len(repo_parts):] query = '/'.join(parts) else: query = req.env['QUERY_STRING'].partition('&')[0] query = query.partition(';')[0] # process this if it's a protocol request # protocol bits don't need to create any URLs # and the clients always use the old URL structure cmd = req.form.get('cmd', [''])[0] if protocol.iscmd(cmd): try: if query: raise ErrorResponse(HTTP_NOT_FOUND) if cmd in perms: self.check_perm(rctx, req, perms[cmd]) return protocol.call(rctx.repo, req, cmd) except ErrorResponse as inst: # A client that sends unbundle without 100-continue will # break if we respond early. if (cmd == 'unbundle' and (req.env.get('HTTP_EXPECT', '').lower() != '100-continue') or req.env.get('X-HgHttp2', '')): req.drain() else: req.headers.append(('Connection', 'Close')) req.respond(inst, protocol.HGTYPE, body='0\n%s\n' % inst) return '' # translate user-visible url structure to internal structure args = query.split('/', 2) if 'cmd' not in req.form and args and args[0]: cmd = args.pop(0) style = cmd.rfind('-') if style != -1: req.form['style'] = [cmd[:style]] cmd = cmd[style + 1:] # avoid accepting e.g. 
style parameter as command if util.safehasattr(webcommands, cmd): req.form['cmd'] = [cmd] if cmd == 'static': req.form['file'] = ['/'.join(args)] else: if args and args[0]: node = args.pop(0).replace('%2F', '/') req.form['node'] = [node] if args: req.form['file'] = args ua = req.env.get('HTTP_USER_AGENT', '') if cmd == 'rev' and 'mercurial' in ua: req.form['style'] = ['raw'] if cmd == 'archive': fn = req.form['node'][0] for type_, spec in rctx.archivespecs.iteritems(): ext = spec[2] if fn.endswith(ext): req.form['node'] = [fn[:-len(ext)]] req.form['type'] = [type_] # process the web interface request try: tmpl = rctx.templater(req) ctype = tmpl('mimetype', encoding=encoding.encoding) ctype = templater.stringify(ctype) # check read permissions non-static content if cmd != 'static': self.check_perm(rctx, req, None) if cmd == '': req.form['cmd'] = [tmpl.cache['default']] cmd = req.form['cmd'][0] if rctx.configbool('web', 'cache', True): caching(self, req) # sets ETag header or raises NOT_MODIFIED if cmd not in webcommands.__all__: msg = 'no such method: %s' % cmd raise ErrorResponse(HTTP_BAD_REQUEST, msg) elif cmd == 'file' and 'raw' in req.form.get('style', []): rctx.ctype = ctype content = webcommands.rawfile(rctx, req, tmpl) else: content = getattr(webcommands, cmd)(rctx, req, tmpl) req.respond(HTTP_OK, ctype) return content except (error.LookupError, error.RepoLookupError) as err: req.respond(HTTP_NOT_FOUND, ctype) msg = str(err) if (util.safehasattr(err, 'name') and not isinstance(err, error.ManifestLookupError)): msg = 'revision not found: %s' % err.name return tmpl('error', error=msg) except (error.RepoError, error.RevlogError) as inst: req.respond(HTTP_SERVER_ERROR, ctype) return tmpl('error', error=str(inst)) except ErrorResponse as inst: req.respond(inst, ctype) if inst.code == HTTP_NOT_MODIFIED: # Not allowed to return a body on a 304 return [''] return tmpl('error', error=str(inst)) def check_perm(self, rctx, req, op): for permhook in permhooks: 
permhook(rctx, req, op) def getwebview(repo): """The 'web.view' config controls changeset filter to hgweb. Possible values are ``served``, ``visible`` and ``all``. Default is ``served``. The ``served`` filter only shows changesets that can be pulled from the hgweb instance. The``visible`` filter includes secret changesets but still excludes "hidden" one. See the repoview module for details. The option has been around undocumented since Mercurial 2.5, but no user ever asked about it. So we better keep it undocumented for now.""" viewconfig = repo.ui.config('web', 'view', 'served', untrusted=True) if viewconfig == 'all': return repo.unfiltered() elif viewconfig in repoview.filtertable: return repo.filtered(viewconfig) else: return repo.filtered('served') mercurial-3.7.3/mercurial/hgweb/webutil.py0000644000175000017500000004564312676531525020247 0ustar mpmmpm00000000000000# hgweb/webutil.py - utility library for the web interface. # # Copyright 21 May 2005 - (c) 2005 Jake Edge # Copyright 2005-2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import copy import difflib import os import re from ..i18n import _ from ..node import hex, nullid, short from .common import ( ErrorResponse, HTTP_NOT_FOUND, paritygen, ) from .. 
import ( context, error, match, patch, pathutil, templatefilters, ui as uimod, util, ) def up(p): if p[0] != "/": p = "/" + p if p[-1] == "/": p = p[:-1] up = os.path.dirname(p) if up == "/": return "/" return up + "/" def _navseq(step, firststep=None): if firststep: yield firststep if firststep >= 20 and firststep <= 40: firststep = 50 yield firststep assert step > 0 assert firststep > 0 while step <= firststep: step *= 10 while True: yield 1 * step yield 3 * step step *= 10 class revnav(object): def __init__(self, repo): """Navigation generation object :repo: repo object we generate nav for """ # used for hex generation self._revlog = repo.changelog def __nonzero__(self): """return True if any revision to navigate over""" return self._first() is not None def _first(self): """return the minimum non-filtered changeset or None""" try: return iter(self._revlog).next() except StopIteration: return None def hex(self, rev): return hex(self._revlog.node(rev)) def gen(self, pos, pagelen, limit): """computes label and revision id for navigation link :pos: is the revision relative to which we generate navigation. 
:pagelen: the size of each navigation page :limit: how far shall we link The return is: - a single element tuple - containing a dictionary with a `before` and `after` key - values are generator functions taking arbitrary number of kwargs - yield items are dictionaries with `label` and `node` keys """ if not self: # empty repo return ({'before': (), 'after': ()},) targets = [] for f in _navseq(1, pagelen): if f > limit: break targets.append(pos + f) targets.append(pos - f) targets.sort() first = self._first() navbefore = [("(%i)" % first, self.hex(first))] navafter = [] for rev in targets: if rev not in self._revlog: continue if pos < rev < limit: navafter.append(("+%d" % abs(rev - pos), self.hex(rev))) if 0 < rev < pos: navbefore.append(("-%d" % abs(rev - pos), self.hex(rev))) navafter.append(("tip", "tip")) data = lambda i: {"label": i[0], "node": i[1]} return ({'before': lambda **map: (data(i) for i in navbefore), 'after': lambda **map: (data(i) for i in navafter)},) class filerevnav(revnav): def __init__(self, repo, path): """Navigation generation object :repo: repo object we generate nav for :path: path of the file we generate nav for """ # used for iteration self._changelog = repo.unfiltered().changelog # used for hex generation self._revlog = repo.file(path) def hex(self, rev): return hex(self._changelog.node(self._revlog.linkrev(rev))) class _siblings(object): def __init__(self, siblings=[], hiderev=None): self.siblings = [s for s in siblings if s.node() != nullid] if len(self.siblings) == 1 and self.siblings[0].rev() == hiderev: self.siblings = [] def __iter__(self): for s in self.siblings: d = { 'node': s.hex(), 'rev': s.rev(), 'user': s.user(), 'date': s.date(), 'description': s.description(), 'branch': s.branch(), } if util.safehasattr(s, 'path'): d['file'] = s.path() yield d def __len__(self): return len(self.siblings) def parents(ctx, hide=None): if isinstance(ctx, context.basefilectx): introrev = ctx.introrev() if ctx.changectx().rev() != introrev: 
return _siblings([ctx.repo()[introrev]], hide) return _siblings(ctx.parents(), hide) def children(ctx, hide=None): return _siblings(ctx.children(), hide) def renamelink(fctx): r = fctx.renamed() if r: return [{'file': r[0], 'node': hex(r[1])}] return [] def nodetagsdict(repo, node): return [{"name": i} for i in repo.nodetags(node)] def nodebookmarksdict(repo, node): return [{"name": i} for i in repo.nodebookmarks(node)] def nodebranchdict(repo, ctx): branches = [] branch = ctx.branch() # If this is an empty repo, ctx.node() == nullid, # ctx.branch() == 'default'. try: branchnode = repo.branchtip(branch) except error.RepoLookupError: branchnode = None if branchnode == ctx.node(): branches.append({"name": branch}) return branches def nodeinbranch(repo, ctx): branches = [] branch = ctx.branch() try: branchnode = repo.branchtip(branch) except error.RepoLookupError: branchnode = None if branch != 'default' and branchnode != ctx.node(): branches.append({"name": branch}) return branches def nodebranchnodefault(ctx): branches = [] branch = ctx.branch() if branch != 'default': branches.append({"name": branch}) return branches def showtag(repo, tmpl, t1, node=nullid, **args): for t in repo.nodetags(node): yield tmpl(t1, tag=t, **args) def showbookmark(repo, tmpl, t1, node=nullid, **args): for t in repo.nodebookmarks(node): yield tmpl(t1, bookmark=t, **args) def branchentries(repo, stripecount, limit=0): tips = [] heads = repo.heads() parity = paritygen(stripecount) sortkey = lambda item: (not item[1], item[0].rev()) def entries(**map): count = 0 if not tips: for tag, hs, tip, closed in repo.branchmap().iterbranches(): tips.append((repo[tip], closed)) for ctx, closed in sorted(tips, key=sortkey, reverse=True): if limit > 0 and count >= limit: return count += 1 if closed: status = 'closed' elif ctx.node() not in heads: status = 'inactive' else: status = 'open' yield { 'parity': parity.next(), 'branch': ctx.branch(), 'status': status, 'node': ctx.hex(), 'date': ctx.date() } 
return entries def cleanpath(repo, path): path = path.lstrip('/') return pathutil.canonpath(repo.root, '', path) def changeidctx(repo, changeid): try: ctx = repo[changeid] except error.RepoError: man = repo.manifest ctx = repo[man.linkrev(man.rev(man.lookup(changeid)))] return ctx def changectx(repo, req): changeid = "tip" if 'node' in req.form: changeid = req.form['node'][0] ipos = changeid.find(':') if ipos != -1: changeid = changeid[(ipos + 1):] elif 'manifest' in req.form: changeid = req.form['manifest'][0] return changeidctx(repo, changeid) def basechangectx(repo, req): if 'node' in req.form: changeid = req.form['node'][0] ipos = changeid.find(':') if ipos != -1: changeid = changeid[:ipos] return changeidctx(repo, changeid) return None def filectx(repo, req): if 'file' not in req.form: raise ErrorResponse(HTTP_NOT_FOUND, 'file not given') path = cleanpath(repo, req.form['file'][0]) if 'node' in req.form: changeid = req.form['node'][0] elif 'filenode' in req.form: changeid = req.form['filenode'][0] else: raise ErrorResponse(HTTP_NOT_FOUND, 'node or filenode not given') try: fctx = repo[changeid][path] except error.RepoError: fctx = repo.filectx(path, fileid=changeid) return fctx def commonentry(repo, ctx): node = ctx.node() return { 'rev': ctx.rev(), 'node': hex(node), 'author': ctx.user(), 'desc': ctx.description(), 'date': ctx.date(), 'extra': ctx.extra(), 'phase': ctx.phasestr(), 'branch': nodebranchnodefault(ctx), 'inbranch': nodeinbranch(repo, ctx), 'branches': nodebranchdict(repo, ctx), 'tags': nodetagsdict(repo, node), 'bookmarks': nodebookmarksdict(repo, node), 'parent': lambda **x: parents(ctx), 'child': lambda **x: children(ctx), } def changelistentry(web, ctx, tmpl): '''Obtain a dictionary to be used for entries in a changelist. This function is called when producing items for the "entries" list passed to the "shortlog" and "changelog" templates. 
''' repo = web.repo rev = ctx.rev() n = ctx.node() showtags = showtag(repo, tmpl, 'changelogtag', n) files = listfilediffs(tmpl, ctx.files(), n, web.maxfiles) entry = commonentry(repo, ctx) entry.update( parent=lambda **x: parents(ctx, rev - 1), child=lambda **x: children(ctx, rev + 1), changelogtag=showtags, files=files, ) return entry def symrevorshortnode(req, ctx): if 'node' in req.form: return templatefilters.revescape(req.form['node'][0]) else: return short(ctx.node()) def changesetentry(web, req, tmpl, ctx): '''Obtain a dictionary to be used to render the "changeset" template.''' showtags = showtag(web.repo, tmpl, 'changesettag', ctx.node()) showbookmarks = showbookmark(web.repo, tmpl, 'changesetbookmark', ctx.node()) showbranch = nodebranchnodefault(ctx) files = [] parity = paritygen(web.stripecount) for blockno, f in enumerate(ctx.files()): template = f in ctx and 'filenodelink' or 'filenolink' files.append(tmpl(template, node=ctx.hex(), file=f, blockno=blockno + 1, parity=parity.next())) basectx = basechangectx(web.repo, req) if basectx is None: basectx = ctx.p1() style = web.config('web', 'style', 'paper') if 'style' in req.form: style = req.form['style'][0] parity = paritygen(web.stripecount) diff = diffs(web.repo, tmpl, ctx, basectx, None, parity, style) parity = paritygen(web.stripecount) diffstatsgen = diffstatgen(ctx, basectx) diffstats = diffstat(tmpl, ctx, diffstatsgen, parity) return dict( diff=diff, symrev=symrevorshortnode(req, ctx), basenode=basectx.hex(), changesettag=showtags, changesetbookmark=showbookmarks, changesetbranch=showbranch, files=files, diffsummary=lambda **x: diffsummary(diffstatsgen), diffstat=diffstats, archives=web.archivelist(ctx.hex()), **commonentry(web.repo, ctx)) def listfilediffs(tmpl, files, node, max): for f in files[:max]: yield tmpl('filedifflink', node=hex(node), file=f) if len(files) > max: yield tmpl('fileellipses') def diffs(repo, tmpl, ctx, basectx, files, parity, style): def countgen(): start = 1 while True: 
yield start start += 1 blockcount = countgen() def prettyprintlines(diff, blockno): for lineno, l in enumerate(diff.splitlines(True)): difflineno = "%d.%d" % (blockno, lineno + 1) if l.startswith('+'): ltype = "difflineplus" elif l.startswith('-'): ltype = "difflineminus" elif l.startswith('@'): ltype = "difflineat" else: ltype = "diffline" yield tmpl(ltype, line=l, lineno=lineno + 1, lineid="l%s" % difflineno, linenumber="% 8s" % difflineno) if files: m = match.exact(repo.root, repo.getcwd(), files) else: m = match.always(repo.root, repo.getcwd()) diffopts = patch.diffopts(repo.ui, untrusted=True) if basectx is None: parents = ctx.parents() if parents: node1 = parents[0].node() else: node1 = nullid else: node1 = basectx.node() node2 = ctx.node() block = [] for chunk in patch.diff(repo, node1, node2, m, opts=diffopts): if chunk.startswith('diff') and block: blockno = blockcount.next() yield tmpl('diffblock', parity=parity.next(), blockno=blockno, lines=prettyprintlines(''.join(block), blockno)) block = [] if chunk.startswith('diff') and style != 'raw': chunk = ''.join(chunk.splitlines(True)[1:]) block.append(chunk) blockno = blockcount.next() yield tmpl('diffblock', parity=parity.next(), blockno=blockno, lines=prettyprintlines(''.join(block), blockno)) def compare(tmpl, context, leftlines, rightlines): '''Generator function that provides side-by-side comparison data.''' def compline(type, leftlineno, leftline, rightlineno, rightline): lineid = leftlineno and ("l%s" % leftlineno) or '' lineid += rightlineno and ("r%s" % rightlineno) or '' return tmpl('comparisonline', type=type, lineid=lineid, leftlineno=leftlineno, leftlinenumber="% 6s" % (leftlineno or ''), leftline=leftline or '', rightlineno=rightlineno, rightlinenumber="% 6s" % (rightlineno or ''), rightline=rightline or '') def getblock(opcodes): for type, llo, lhi, rlo, rhi in opcodes: len1 = lhi - llo len2 = rhi - rlo count = min(len1, len2) for i in xrange(count): yield compline(type=type, leftlineno=llo + 
i + 1, leftline=leftlines[llo + i], rightlineno=rlo + i + 1, rightline=rightlines[rlo + i]) if len1 > len2: for i in xrange(llo + count, lhi): yield compline(type=type, leftlineno=i + 1, leftline=leftlines[i], rightlineno=None, rightline=None) elif len2 > len1: for i in xrange(rlo + count, rhi): yield compline(type=type, leftlineno=None, leftline=None, rightlineno=i + 1, rightline=rightlines[i]) s = difflib.SequenceMatcher(None, leftlines, rightlines) if context < 0: yield tmpl('comparisonblock', lines=getblock(s.get_opcodes())) else: for oc in s.get_grouped_opcodes(n=context): yield tmpl('comparisonblock', lines=getblock(oc)) def diffstatgen(ctx, basectx): '''Generator function that provides the diffstat data.''' stats = patch.diffstatdata(util.iterlines(ctx.diff(basectx))) maxname, maxtotal, addtotal, removetotal, binary = patch.diffstatsum(stats) while True: yield stats, maxname, maxtotal, addtotal, removetotal, binary def diffsummary(statgen): '''Return a short summary of the diff.''' stats, maxname, maxtotal, addtotal, removetotal, binary = statgen.next() return _(' %d files changed, %d insertions(+), %d deletions(-)\n') % ( len(stats), addtotal, removetotal) def diffstat(tmpl, ctx, statgen, parity): '''Return a diffstat template for each file in the diff.''' stats, maxname, maxtotal, addtotal, removetotal, binary = statgen.next() files = ctx.files() def pct(i): if maxtotal == 0: return 0 return (float(i) / maxtotal) * 100 fileno = 0 for filename, adds, removes, isbinary in stats: template = filename in files and 'diffstatlink' or 'diffstatnolink' total = adds + removes fileno += 1 yield tmpl(template, node=ctx.hex(), file=filename, fileno=fileno, total=total, addpct=pct(adds), removepct=pct(removes), parity=parity.next()) class sessionvars(object): def __init__(self, vars, start='?'): self.start = start self.vars = vars def __getitem__(self, key): return self.vars[key] def __setitem__(self, key, value): self.vars[key] = value def __copy__(self): return 
sessionvars(copy.copy(self.vars), self.start) def __iter__(self): separator = self.start for key, value in sorted(self.vars.iteritems()): yield {'name': key, 'value': str(value), 'separator': separator} separator = '&' class wsgiui(uimod.ui): # default termwidth breaks under mod_wsgi def termwidth(self): return 80 def getwebsubs(repo): websubtable = [] websubdefs = repo.ui.configitems('websub') # we must maintain interhg backwards compatibility websubdefs += repo.ui.configitems('interhg') for key, pattern in websubdefs: # grab the delimiter from the character after the "s" unesc = pattern[1] delim = re.escape(unesc) # identify portions of the pattern, taking care to avoid escaped # delimiters. the replace format and flags are optional, but # delimiters are required. match = re.match( r'^s%s(.+)(?:(?<=\\\\)|(? # Copyright 2005, 2006 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import cgi import errno import socket from .common import ( ErrorResponse, HTTP_NOT_MODIFIED, statusmessage, ) from .. 
import ( util, ) shortcuts = { 'cl': [('cmd', ['changelog']), ('rev', None)], 'sl': [('cmd', ['shortlog']), ('rev', None)], 'cs': [('cmd', ['changeset']), ('node', None)], 'f': [('cmd', ['file']), ('filenode', None)], 'fl': [('cmd', ['filelog']), ('filenode', None)], 'fd': [('cmd', ['filediff']), ('node', None)], 'fa': [('cmd', ['annotate']), ('filenode', None)], 'mf': [('cmd', ['manifest']), ('manifest', None)], 'ca': [('cmd', ['archive']), ('node', None)], 'tags': [('cmd', ['tags'])], 'tip': [('cmd', ['changeset']), ('node', ['tip'])], 'static': [('cmd', ['static']), ('file', None)] } def normalize(form): # first expand the shortcuts for k in shortcuts.iterkeys(): if k in form: for name, value in shortcuts[k]: if value is None: value = form[k] form[name] = value del form[k] # And strip the values for k, v in form.iteritems(): form[k] = [i.strip() for i in v] return form class wsgirequest(object): """Higher-level API for a WSGI request. WSGI applications are invoked with 2 arguments. They are used to instantiate instances of this class, which provides higher-level APIs for obtaining request parameters, writing HTTP output, etc. 
""" def __init__(self, wsgienv, start_response): version = wsgienv['wsgi.version'] if (version < (1, 0)) or (version >= (2, 0)): raise RuntimeError("Unknown and unsupported WSGI version %d.%d" % version) self.inp = wsgienv['wsgi.input'] self.err = wsgienv['wsgi.errors'] self.threaded = wsgienv['wsgi.multithread'] self.multiprocess = wsgienv['wsgi.multiprocess'] self.run_once = wsgienv['wsgi.run_once'] self.env = wsgienv self.form = normalize(cgi.parse(self.inp, self.env, keep_blank_values=1)) self._start_response = start_response self.server_write = None self.headers = [] def __iter__(self): return iter([]) def read(self, count=-1): return self.inp.read(count) def drain(self): '''need to read all data from request, httplib is half-duplex''' length = int(self.env.get('CONTENT_LENGTH') or 0) for s in util.filechunkiter(self.inp, limit=length): pass def respond(self, status, type, filename=None, body=None): if self._start_response is not None: self.headers.append(('Content-Type', type)) if filename: filename = (filename.rpartition('/')[-1] .replace('\\', '\\\\').replace('"', '\\"')) self.headers.append(('Content-Disposition', 'inline; filename="%s"' % filename)) if body is not None: self.headers.append(('Content-Length', str(len(body)))) for k, v in self.headers: if not isinstance(v, str): raise TypeError('header value must be string: %r' % (v,)) if isinstance(status, ErrorResponse): self.headers.extend(status.headers) if status.code == HTTP_NOT_MODIFIED: # RFC 2616 Section 10.3.5: 304 Not Modified has cases where # it MUST NOT include any headers other than these and no # body self.headers = [(k, v) for (k, v) in self.headers if k in ('Date', 'ETag', 'Expires', 'Cache-Control', 'Vary')] status = statusmessage(status.code, str(status)) elif status == 200: status = '200 Script output follows' elif isinstance(status, int): status = statusmessage(status) self.server_write = self._start_response(status, self.headers) self._start_response = None self.headers = [] if body 
is not None: self.write(body) self.server_write = None def write(self, thing): if thing: try: self.server_write(thing) except socket.error as inst: if inst[0] != errno.ECONNRESET: raise def writelines(self, lines): for line in lines: self.write(line) def flush(self): return None def close(self): return None def wsgiapplication(app_maker): '''For compatibility with old CGI scripts. A plain hgweb() or hgwebdir() can and should now be used as a WSGI application.''' application = app_maker() def run_wsgi(env, respond): return application(env, respond) return run_wsgi mercurial-3.7.3/mercurial/hgweb/webcommands.py0000644000175000017500000011700312676531525021061 0ustar mpmmpm00000000000000# # Copyright 21 May 2005 - (c) 2005 Jake Edge # Copyright 2005-2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import cgi import copy import mimetypes import os import re from ..i18n import _ from ..node import hex, short from .common import ( ErrorResponse, HTTP_FORBIDDEN, HTTP_NOT_FOUND, HTTP_OK, get_contact, paritygen, staticfile, ) from .. import ( archival, encoding, error, graphmod, patch, revset, scmutil, templatefilters, templater, util, ) from . import ( webutil, ) __all__ = [] commands = {} class webcommand(object): """Decorator used to register a web command handler. The decorator takes as its positional arguments the name/path the command should be accessible under. Usage: @webcommand('mycommand') def mycommand(web, req, tmpl): pass """ def __init__(self, name): self.name = name def __call__(self, func): __all__.append(self.name) commands[self.name] = func return func @webcommand('log') def log(web, req, tmpl): """ /log[/{revision}[/{path}]] -------------------------- Show repository or file history. For URLs of the form ``/log/{revision}``, a list of changesets starting at the specified changeset identifier is shown. 
If ``{revision}`` is not defined, the default is ``tip``. This form is equivalent to the ``changelog`` handler. For URLs of the form ``/log/{revision}/{file}``, the history for a specific file will be shown. This form is equivalent to the ``filelog`` handler. """ if 'file' in req.form and req.form['file'][0]: return filelog(web, req, tmpl) else: return changelog(web, req, tmpl) @webcommand('rawfile') def rawfile(web, req, tmpl): guessmime = web.configbool('web', 'guessmime', False) path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0]) if not path: content = manifest(web, req, tmpl) req.respond(HTTP_OK, web.ctype) return content try: fctx = webutil.filectx(web.repo, req) except error.LookupError as inst: try: content = manifest(web, req, tmpl) req.respond(HTTP_OK, web.ctype) return content except ErrorResponse: raise inst path = fctx.path() text = fctx.data() mt = 'application/binary' if guessmime: mt = mimetypes.guess_type(path)[0] if mt is None: if util.binary(text): mt = 'application/binary' else: mt = 'text/plain' if mt.startswith('text/'): mt += '; charset="%s"' % encoding.encoding req.respond(HTTP_OK, mt, path, body=text) return [] def _filerevision(web, req, tmpl, fctx): f = fctx.path() text = fctx.data() parity = paritygen(web.stripecount) if util.binary(text): mt = mimetypes.guess_type(f)[0] or 'application/octet-stream' text = '(binary:%s)' % mt def lines(): for lineno, t in enumerate(text.splitlines(True)): yield {"line": t, "lineid": "l%d" % (lineno + 1), "linenumber": "% 6d" % (lineno + 1), "parity": parity.next()} return tmpl("filerevision", file=f, path=webutil.up(f), text=lines(), symrev=webutil.symrevorshortnode(req, fctx), rename=webutil.renamelink(fctx), permissions=fctx.manifest().flags(f), **webutil.commonentry(web.repo, fctx)) @webcommand('file') def file(web, req, tmpl): """ /file/{revision}[/{path}] ------------------------- Show information about a directory or file in the repository. 
Info about the ``path`` given as a URL parameter will be rendered. If ``path`` is a directory, information about the entries in that directory will be rendered. This form is equivalent to the ``manifest`` handler. If ``path`` is a file, information about that file will be shown via the ``filerevision`` template. If ``path`` is not defined, information about the root directory will be rendered. """ path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0]) if not path: return manifest(web, req, tmpl) try: return _filerevision(web, req, tmpl, webutil.filectx(web.repo, req)) except error.LookupError as inst: try: return manifest(web, req, tmpl) except ErrorResponse: raise inst def _search(web, req, tmpl): MODE_REVISION = 'rev' MODE_KEYWORD = 'keyword' MODE_REVSET = 'revset' def revsearch(ctx): yield ctx def keywordsearch(query): lower = encoding.lower qw = lower(query).split() def revgen(): cl = web.repo.changelog for i in xrange(len(web.repo) - 1, 0, -100): l = [] for j in cl.revs(max(0, i - 99), i): ctx = web.repo[j] l.append(ctx) l.reverse() for e in l: yield e for ctx in revgen(): miss = 0 for q in qw: if not (q in lower(ctx.user()) or q in lower(ctx.description()) or q in lower(" ".join(ctx.files()))): miss = 1 break if miss: continue yield ctx def revsetsearch(revs): for r in revs: yield web.repo[r] searchfuncs = { MODE_REVISION: (revsearch, 'exact revision search'), MODE_KEYWORD: (keywordsearch, 'literal keyword search'), MODE_REVSET: (revsetsearch, 'revset expression search'), } def getsearchmode(query): try: ctx = web.repo[query] except (error.RepoError, error.LookupError): # query is not an exact revision pointer, need to # decide if it's a revset expression or keywords pass else: return MODE_REVISION, ctx revdef = 'reverse(%s)' % query try: tree = revset.parse(revdef) except error.ParseError: # can't parse to a revset tree return MODE_KEYWORD, query if revset.depth(tree) <= 2: # no revset syntax used return MODE_KEYWORD, query if any((token, (value 
or '')[:3]) == ('string', 're:') for token, value, pos in revset.tokenize(revdef)): return MODE_KEYWORD, query funcsused = revset.funcsused(tree) if not funcsused.issubset(revset.safesymbols): return MODE_KEYWORD, query mfunc = revset.match(web.repo.ui, revdef) try: revs = mfunc(web.repo) return MODE_REVSET, revs # ParseError: wrongly placed tokens, wrongs arguments, etc # RepoLookupError: no such revision, e.g. in 'revision:' # Abort: bookmark/tag not exists # LookupError: ambiguous identifier, e.g. in '(bc)' on a large repo except (error.ParseError, error.RepoLookupError, error.Abort, LookupError): return MODE_KEYWORD, query def changelist(**map): count = 0 for ctx in searchfunc[0](funcarg): count += 1 n = ctx.node() showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n) files = webutil.listfilediffs(tmpl, ctx.files(), n, web.maxfiles) yield tmpl('searchentry', parity=parity.next(), changelogtag=showtags, files=files, **webutil.commonentry(web.repo, ctx)) if count >= revcount: break query = req.form['rev'][0] revcount = web.maxchanges if 'revcount' in req.form: try: revcount = int(req.form.get('revcount', [revcount])[0]) revcount = max(revcount, 1) tmpl.defaults['sessionvars']['revcount'] = revcount except ValueError: pass lessvars = copy.copy(tmpl.defaults['sessionvars']) lessvars['revcount'] = max(revcount / 2, 1) lessvars['rev'] = query morevars = copy.copy(tmpl.defaults['sessionvars']) morevars['revcount'] = revcount * 2 morevars['rev'] = query mode, funcarg = getsearchmode(query) if 'forcekw' in req.form: showforcekw = '' showunforcekw = searchfuncs[mode][1] mode = MODE_KEYWORD funcarg = query else: if mode != MODE_KEYWORD: showforcekw = searchfuncs[MODE_KEYWORD][1] else: showforcekw = '' showunforcekw = '' searchfunc = searchfuncs[mode] tip = web.repo['tip'] parity = paritygen(web.stripecount) return tmpl('search', query=query, node=tip.hex(), symrev='tip', entries=changelist, archives=web.archivelist("tip"), morevars=morevars, lessvars=lessvars, 
modedesc=searchfunc[1], showforcekw=showforcekw, showunforcekw=showunforcekw) @webcommand('changelog') def changelog(web, req, tmpl, shortlog=False): """ /changelog[/{revision}] ----------------------- Show information about multiple changesets. If the optional ``revision`` URL argument is absent, information about all changesets starting at ``tip`` will be rendered. If the ``revision`` argument is present, changesets will be shown starting from the specified revision. If ``revision`` is absent, the ``rev`` query string argument may be defined. This will perform a search for changesets. The argument for ``rev`` can be a single revision, a revision set, or a literal keyword to search for in changeset data (equivalent to :hg:`log -k`). The ``revcount`` query string argument defines the maximum numbers of changesets to render. For non-searches, the ``changelog`` template will be rendered. """ query = '' if 'node' in req.form: ctx = webutil.changectx(web.repo, req) symrev = webutil.symrevorshortnode(req, ctx) elif 'rev' in req.form: return _search(web, req, tmpl) else: ctx = web.repo['tip'] symrev = 'tip' def changelist(): revs = [] if pos != -1: revs = web.repo.changelog.revs(pos, 0) curcount = 0 for rev in revs: curcount += 1 if curcount > revcount + 1: break entry = webutil.changelistentry(web, web.repo[rev], tmpl) entry['parity'] = parity.next() yield entry if shortlog: revcount = web.maxshortchanges else: revcount = web.maxchanges if 'revcount' in req.form: try: revcount = int(req.form.get('revcount', [revcount])[0]) revcount = max(revcount, 1) tmpl.defaults['sessionvars']['revcount'] = revcount except ValueError: pass lessvars = copy.copy(tmpl.defaults['sessionvars']) lessvars['revcount'] = max(revcount / 2, 1) morevars = copy.copy(tmpl.defaults['sessionvars']) morevars['revcount'] = revcount * 2 count = len(web.repo) pos = ctx.rev() parity = paritygen(web.stripecount) changenav = webutil.revnav(web.repo).gen(pos, revcount, count) entries = list(changelist()) 
latestentry = entries[:1] if len(entries) > revcount: nextentry = entries[-1:] entries = entries[:-1] else: nextentry = [] return tmpl(shortlog and 'shortlog' or 'changelog', changenav=changenav, node=ctx.hex(), rev=pos, symrev=symrev, changesets=count, entries=entries, latestentry=latestentry, nextentry=nextentry, archives=web.archivelist("tip"), revcount=revcount, morevars=morevars, lessvars=lessvars, query=query) @webcommand('shortlog') def shortlog(web, req, tmpl): """ /shortlog --------- Show basic information about a set of changesets. This accepts the same parameters as the ``changelog`` handler. The only difference is the ``shortlog`` template will be rendered instead of the ``changelog`` template. """ return changelog(web, req, tmpl, shortlog=True) @webcommand('changeset') def changeset(web, req, tmpl): """ /changeset[/{revision}] ----------------------- Show information about a single changeset. A URL path argument is the changeset identifier to show. See ``hg help revisions`` for possible values. If not defined, the ``tip`` changeset will be shown. The ``changeset`` template is rendered. Contents of the ``changesettag``, ``changesetbookmark``, ``filenodelink``, ``filenolink``, and the many templates related to diffs may all be used to produce the output. """ ctx = webutil.changectx(web.repo, req) return tmpl('changeset', **webutil.changesetentry(web, req, tmpl, ctx)) rev = webcommand('rev')(changeset) def decodepath(path): """Hook for mapping a path in the repository to a path in the working copy. Extensions (e.g., largefiles) can override this to remap files in the virtual file system presented by the manifest command below.""" return path @webcommand('manifest') def manifest(web, req, tmpl): """ /manifest[/{revision}[/{path}]] ------------------------------- Show information about a directory. If the URL path arguments are omitted, information about the root directory for the ``tip`` changeset will be shown. 
Because this handler can only show information for directories, it is recommended to use the ``file`` handler instead, as it can handle both directories and files. The ``manifest`` template will be rendered for this handler. """ if 'node' in req.form: ctx = webutil.changectx(web.repo, req) symrev = webutil.symrevorshortnode(req, ctx) else: ctx = web.repo['tip'] symrev = 'tip' path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0]) mf = ctx.manifest() node = ctx.node() files = {} dirs = {} parity = paritygen(web.stripecount) if path and path[-1] != "/": path += "/" l = len(path) abspath = "/" + path for full, n in mf.iteritems(): # the virtual path (working copy path) used for the full # (repository) path f = decodepath(full) if f[:l] != path: continue remain = f[l:] elements = remain.split('/') if len(elements) == 1: files[remain] = full else: h = dirs # need to retain ref to dirs (root) for elem in elements[0:-1]: if elem not in h: h[elem] = {} h = h[elem] if len(h) > 1: break h[None] = None # denotes files present if mf and not files and not dirs: raise ErrorResponse(HTTP_NOT_FOUND, 'path not found: ' + path) def filelist(**map): for f in sorted(files): full = files[f] fctx = ctx.filectx(full) yield {"file": full, "parity": parity.next(), "basename": f, "date": fctx.date(), "size": fctx.size(), "permissions": mf.flags(full)} def dirlist(**map): for d in sorted(dirs): emptydirs = [] h = dirs[d] while isinstance(h, dict) and len(h) == 1: k, v = h.items()[0] if v: emptydirs.append(k) h = v path = "%s%s" % (abspath, d) yield {"parity": parity.next(), "path": path, "emptydirs": "/".join(emptydirs), "basename": d} return tmpl("manifest", symrev=symrev, path=abspath, up=webutil.up(abspath), upparity=parity.next(), fentries=filelist, dentries=dirlist, archives=web.archivelist(hex(node)), **webutil.commonentry(web.repo, ctx)) @webcommand('tags') def tags(web, req, tmpl): """ /tags ----- Show information about tags. No arguments are accepted. 
The ``tags`` template is rendered. """ i = list(reversed(web.repo.tagslist())) parity = paritygen(web.stripecount) def entries(notip, latestonly, **map): t = i if notip: t = [(k, n) for k, n in i if k != "tip"] if latestonly: t = t[:1] for k, n in t: yield {"parity": parity.next(), "tag": k, "date": web.repo[n].date(), "node": hex(n)} return tmpl("tags", node=hex(web.repo.changelog.tip()), entries=lambda **x: entries(False, False, **x), entriesnotip=lambda **x: entries(True, False, **x), latestentry=lambda **x: entries(True, True, **x)) @webcommand('bookmarks') def bookmarks(web, req, tmpl): """ /bookmarks ---------- Show information about bookmarks. No arguments are accepted. The ``bookmarks`` template is rendered. """ i = [b for b in web.repo._bookmarks.items() if b[1] in web.repo] parity = paritygen(web.stripecount) def entries(latestonly, **map): if latestonly: t = [min(i)] else: t = sorted(i) for k, n in t: yield {"parity": parity.next(), "bookmark": k, "date": web.repo[n].date(), "node": hex(n)} return tmpl("bookmarks", node=hex(web.repo.changelog.tip()), entries=lambda **x: entries(latestonly=False, **x), latestentry=lambda **x: entries(latestonly=True, **x)) @webcommand('branches') def branches(web, req, tmpl): """ /branches --------- Show information about branches. All known branches are contained in the output, even closed branches. No arguments are accepted. The ``branches`` template is rendered. """ entries = webutil.branchentries(web.repo, web.stripecount) latestentry = webutil.branchentries(web.repo, web.stripecount, 1) return tmpl('branches', node=hex(web.repo.changelog.tip()), entries=entries, latestentry=latestentry) @webcommand('summary') def summary(web, req, tmpl): """ /summary -------- Show a summary of repository state. Information about the latest changesets, bookmarks, tags, and branches is captured by this handler. The ``summary`` template is rendered. 
""" i = reversed(web.repo.tagslist()) def tagentries(**map): parity = paritygen(web.stripecount) count = 0 for k, n in i: if k == "tip": # skip tip continue count += 1 if count > 10: # limit to 10 tags break yield tmpl("tagentry", parity=parity.next(), tag=k, node=hex(n), date=web.repo[n].date()) def bookmarks(**map): parity = paritygen(web.stripecount) marks = [b for b in web.repo._bookmarks.items() if b[1] in web.repo] for k, n in sorted(marks)[:10]: # limit to 10 bookmarks yield {'parity': parity.next(), 'bookmark': k, 'date': web.repo[n].date(), 'node': hex(n)} def changelist(**map): parity = paritygen(web.stripecount, offset=start - end) l = [] # build a list in forward order for efficiency revs = [] if start < end: revs = web.repo.changelog.revs(start, end - 1) for i in revs: ctx = web.repo[i] l.append(tmpl( 'shortlogentry', parity=parity.next(), **webutil.commonentry(web.repo, ctx))) l.reverse() yield l tip = web.repo['tip'] count = len(web.repo) start = max(0, count - web.maxchanges) end = min(count, start + web.maxchanges) return tmpl("summary", desc=web.config("web", "description", "unknown"), owner=get_contact(web.config) or "unknown", lastchange=tip.date(), tags=tagentries, bookmarks=bookmarks, branches=webutil.branchentries(web.repo, web.stripecount, 10), shortlog=changelist, node=tip.hex(), symrev='tip', archives=web.archivelist("tip")) @webcommand('filediff') def filediff(web, req, tmpl): """ /diff/{revision}/{path} ----------------------- Show how a file changed in a particular commit. The ``filediff`` template is rendered. This handler is registered under both the ``/diff`` and ``/filediff`` paths. ``/diff`` is used in modern code. 
""" fctx, ctx = None, None try: fctx = webutil.filectx(web.repo, req) except LookupError: ctx = webutil.changectx(web.repo, req) path = webutil.cleanpath(web.repo, req.form['file'][0]) if path not in ctx.files(): raise if fctx is not None: path = fctx.path() ctx = fctx.changectx() parity = paritygen(web.stripecount) style = web.config('web', 'style', 'paper') if 'style' in req.form: style = req.form['style'][0] diffs = webutil.diffs(web.repo, tmpl, ctx, None, [path], parity, style) if fctx is not None: rename = webutil.renamelink(fctx) ctx = fctx else: rename = [] ctx = ctx return tmpl("filediff", file=path, symrev=webutil.symrevorshortnode(req, ctx), rename=rename, diff=diffs, **webutil.commonentry(web.repo, ctx)) diff = webcommand('diff')(filediff) @webcommand('comparison') def comparison(web, req, tmpl): """ /comparison/{revision}/{path} ----------------------------- Show a comparison between the old and new versions of a file from changes made on a particular revision. This is similar to the ``diff`` handler. However, this form features a split or side-by-side diff rather than a unified diff. The ``context`` query string argument can be used to control the lines of context in the diff. The ``filecomparison`` template is rendered. 
""" ctx = webutil.changectx(web.repo, req) if 'file' not in req.form: raise ErrorResponse(HTTP_NOT_FOUND, 'file not given') path = webutil.cleanpath(web.repo, req.form['file'][0]) parsecontext = lambda v: v == 'full' and -1 or int(v) if 'context' in req.form: context = parsecontext(req.form['context'][0]) else: context = parsecontext(web.config('web', 'comparisoncontext', '5')) def filelines(f): if util.binary(f.data()): mt = mimetypes.guess_type(f.path())[0] if not mt: mt = 'application/octet-stream' return [_('(binary file %s, hash: %s)') % (mt, hex(f.filenode()))] return f.data().splitlines() fctx = None parent = ctx.p1() leftrev = parent.rev() leftnode = parent.node() rightrev = ctx.rev() rightnode = ctx.node() if path in ctx: fctx = ctx[path] rightlines = filelines(fctx) if path not in parent: leftlines = () else: pfctx = parent[path] leftlines = filelines(pfctx) else: rightlines = () pfctx = ctx.parents()[0][path] leftlines = filelines(pfctx) comparison = webutil.compare(tmpl, context, leftlines, rightlines) if fctx is not None: rename = webutil.renamelink(fctx) ctx = fctx else: rename = [] ctx = ctx return tmpl('filecomparison', file=path, symrev=webutil.symrevorshortnode(req, ctx), rename=rename, leftrev=leftrev, leftnode=hex(leftnode), rightrev=rightrev, rightnode=hex(rightnode), comparison=comparison, **webutil.commonentry(web.repo, ctx)) @webcommand('annotate') def annotate(web, req, tmpl): """ /annotate/{revision}/{path} --------------------------- Show changeset information for each line in a file. The ``fileannotate`` template is rendered. 
""" fctx = webutil.filectx(web.repo, req) f = fctx.path() parity = paritygen(web.stripecount) diffopts = patch.difffeatureopts(web.repo.ui, untrusted=True, section='annotate', whitespace=True) def annotate(**map): last = None if util.binary(fctx.data()): mt = (mimetypes.guess_type(fctx.path())[0] or 'application/octet-stream') lines = enumerate([((fctx.filectx(fctx.filerev()), 1), '(binary:%s)' % mt)]) else: lines = enumerate(fctx.annotate(follow=True, linenumber=True, diffopts=diffopts)) for lineno, ((f, targetline), l) in lines: fnode = f.filenode() if last != fnode: last = fnode yield {"parity": parity.next(), "node": f.hex(), "rev": f.rev(), "author": f.user(), "desc": f.description(), "extra": f.extra(), "file": f.path(), "targetline": targetline, "line": l, "lineno": lineno + 1, "lineid": "l%d" % (lineno + 1), "linenumber": "% 6d" % (lineno + 1), "revdate": f.date()} return tmpl("fileannotate", file=f, annotate=annotate, path=webutil.up(f), symrev=webutil.symrevorshortnode(req, fctx), rename=webutil.renamelink(fctx), permissions=fctx.manifest().flags(f), **webutil.commonentry(web.repo, fctx)) @webcommand('filelog') def filelog(web, req, tmpl): """ /filelog/{revision}/{path} -------------------------- Show information about the history of a file in the repository. The ``revcount`` query string argument can be defined to control the maximum number of entries to show. The ``filelog`` template will be rendered. 
""" try: fctx = webutil.filectx(web.repo, req) f = fctx.path() fl = fctx.filelog() except error.LookupError: f = webutil.cleanpath(web.repo, req.form['file'][0]) fl = web.repo.file(f) numrevs = len(fl) if not numrevs: # file doesn't exist at all raise rev = webutil.changectx(web.repo, req).rev() first = fl.linkrev(0) if rev < first: # current rev is from before file existed raise frev = numrevs - 1 while fl.linkrev(frev) > rev: frev -= 1 fctx = web.repo.filectx(f, fl.linkrev(frev)) revcount = web.maxshortchanges if 'revcount' in req.form: try: revcount = int(req.form.get('revcount', [revcount])[0]) revcount = max(revcount, 1) tmpl.defaults['sessionvars']['revcount'] = revcount except ValueError: pass lessvars = copy.copy(tmpl.defaults['sessionvars']) lessvars['revcount'] = max(revcount / 2, 1) morevars = copy.copy(tmpl.defaults['sessionvars']) morevars['revcount'] = revcount * 2 count = fctx.filerev() + 1 start = max(0, fctx.filerev() - revcount + 1) # first rev on this page end = min(count, start + revcount) # last rev on this page parity = paritygen(web.stripecount, offset=start - end) def entries(): l = [] repo = web.repo revs = fctx.filelog().revs(start, end - 1) for i in revs: iterfctx = fctx.filectx(i) l.append(dict( parity=parity.next(), filerev=i, file=f, rename=webutil.renamelink(iterfctx), **webutil.commonentry(repo, iterfctx))) for e in reversed(l): yield e entries = list(entries()) latestentry = entries[:1] revnav = webutil.filerevnav(web.repo, fctx.path()) nav = revnav.gen(end - 1, revcount, count) return tmpl("filelog", file=f, nav=nav, symrev=webutil.symrevorshortnode(req, fctx), entries=entries, latestentry=latestentry, revcount=revcount, morevars=morevars, lessvars=lessvars, **webutil.commonentry(web.repo, fctx)) @webcommand('archive') def archive(web, req, tmpl): """ /archive/{revision}.{format}[/{path}] ------------------------------------- Obtain an archive of repository content. 
The content and type of the archive is defined by a URL path parameter. ``format`` is the file extension of the archive type to be generated. e.g. ``zip`` or ``tar.bz2``. Not all archive types may be allowed by your server configuration. The optional ``path`` URL parameter controls content to include in the archive. If omitted, every file in the specified revision is present in the archive. If included, only the specified file or contents of the specified directory will be included in the archive. No template is used for this handler. Raw, binary content is generated. """ type_ = req.form.get('type', [None])[0] allowed = web.configlist("web", "allow_archive") key = req.form['node'][0] if type_ not in web.archives: msg = 'Unsupported archive type: %s' % type_ raise ErrorResponse(HTTP_NOT_FOUND, msg) if not ((type_ in allowed or web.configbool("web", "allow" + type_, False))): msg = 'Archive type not allowed: %s' % type_ raise ErrorResponse(HTTP_FORBIDDEN, msg) reponame = re.sub(r"\W+", "-", os.path.basename(web.reponame)) cnode = web.repo.lookup(key) arch_version = key if cnode == key or key == 'tip': arch_version = short(cnode) name = "%s-%s" % (reponame, arch_version) ctx = webutil.changectx(web.repo, req) pats = [] matchfn = scmutil.match(ctx, []) file = req.form.get('file', None) if file: pats = ['path:' + file[0]] matchfn = scmutil.match(ctx, pats, default='path') if pats: files = [f for f in ctx.manifest().keys() if matchfn(f)] if not files: raise ErrorResponse(HTTP_NOT_FOUND, 'file(s) not found: %s' % file[0]) mimetype, artype, extension, encoding = web.archivespecs[type_] headers = [ ('Content-Disposition', 'attachment; filename=%s%s' % (name, extension)) ] if encoding: headers.append(('Content-Encoding', encoding)) req.headers.extend(headers) req.respond(HTTP_OK, mimetype) archival.archive(web.repo, req, cnode, artype, prefix=name, matchfn=matchfn, subrepos=web.configbool("web", "archivesubrepos")) return [] @webcommand('static') def static(web, req, tmpl): 
fname = req.form['file'][0] # a repo owner may set web.static in .hg/hgrc to get any file # readable by the user running the CGI script static = web.config("web", "static", None, untrusted=False) if not static: tp = web.templatepath or templater.templatepaths() if isinstance(tp, str): tp = [tp] static = [os.path.join(p, 'static') for p in tp] staticfile(static, fname, req) return [] @webcommand('graph') def graph(web, req, tmpl): """ /graph[/{revision}] ------------------- Show information about the graphical topology of the repository. Information rendered by this handler can be used to create visual representations of repository topology. The ``revision`` URL parameter controls the starting changeset. The ``revcount`` query string argument can define the number of changesets to show information for. This handler will render the ``graph`` template. """ if 'node' in req.form: ctx = webutil.changectx(web.repo, req) symrev = webutil.symrevorshortnode(req, ctx) else: ctx = web.repo['tip'] symrev = 'tip' rev = ctx.rev() bg_height = 39 revcount = web.maxshortchanges if 'revcount' in req.form: try: revcount = int(req.form.get('revcount', [revcount])[0]) revcount = max(revcount, 1) tmpl.defaults['sessionvars']['revcount'] = revcount except ValueError: pass lessvars = copy.copy(tmpl.defaults['sessionvars']) lessvars['revcount'] = max(revcount / 2, 1) morevars = copy.copy(tmpl.defaults['sessionvars']) morevars['revcount'] = revcount * 2 count = len(web.repo) pos = rev uprev = min(max(0, count - 1), rev + revcount) downrev = max(0, rev - revcount) changenav = webutil.revnav(web.repo).gen(pos, revcount, count) tree = [] if pos != -1: allrevs = web.repo.changelog.revs(pos, 0) revs = [] for i in allrevs: revs.append(i) if len(revs) >= revcount: break # We have to feed a baseset to dagwalker as it is expecting smartset # object. This does not have a big impact on hgweb performance itself # since hgweb graphing code is not itself lazy yet. 
dag = graphmod.dagwalker(web.repo, revset.baseset(revs)) # As we said one line above... not lazy. tree = list(graphmod.colored(dag, web.repo)) def getcolumns(tree): cols = 0 for (id, type, ctx, vtx, edges) in tree: if type != graphmod.CHANGESET: continue cols = max(cols, max([edge[0] for edge in edges] or [0]), max([edge[1] for edge in edges] or [0])) return cols def graphdata(usetuples, **map): data = [] row = 0 for (id, type, ctx, vtx, edges) in tree: if type != graphmod.CHANGESET: continue node = str(ctx) age = templatefilters.age(ctx.date()) desc = templatefilters.firstline(ctx.description()) desc = cgi.escape(templatefilters.nonempty(desc)) user = cgi.escape(templatefilters.person(ctx.user())) branch = cgi.escape(ctx.branch()) try: branchnode = web.repo.branchtip(branch) except error.RepoLookupError: branchnode = None branch = branch, branchnode == ctx.node() if usetuples: data.append((node, vtx, edges, desc, user, age, branch, [cgi.escape(x) for x in ctx.tags()], [cgi.escape(x) for x in ctx.bookmarks()])) else: edgedata = [{'col': edge[0], 'nextcol': edge[1], 'color': (edge[2] - 1) % 6 + 1, 'width': edge[3], 'bcolor': edge[4]} for edge in edges] data.append( {'node': node, 'col': vtx[0], 'color': (vtx[1] - 1) % 6 + 1, 'edges': edgedata, 'row': row, 'nextrow': row + 1, 'desc': desc, 'user': user, 'age': age, 'bookmarks': webutil.nodebookmarksdict( web.repo, ctx.node()), 'branches': webutil.nodebranchdict(web.repo, ctx), 'inbranch': webutil.nodeinbranch(web.repo, ctx), 'tags': webutil.nodetagsdict(web.repo, ctx.node())}) row += 1 return data cols = getcolumns(tree) rows = len(tree) canvasheight = (rows + 1) * bg_height - 27 return tmpl('graph', rev=rev, symrev=symrev, revcount=revcount, uprev=uprev, lessvars=lessvars, morevars=morevars, downrev=downrev, cols=cols, rows=rows, canvaswidth=(cols + 1) * bg_height, truecanvasheight=rows * bg_height, canvasheight=canvasheight, bg_height=bg_height, jsdata=lambda **x: graphdata(True, **x), nodes=lambda **x: 
graphdata(False, **x), node=ctx.hex(), changenav=changenav) def _getdoc(e): doc = e[0].__doc__ if doc: doc = _(doc).partition('\n')[0] else: doc = _('(no help text available)') return doc @webcommand('help') def help(web, req, tmpl): """ /help[/{topic}] --------------- Render help documentation. This web command is roughly equivalent to :hg:`help`. If a ``topic`` is defined, that help topic will be rendered. If not, an index of available help topics will be rendered. The ``help`` template will be rendered when requesting help for a topic. ``helptopics`` will be rendered for the index of help topics. """ from .. import commands, help as helpmod # avoid cycle topicname = req.form.get('node', [None])[0] if not topicname: def topics(**map): for entries, summary, _doc in helpmod.helptable: yield {'topic': entries[0], 'summary': summary} early, other = [], [] primary = lambda s: s.partition('|')[0] for c, e in commands.table.iteritems(): doc = _getdoc(e) if 'DEPRECATED' in doc or c.startswith('debug'): continue cmd = primary(c) if cmd.startswith('^'): early.append((cmd[1:], doc)) else: other.append((cmd, doc)) early.sort() other.sort() def earlycommands(**map): for c, doc in early: yield {'topic': c, 'summary': doc} def othercommands(**map): for c, doc in other: yield {'topic': c, 'summary': doc} return tmpl('helptopics', topics=topics, earlycommands=earlycommands, othercommands=othercommands, title='Index') # Render an index of sub-topics. if topicname in helpmod.subtopics: topics = [] for entries, summary, _doc in helpmod.subtopics[topicname]: topics.append({ 'topic': '%s.%s' % (topicname, entries[0]), 'basename': entries[0], 'summary': summary, }) return tmpl('helptopics', topics=topics, title=topicname, subindex=True) u = webutil.wsgiui() u.verbose = True # Render a page from a sub-topic. if '.' in topicname: # TODO implement support for rendering sections, like # `hg help` works. 
topic, subtopic = topicname.split('.', 1) if topic not in helpmod.subtopics: raise ErrorResponse(HTTP_NOT_FOUND) else: topic = topicname subtopic = None try: doc = helpmod.help_(u, topic, subtopic=subtopic) except error.UnknownCommand: raise ErrorResponse(HTTP_NOT_FOUND) return tmpl('help', topic=topicname, doc=doc) # tell hggettext to extract docstrings from these functions: i18nfunctions = commands.values() mercurial-3.7.3/mercurial/hgweb/wsgicgi.py0000644000175000017500000000543112676531524020216 0ustar mpmmpm00000000000000# hgweb/wsgicgi.py - CGI->WSGI translator # # Copyright 2006 Eric Hopper # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. # # This was originally copied from the public domain code at # http://www.python.org/dev/peps/pep-0333/#the-server-gateway-side from __future__ import absolute_import import os import sys from .. import ( util, ) from . import ( common, ) def launch(application): util.setbinary(sys.stdin) util.setbinary(sys.stdout) environ = dict(os.environ.iteritems()) environ.setdefault('PATH_INFO', '') if environ.get('SERVER_SOFTWARE', '').startswith('Microsoft-IIS'): # IIS includes script_name in PATH_INFO scriptname = environ['SCRIPT_NAME'] if environ['PATH_INFO'].startswith(scriptname): environ['PATH_INFO'] = environ['PATH_INFO'][len(scriptname):] stdin = sys.stdin if environ.get('HTTP_EXPECT', '').lower() == '100-continue': stdin = common.continuereader(stdin, sys.stdout.write) environ['wsgi.input'] = stdin environ['wsgi.errors'] = sys.stderr environ['wsgi.version'] = (1, 0) environ['wsgi.multithread'] = False environ['wsgi.multiprocess'] = True environ['wsgi.run_once'] = True if environ.get('HTTPS', 'off').lower() in ('on', '1', 'yes'): environ['wsgi.url_scheme'] = 'https' else: environ['wsgi.url_scheme'] = 'http' headers_set = [] headers_sent = [] out = sys.stdout def write(data): if not headers_set: raise AssertionError("write() before 
start_response()") elif not headers_sent: # Before the first output, send the stored headers status, response_headers = headers_sent[:] = headers_set out.write('Status: %s\r\n' % status) for header in response_headers: out.write('%s: %s\r\n' % header) out.write('\r\n') out.write(data) out.flush() def start_response(status, response_headers, exc_info=None): if exc_info: try: if headers_sent: # Re-raise original exception if headers sent raise exc_info[0](exc_info[1], exc_info[2]) finally: exc_info = None # avoid dangling circular ref elif headers_set: raise AssertionError("Headers already set!") headers_set[:] = [status, response_headers] return write content = application(environ, start_response) try: for chunk in content: write(chunk) if not headers_sent: write('') # send headers now if body was empty finally: getattr(content, 'close', lambda : None)() mercurial-3.7.3/mercurial/hgweb/server.py0000644000175000017500000002546512676531525020102 0ustar mpmmpm00000000000000# hgweb/server.py - The standalone hg web server. # # Copyright 21 May 2005 - (c) 2005 Jake Edge # Copyright 2005-2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import BaseHTTPServer import SocketServer import errno import os import socket import sys import traceback import urllib from ..i18n import _ from .. import ( error, util, ) from . import ( common, ) def _splitURI(uri): """Return path and query that has been split from uri Just like CGI environment, the path is unquoted, the query is not. """ if '?' 
in uri: path, query = uri.split('?', 1) else: path, query = uri, '' return urllib.unquote(path), query class _error_logger(object): def __init__(self, handler): self.handler = handler def flush(self): pass def write(self, str): self.writelines(str.split('\n')) def writelines(self, seq): for msg in seq: self.handler.log_error("HG error: %s", msg) class _httprequesthandler(BaseHTTPServer.BaseHTTPRequestHandler): url_scheme = 'http' @staticmethod def preparehttpserver(httpserver, ssl_cert): """Prepare .socket of new HTTPServer instance""" pass def __init__(self, *args, **kargs): self.protocol_version = 'HTTP/1.1' BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kargs) def _log_any(self, fp, format, *args): fp.write("%s - - [%s] %s\n" % (self.client_address[0], self.log_date_time_string(), format % args)) fp.flush() def log_error(self, format, *args): self._log_any(self.server.errorlog, format, *args) def log_message(self, format, *args): self._log_any(self.server.accesslog, format, *args) def log_request(self, code='-', size='-'): xheaders = [] if util.safehasattr(self, 'headers'): xheaders = [h for h in self.headers.items() if h[0].startswith('x-')] self.log_message('"%s" %s %s%s', self.requestline, str(code), str(size), ''.join([' %s:%s' % h for h in sorted(xheaders)])) def do_write(self): try: self.do_hgweb() except socket.error as inst: if inst[0] != errno.EPIPE: raise def do_POST(self): try: self.do_write() except Exception: self._start_response("500 Internal Server Error", []) self._write("Internal Server Error") self._done() tb = "".join(traceback.format_exception(*sys.exc_info())) self.log_error("Exception happened during processing " "request '%s':\n%s", self.path, tb) def do_GET(self): self.do_POST() def do_hgweb(self): path, query = _splitURI(self.path) env = {} env['GATEWAY_INTERFACE'] = 'CGI/1.1' env['REQUEST_METHOD'] = self.command env['SERVER_NAME'] = self.server.server_name env['SERVER_PORT'] = str(self.server.server_port) 
env['REQUEST_URI'] = self.path env['SCRIPT_NAME'] = self.server.prefix env['PATH_INFO'] = path[len(self.server.prefix):] env['REMOTE_HOST'] = self.client_address[0] env['REMOTE_ADDR'] = self.client_address[0] if query: env['QUERY_STRING'] = query if self.headers.typeheader is None: env['CONTENT_TYPE'] = self.headers.type else: env['CONTENT_TYPE'] = self.headers.typeheader length = self.headers.getheader('content-length') if length: env['CONTENT_LENGTH'] = length for header in [h for h in self.headers.keys() if h not in ('content-type', 'content-length')]: hkey = 'HTTP_' + header.replace('-', '_').upper() hval = self.headers.getheader(header) hval = hval.replace('\n', '').strip() if hval: env[hkey] = hval env['SERVER_PROTOCOL'] = self.request_version env['wsgi.version'] = (1, 0) env['wsgi.url_scheme'] = self.url_scheme if env.get('HTTP_EXPECT', '').lower() == '100-continue': self.rfile = common.continuereader(self.rfile, self.wfile.write) env['wsgi.input'] = self.rfile env['wsgi.errors'] = _error_logger(self) env['wsgi.multithread'] = isinstance(self.server, SocketServer.ThreadingMixIn) env['wsgi.multiprocess'] = isinstance(self.server, SocketServer.ForkingMixIn) env['wsgi.run_once'] = 0 self.saved_status = None self.saved_headers = [] self.sent_headers = False self.length = None self._chunked = None for chunk in self.server.application(env, self._start_response): self._write(chunk) if not self.sent_headers: self.send_headers() self._done() def send_headers(self): if not self.saved_status: raise AssertionError("Sending headers before " "start_response() called") saved_status = self.saved_status.split(None, 1) saved_status[0] = int(saved_status[0]) self.send_response(*saved_status) self.length = None self._chunked = False for h in self.saved_headers: self.send_header(*h) if h[0].lower() == 'content-length': self.length = int(h[1]) if (self.length is None and saved_status[0] != common.HTTP_NOT_MODIFIED): self._chunked = (not self.close_connection and 
self.request_version == "HTTP/1.1") if self._chunked: self.send_header('Transfer-Encoding', 'chunked') else: self.send_header('Connection', 'close') self.end_headers() self.sent_headers = True def _start_response(self, http_status, headers, exc_info=None): code, msg = http_status.split(None, 1) code = int(code) self.saved_status = http_status bad_headers = ('connection', 'transfer-encoding') self.saved_headers = [h for h in headers if h[0].lower() not in bad_headers] return self._write def _write(self, data): if not self.saved_status: raise AssertionError("data written before start_response() called") elif not self.sent_headers: self.send_headers() if self.length is not None: if len(data) > self.length: raise AssertionError("Content-length header sent, but more " "bytes than specified are being written.") self.length = self.length - len(data) elif self._chunked and data: data = '%x\r\n%s\r\n' % (len(data), data) self.wfile.write(data) self.wfile.flush() def _done(self): if self._chunked: self.wfile.write('0\r\n\r\n') self.wfile.flush() class _httprequesthandlerssl(_httprequesthandler): """HTTPS handler based on Python's ssl module""" url_scheme = 'https' @staticmethod def preparehttpserver(httpserver, ssl_cert): try: import ssl ssl.wrap_socket except ImportError: raise error.Abort(_("SSL support is unavailable")) httpserver.socket = ssl.wrap_socket( httpserver.socket, server_side=True, certfile=ssl_cert, ssl_version=ssl.PROTOCOL_TLSv1) def setup(self): self.connection = self.request self.rfile = socket._fileobject(self.request, "rb", self.rbufsize) self.wfile = socket._fileobject(self.request, "wb", self.wbufsize) try: import threading threading.activeCount() # silence pyflakes and bypass demandimport _mixin = SocketServer.ThreadingMixIn except ImportError: if util.safehasattr(os, "fork"): _mixin = SocketServer.ForkingMixIn else: class _mixin(object): pass def openlog(opt, default): if opt and opt != '-': return open(opt, 'a') return default class 
MercurialHTTPServer(object, _mixin, BaseHTTPServer.HTTPServer): # SO_REUSEADDR has broken semantics on windows if os.name == 'nt': allow_reuse_address = 0 def __init__(self, ui, app, addr, handler, **kwargs): BaseHTTPServer.HTTPServer.__init__(self, addr, handler, **kwargs) self.daemon_threads = True self.application = app handler.preparehttpserver(self, ui.config('web', 'certificate')) prefix = ui.config('web', 'prefix', '') if prefix: prefix = '/' + prefix.strip('/') self.prefix = prefix alog = openlog(ui.config('web', 'accesslog', '-'), sys.stdout) elog = openlog(ui.config('web', 'errorlog', '-'), sys.stderr) self.accesslog = alog self.errorlog = elog self.addr, self.port = self.socket.getsockname()[0:2] self.fqaddr = socket.getfqdn(addr[0]) class IPv6HTTPServer(MercurialHTTPServer): address_family = getattr(socket, 'AF_INET6', None) def __init__(self, *args, **kwargs): if self.address_family is None: raise error.RepoError(_('IPv6 is not available on this system')) super(IPv6HTTPServer, self).__init__(*args, **kwargs) def create_server(ui, app): if ui.config('web', 'certificate'): handler = _httprequesthandlerssl else: handler = _httprequesthandler if ui.configbool('web', 'ipv6'): cls = IPv6HTTPServer else: cls = MercurialHTTPServer # ugly hack due to python issue5853 (for threaded use) try: import mimetypes mimetypes.init() except UnicodeDecodeError: # Python 2.x's mimetypes module attempts to decode strings # from Windows' ANSI APIs as ascii (fail), then re-encode them # as ascii (clown fail), because the default Python Unicode # codec is hardcoded as ascii. 
sys.argv # unwrap demand-loader so that reload() works reload(sys) # resurrect sys.setdefaultencoding() oldenc = sys.getdefaultencoding() sys.setdefaultencoding("latin1") # or any full 8-bit encoding mimetypes.init() sys.setdefaultencoding(oldenc) address = ui.config('web', 'address', '') port = util.getport(ui.config('web', 'port', 8000)) try: return cls(ui, app, (address, port), handler) except socket.error as inst: raise error.Abort(_("cannot start server at '%s:%d': %s") % (address, port, inst.args[1])) mercurial-3.7.3/mercurial/hgweb/__init__.py0000644000175000017500000000722512676531525020325 0ustar mpmmpm00000000000000# hgweb/__init__.py - web interface to a mercurial repository # # Copyright 21 May 2005 - (c) 2005 Jake Edge # Copyright 2005 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import os from ..i18n import _ from .. import ( error, util, ) from . 
import ( hgweb_mod, hgwebdir_mod, server, ) def hgweb(config, name=None, baseui=None): '''create an hgweb wsgi object config can be one of: - repo object (single repo view) - path to repo (single repo view) - path to config file (multi-repo view) - dict of virtual:real pairs (multi-repo view) - list of virtual:real tuples (multi-repo view) ''' if ((isinstance(config, str) and not os.path.isdir(config)) or isinstance(config, dict) or isinstance(config, list)): # create a multi-dir interface return hgwebdir_mod.hgwebdir(config, baseui=baseui) return hgweb_mod.hgweb(config, name=name, baseui=baseui) def hgwebdir(config, baseui=None): return hgwebdir_mod.hgwebdir(config, baseui=baseui) class httpservice(object): def __init__(self, ui, app, opts): self.ui = ui self.app = app self.opts = opts def init(self): util.setsignalhandler() self.httpd = server.create_server(self.ui, self.app) if self.opts['port'] and not self.ui.verbose: return if self.httpd.prefix: prefix = self.httpd.prefix.strip('/') + '/' else: prefix = '' port = ':%d' % self.httpd.port if port == ':80': port = '' bindaddr = self.httpd.addr if bindaddr == '0.0.0.0': bindaddr = '*' elif ':' in bindaddr: # IPv6 bindaddr = '[%s]' % bindaddr fqaddr = self.httpd.fqaddr if ':' in fqaddr: fqaddr = '[%s]' % fqaddr if self.opts['port']: write = self.ui.status else: write = self.ui.write write(_('listening at http://%s%s/%s (bound to %s:%d)\n') % (fqaddr, port, prefix, bindaddr, self.httpd.port)) self.ui.flush() # avoid buffering of status message def run(self): self.httpd.serve_forever() def createservice(ui, repo, opts): # this way we can check if something was given in the command-line if opts.get('port'): opts['port'] = util.getport(opts.get('port')) alluis = set([ui]) if repo: baseui = repo.baseui alluis.update([repo.baseui, repo.ui]) else: baseui = ui webconf = opts.get('web_conf') or opts.get('webdir_conf') if webconf: # load server settings (e.g. 
web.port) to "copied" ui, which allows # hgwebdir to reload webconf cleanly servui = ui.copy() servui.readconfig(webconf, sections=['web']) alluis.add(servui) else: servui = ui optlist = ("name templates style address port prefix ipv6" " accesslog errorlog certificate encoding") for o in optlist.split(): val = opts.get(o, '') if val in (None, ''): # should check against default options instead continue for u in alluis: u.setconfig("web", o, val, 'serve') if webconf: app = hgwebdir_mod.hgwebdir(webconf, baseui=baseui) else: if not repo: raise error.RepoError(_("there is no Mercurial repository" " here (.hg not found)")) app = hgweb_mod.hgweb(repo, baseui=baseui) return httpservice(servui, app, opts) mercurial-3.7.3/mercurial/manifest.py0000644000175000017500000010271412676531525017277 0ustar mpmmpm00000000000000# manifest.py - manifest revision class for mercurial # # Copyright 2005-2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import array import heapq import os import struct from .i18n import _ from . import ( error, mdiff, parsers, revlog, util, ) propertycache = util.propertycache def _parsev1(data): # This method does a little bit of excessive-looking # precondition checking. This is so that the behavior of this # class exactly matches its C counterpart to try and help # prevent surprise breakage for anyone that develops against # the pure version. 
if data and data[-1] != '\n': raise ValueError('Manifest did not end in a newline.') prev = None for l in data.splitlines(): if prev is not None and prev > l: raise ValueError('Manifest lines not in sorted order.') prev = l f, n = l.split('\0') if len(n) > 40: yield f, revlog.bin(n[:40]), n[40:] else: yield f, revlog.bin(n), '' def _parsev2(data): metadataend = data.find('\n') # Just ignore metadata for now pos = metadataend + 1 prevf = '' while pos < len(data): end = data.find('\n', pos + 1) # +1 to skip stem length byte if end == -1: raise ValueError('Manifest ended with incomplete file entry.') stemlen = ord(data[pos]) items = data[pos + 1:end].split('\0') f = prevf[:stemlen] + items[0] if prevf > f: raise ValueError('Manifest entries not in sorted order.') fl = items[1] # Just ignore metadata (items[2:] for now) n = data[end + 1:end + 21] yield f, n, fl pos = end + 22 prevf = f def _parse(data): """Generates (path, node, flags) tuples from a manifest text""" if data.startswith('\0'): return iter(_parsev2(data)) else: return iter(_parsev1(data)) def _text(it, usemanifestv2): """Given an iterator over (path, node, flags) tuples, returns a manifest text""" if usemanifestv2: return _textv2(it) else: return _textv1(it) def _textv1(it): files = [] lines = [] _hex = revlog.hex for f, n, fl in it: files.append(f) # if this is changed to support newlines in filenames, # be sure to check the templates/ dir again (especially *-raw.tmpl) lines.append("%s\0%s%s\n" % (f, _hex(n), fl)) _checkforbidden(files) return ''.join(lines) def _textv2(it): files = [] lines = ['\0\n'] prevf = '' for f, n, fl in it: files.append(f) stem = os.path.commonprefix([prevf, f]) stemlen = min(len(stem), 255) lines.append("%c%s\0%s\n%s\n" % (stemlen, f[stemlen:], fl, n)) prevf = f _checkforbidden(files) return ''.join(lines) class _lazymanifest(dict): """This is the pure implementation of lazymanifest. It has not been optimized *at all* and is not lazy. 
""" def __init__(self, data): dict.__init__(self) for f, n, fl in _parse(data): self[f] = n, fl def __setitem__(self, k, v): node, flag = v assert node is not None if len(node) > 21: node = node[:21] # match c implementation behavior dict.__setitem__(self, k, (node, flag)) def __iter__(self): return iter(sorted(dict.keys(self))) def iterkeys(self): return iter(sorted(dict.keys(self))) def iterentries(self): return ((f, e[0], e[1]) for f, e in sorted(self.iteritems())) def copy(self): c = _lazymanifest('') c.update(self) return c def diff(self, m2, clean=False): '''Finds changes between the current manifest and m2.''' diff = {} for fn, e1 in self.iteritems(): if fn not in m2: diff[fn] = e1, (None, '') else: e2 = m2[fn] if e1 != e2: diff[fn] = e1, e2 elif clean: diff[fn] = None for fn, e2 in m2.iteritems(): if fn not in self: diff[fn] = (None, ''), e2 return diff def filtercopy(self, filterfn): c = _lazymanifest('') for f, n, fl in self.iterentries(): if filterfn(f): c[f] = n, fl return c def text(self): """Get the full data of this manifest as a bytestring.""" return _textv1(self.iterentries()) try: _lazymanifest = parsers.lazymanifest except AttributeError: pass class manifestdict(object): def __init__(self, data=''): if data.startswith('\0'): #_lazymanifest can not parse v2 self._lm = _lazymanifest('') for f, n, fl in _parsev2(data): self._lm[f] = n, fl else: self._lm = _lazymanifest(data) def __getitem__(self, key): return self._lm[key][0] def find(self, key): return self._lm[key] def __len__(self): return len(self._lm) def __setitem__(self, key, node): self._lm[key] = node, self.flags(key, '') def __contains__(self, key): return key in self._lm def __delitem__(self, key): del self._lm[key] def __iter__(self): return self._lm.__iter__() def iterkeys(self): return self._lm.iterkeys() def keys(self): return list(self.iterkeys()) def filesnotin(self, m2): '''Set of files in this manifest that are not in the other''' files = set(self) files.difference_update(m2) 
return files @propertycache def _dirs(self): return util.dirs(self) def dirs(self): return self._dirs def hasdir(self, dir): return dir in self._dirs def _filesfastpath(self, match): '''Checks whether we can correctly and quickly iterate over matcher files instead of over manifest files.''' files = match.files() return (len(files) < 100 and (match.isexact() or (match.prefix() and all(fn in self for fn in files)))) def walk(self, match): '''Generates matching file names. Equivalent to manifest.matches(match).iterkeys(), but without creating an entirely new manifest. It also reports nonexistent files by marking them bad with match.bad(). ''' if match.always(): for f in iter(self): yield f return fset = set(match.files()) # avoid the entire walk if we're only looking for specific files if self._filesfastpath(match): for fn in sorted(fset): yield fn return for fn in self: if fn in fset: # specified pattern is the exact name fset.remove(fn) if match(fn): yield fn # for dirstate.walk, files=['.'] means "walk the whole tree". # follow that here, too fset.discard('.') for fn in sorted(fset): if not self.hasdir(fn): match.bad(fn, None) def matches(self, match): '''generate a new manifest filtered by the match argument''' if match.always(): return self.copy() if self._filesfastpath(match): m = manifestdict() lm = self._lm for fn in match.files(): if fn in lm: m._lm[fn] = lm[fn] return m m = manifestdict() m._lm = self._lm.filtercopy(match) return m def diff(self, m2, clean=False): '''Finds changes between the current manifest and m2. Args: m2: the manifest to which this manifest should be compared. clean: if true, include files unchanged between these manifests with a None value in the returned dictionary. The result is returned as a dict with filename as key and values of the form ((n1,fl1),(n2,fl2)), where n1/n2 is the nodeid in the current/other manifest and fl1/fl2 is the flag in the current/other manifest. 
Where the file does not exist, the nodeid will be None and the flags will be the empty string. ''' return self._lm.diff(m2._lm, clean) def setflag(self, key, flag): self._lm[key] = self[key], flag def get(self, key, default=None): try: return self._lm[key][0] except KeyError: return default def flags(self, key, default=''): try: return self._lm[key][1] except KeyError: return default def copy(self): c = manifestdict() c._lm = self._lm.copy() return c def iteritems(self): return (x[:2] for x in self._lm.iterentries()) def text(self, usemanifestv2=False): if usemanifestv2: return _textv2(self._lm.iterentries()) else: # use (probably) native version for v1 return self._lm.text() def fastdelta(self, base, changes): """Given a base manifest text as an array.array and a list of changes relative to that text, compute a delta that can be used by revlog. """ delta = [] dstart = None dend = None dline = [""] start = 0 # zero copy representation of base as a buffer addbuf = util.buffer(base) changes = list(changes) if len(changes) < 1000: # start with a readonly loop that finds the offset of # each line and creates the deltas for f, todelete in changes: # bs will either be the index of the item or the insert point start, end = _msearch(addbuf, f, start) if not todelete: h, fl = self._lm[f] l = "%s\0%s%s\n" % (f, revlog.hex(h), fl) else: if start == end: # item we want to delete was not found, error out raise AssertionError( _("failed to remove %s from manifest") % f) l = "" if dstart is not None and dstart <= start and dend >= start: if dend < end: dend = end if l: dline.append(l) else: if dstart is not None: delta.append([dstart, dend, "".join(dline)]) dstart = start dend = end dline = [l] if dstart is not None: delta.append([dstart, dend, "".join(dline)]) # apply the delta to the base, and get a delta for addrevision deltatext, arraytext = _addlistdelta(base, delta) else: # For large changes, it's much cheaper to just build the text and # diff it. 
arraytext = array.array('c', self.text()) deltatext = mdiff.textdiff(base, arraytext) return arraytext, deltatext def _msearch(m, s, lo=0, hi=None): '''return a tuple (start, end) that says where to find s within m. If the string is found m[start:end] are the line containing that string. If start == end the string was not found and they indicate the proper sorted insertion point. m should be a buffer or a string s is a string''' def advance(i, c): while i < lenm and m[i] != c: i += 1 return i if not s: return (lo, lo) lenm = len(m) if not hi: hi = lenm while lo < hi: mid = (lo + hi) // 2 start = mid while start > 0 and m[start - 1] != '\n': start -= 1 end = advance(start, '\0') if m[start:end] < s: # we know that after the null there are 40 bytes of sha1 # this translates to the bisect lo = mid + 1 lo = advance(end + 40, '\n') + 1 else: # this translates to the bisect hi = mid hi = start end = advance(lo, '\0') found = m[lo:end] if s == found: # we know that after the null there are 40 bytes of sha1 end = advance(end + 40, '\n') return (lo, end + 1) else: return (lo, lo) def _checkforbidden(l): """Check filenames for illegal characters.""" for f in l: if '\n' in f or '\r' in f: raise error.RevlogError( _("'\\n' and '\\r' disallowed in filenames: %r") % f) # apply the changes collected during the bisect loop to our addlist # return a delta suitable for addrevision def _addlistdelta(addlist, x): # for large addlist arrays, building a new array is cheaper # than repeatedly modifying the existing one currentposition = 0 newaddlist = array.array('c') for start, end, content in x: newaddlist += addlist[currentposition:start] if content: newaddlist += array.array('c', content) currentposition = end newaddlist += addlist[currentposition:] deltatext = "".join(struct.pack(">lll", start, end, len(content)) + content for start, end, content in x) return deltatext, newaddlist def _splittopdir(f): if '/' in f: dir, subpath = f.split('/', 1) return dir + '/', subpath else: return 
'', f _noop = lambda s: None class treemanifest(object): def __init__(self, dir='', text=''): self._dir = dir self._node = revlog.nullid self._loadfunc = _noop self._copyfunc = _noop self._dirty = False self._dirs = {} # Using _lazymanifest here is a little slower than plain old dicts self._files = {} self._flags = {} if text: def readsubtree(subdir, subm): raise AssertionError('treemanifest constructor only accepts ' 'flat manifests') self.parse(text, readsubtree) self._dirty = True # Mark flat manifest dirty after parsing def _subpath(self, path): return self._dir + path def __len__(self): self._load() size = len(self._files) for m in self._dirs.values(): size += m.__len__() return size def _isempty(self): self._load() # for consistency; already loaded by all callers return (not self._files and (not self._dirs or all(m._isempty() for m in self._dirs.values()))) def __repr__(self): return ('' % (self._dir, revlog.hex(self._node), bool(self._loadfunc is _noop), self._dirty, id(self))) def dir(self): '''The directory that this tree manifest represents, including a trailing '/'. Empty string for the repo root directory.''' return self._dir def node(self): '''This node of this instance. nullid for unsaved instances. Should be updated when the instance is read or written from a revlog. 
''' assert not self._dirty return self._node def setnode(self, node): self._node = node self._dirty = False def iteritems(self): self._load() for p, n in sorted(self._dirs.items() + self._files.items()): if p in self._files: yield self._subpath(p), n else: for f, sn in n.iteritems(): yield f, sn def iterkeys(self): self._load() for p in sorted(self._dirs.keys() + self._files.keys()): if p in self._files: yield self._subpath(p) else: for f in self._dirs[p].iterkeys(): yield f def keys(self): return list(self.iterkeys()) def __iter__(self): return self.iterkeys() def __contains__(self, f): if f is None: return False self._load() dir, subpath = _splittopdir(f) if dir: if dir not in self._dirs: return False return self._dirs[dir].__contains__(subpath) else: return f in self._files def get(self, f, default=None): self._load() dir, subpath = _splittopdir(f) if dir: if dir not in self._dirs: return default return self._dirs[dir].get(subpath, default) else: return self._files.get(f, default) def __getitem__(self, f): self._load() dir, subpath = _splittopdir(f) if dir: return self._dirs[dir].__getitem__(subpath) else: return self._files[f] def flags(self, f): self._load() dir, subpath = _splittopdir(f) if dir: if dir not in self._dirs: return '' return self._dirs[dir].flags(subpath) else: if f in self._dirs: return '' return self._flags.get(f, '') def find(self, f): self._load() dir, subpath = _splittopdir(f) if dir: return self._dirs[dir].find(subpath) else: return self._files[f], self._flags.get(f, '') def __delitem__(self, f): self._load() dir, subpath = _splittopdir(f) if dir: self._dirs[dir].__delitem__(subpath) # If the directory is now empty, remove it if self._dirs[dir]._isempty(): del self._dirs[dir] else: del self._files[f] if f in self._flags: del self._flags[f] self._dirty = True def __setitem__(self, f, n): assert n is not None self._load() dir, subpath = _splittopdir(f) if dir: if dir not in self._dirs: self._dirs[dir] = treemanifest(self._subpath(dir)) 
self._dirs[dir].__setitem__(subpath, n) else: self._files[f] = n[:21] # to match manifestdict's behavior self._dirty = True def _load(self): if self._loadfunc is not _noop: lf, self._loadfunc = self._loadfunc, _noop lf(self) elif self._copyfunc is not _noop: cf, self._copyfunc = self._copyfunc, _noop cf(self) def setflag(self, f, flags): """Set the flags (symlink, executable) for path f.""" assert 't' not in flags self._load() dir, subpath = _splittopdir(f) if dir: if dir not in self._dirs: self._dirs[dir] = treemanifest(self._subpath(dir)) self._dirs[dir].setflag(subpath, flags) else: self._flags[f] = flags self._dirty = True def copy(self): copy = treemanifest(self._dir) copy._node = self._node copy._dirty = self._dirty if self._copyfunc is _noop: def _copyfunc(s): self._load() for d in self._dirs: s._dirs[d] = self._dirs[d].copy() s._files = dict.copy(self._files) s._flags = dict.copy(self._flags) if self._loadfunc is _noop: _copyfunc(copy) else: copy._copyfunc = _copyfunc else: copy._copyfunc = self._copyfunc return copy def filesnotin(self, m2): '''Set of files in this manifest that are not in the other''' files = set() def _filesnotin(t1, t2): if t1._node == t2._node and not t1._dirty and not t2._dirty: return t1._load() t2._load() for d, m1 in t1._dirs.iteritems(): if d in t2._dirs: m2 = t2._dirs[d] _filesnotin(m1, m2) else: files.update(m1.iterkeys()) for fn in t1._files.iterkeys(): if fn not in t2._files: files.add(t1._subpath(fn)) _filesnotin(self, m2) return files @propertycache def _alldirs(self): return util.dirs(self) def dirs(self): return self._alldirs def hasdir(self, dir): self._load() topdir, subdir = _splittopdir(dir) if topdir: if topdir in self._dirs: return self._dirs[topdir].hasdir(subdir) return False return (dir + '/') in self._dirs def walk(self, match): '''Generates matching file names. Equivalent to manifest.matches(match).iterkeys(), but without creating an entirely new manifest. 
It also reports nonexistent files by marking them bad with match.bad(). ''' if match.always(): for f in iter(self): yield f return fset = set(match.files()) for fn in self._walk(match): if fn in fset: # specified pattern is the exact name fset.remove(fn) yield fn # for dirstate.walk, files=['.'] means "walk the whole tree". # follow that here, too fset.discard('.') for fn in sorted(fset): if not self.hasdir(fn): match.bad(fn, None) def _walk(self, match): '''Recursively generates matching file names for walk().''' if not match.visitdir(self._dir[:-1] or '.'): return # yield this dir's files and walk its submanifests self._load() for p in sorted(self._dirs.keys() + self._files.keys()): if p in self._files: fullp = self._subpath(p) if match(fullp): yield fullp else: for f in self._dirs[p]._walk(match): yield f def matches(self, match): '''generate a new manifest filtered by the match argument''' if match.always(): return self.copy() return self._matches(match) def _matches(self, match): '''recursively generate a new manifest filtered by the match argument. ''' visit = match.visitdir(self._dir[:-1] or '.') if visit == 'all': return self.copy() ret = treemanifest(self._dir) if not visit: return ret self._load() for fn in self._files: fullp = self._subpath(fn) if not match(fullp): continue ret._files[fn] = self._files[fn] if fn in self._flags: ret._flags[fn] = self._flags[fn] for dir, subm in self._dirs.iteritems(): m = subm._matches(match) if not m._isempty(): ret._dirs[dir] = m if not ret._isempty(): ret._dirty = True return ret def diff(self, m2, clean=False): '''Finds changes between the current manifest and m2. Args: m2: the manifest to which this manifest should be compared. clean: if true, include files unchanged between these manifests with a None value in the returned dictionary. 
The result is returned as a dict with filename as key and values of the form ((n1,fl1),(n2,fl2)), where n1/n2 is the nodeid in the current/other manifest and fl1/fl2 is the flag in the current/other manifest. Where the file does not exist, the nodeid will be None and the flags will be the empty string. ''' result = {} emptytree = treemanifest() def _diff(t1, t2): if t1._node == t2._node and not t1._dirty and not t2._dirty: return t1._load() t2._load() for d, m1 in t1._dirs.iteritems(): m2 = t2._dirs.get(d, emptytree) _diff(m1, m2) for d, m2 in t2._dirs.iteritems(): if d not in t1._dirs: _diff(emptytree, m2) for fn, n1 in t1._files.iteritems(): fl1 = t1._flags.get(fn, '') n2 = t2._files.get(fn, None) fl2 = t2._flags.get(fn, '') if n1 != n2 or fl1 != fl2: result[t1._subpath(fn)] = ((n1, fl1), (n2, fl2)) elif clean: result[t1._subpath(fn)] = None for fn, n2 in t2._files.iteritems(): if fn not in t1._files: fl2 = t2._flags.get(fn, '') result[t2._subpath(fn)] = ((None, ''), (n2, fl2)) _diff(self, m2) return result def unmodifiedsince(self, m2): return not self._dirty and not m2._dirty and self._node == m2._node def parse(self, text, readsubtree): for f, n, fl in _parse(text): if fl == 't': f = f + '/' self._dirs[f] = readsubtree(self._subpath(f), n) elif '/' in f: # This is a flat manifest, so use __setitem__ and setflag rather # than assigning directly to _files and _flags, so we can # assign a path in a subdirectory, and to mark dirty (compared # to nullid). self[f] = n if fl: self.setflag(f, fl) else: # Assigning to _files and _flags avoids marking as dirty, # and should be a little faster. self._files[f] = n if fl: self._flags[f] = fl def text(self, usemanifestv2=False): """Get the full data of this manifest as a bytestring.""" self._load() flags = self.flags return _text(((f, self[f], flags(f)) for f in self.keys()), usemanifestv2) def dirtext(self, usemanifestv2=False): """Get the full data of this directory as a bytestring. 
Make sure that any submanifests have been written first, so their nodeids are correct. """ self._load() flags = self.flags dirs = [(d[:-1], self._dirs[d]._node, 't') for d in self._dirs] files = [(f, self._files[f], flags(f)) for f in self._files] return _text(sorted(dirs + files), usemanifestv2) def read(self, gettext, readsubtree): def _load_for_read(s): s.parse(gettext(), readsubtree) s._dirty = False self._loadfunc = _load_for_read def writesubtrees(self, m1, m2, writesubtree): self._load() # for consistency; should never have any effect here emptytree = treemanifest() for d, subm in self._dirs.iteritems(): subp1 = m1._dirs.get(d, emptytree)._node subp2 = m2._dirs.get(d, emptytree)._node if subp1 == revlog.nullid: subp1, subp2 = subp2, subp1 writesubtree(subm, subp1, subp2) class manifest(revlog.revlog): def __init__(self, opener, dir='', dirlogcache=None): '''The 'dir' and 'dirlogcache' arguments are for internal use by manifest.manifest only. External users should create a root manifest log with manifest.manifest(opener) and call dirlog() on it. ''' # During normal operations, we expect to deal with not more than four # revs at a time (such as during commit --amend). When rebasing large # stacks of commits, the number can go up, hence the config knob below. 
cachesize = 4 usetreemanifest = False usemanifestv2 = False opts = getattr(opener, 'options', None) if opts is not None: cachesize = opts.get('manifestcachesize', cachesize) usetreemanifest = opts.get('treemanifest', usetreemanifest) usemanifestv2 = opts.get('manifestv2', usemanifestv2) self._mancache = util.lrucachedict(cachesize) self._treeinmem = usetreemanifest self._treeondisk = usetreemanifest self._usemanifestv2 = usemanifestv2 indexfile = "00manifest.i" if dir: assert self._treeondisk if not dir.endswith('/'): dir = dir + '/' indexfile = "meta/" + dir + "00manifest.i" revlog.revlog.__init__(self, opener, indexfile) self._dir = dir # The dirlogcache is kept on the root manifest log if dir: self._dirlogcache = dirlogcache else: self._dirlogcache = {'': self} def _newmanifest(self, data=''): if self._treeinmem: return treemanifest(self._dir, data) return manifestdict(data) def dirlog(self, dir): assert self._treeondisk if dir not in self._dirlogcache: self._dirlogcache[dir] = manifest(self.opener, dir, self._dirlogcache) return self._dirlogcache[dir] def _slowreaddelta(self, node): r0 = self.deltaparent(self.rev(node)) m0 = self.read(self.node(r0)) m1 = self.read(node) md = self._newmanifest() for f, ((n0, fl0), (n1, fl1)) in m0.diff(m1).iteritems(): if n1: md[f] = n1 if fl1: md.setflag(f, fl1) return md def readdelta(self, node): if self._usemanifestv2 or self._treeondisk: return self._slowreaddelta(node) r = self.rev(node) d = mdiff.patchtext(self.revdiff(self.deltaparent(r), r)) return self._newmanifest(d) def readfast(self, node): '''use the faster of readdelta or read This will return a manifest which is either only the files added/modified relative to p1, or all files in the manifest. Which one is returned depends on the codepath used to retrieve the data. 
''' r = self.rev(node) deltaparent = self.deltaparent(r) if deltaparent != revlog.nullrev and deltaparent in self.parentrevs(r): return self.readdelta(node) return self.read(node) def read(self, node): if node == revlog.nullid: return self._newmanifest() # don't upset local cache if node in self._mancache: return self._mancache[node][0] if self._treeondisk: def gettext(): return self.revision(node) def readsubtree(dir, subm): return self.dirlog(dir).read(subm) m = self._newmanifest() m.read(gettext, readsubtree) m.setnode(node) arraytext = None else: text = self.revision(node) m = self._newmanifest(text) arraytext = array.array('c', text) self._mancache[node] = (m, arraytext) return m def find(self, node, f): '''look up entry for a single file efficiently. return (node, flags) pair if found, (None, None) if not.''' m = self.read(node) try: return m.find(f) except KeyError: return None, None def add(self, m, transaction, link, p1, p2, added, removed): if (p1 in self._mancache and not self._treeinmem and not self._usemanifestv2): # If our first parent is in the manifest cache, we can # compute a delta here using properties we know about the # manifest up-front, which may save time later for the # revlog layer. _checkforbidden(added) # combine the changed lists into one sorted iterator work = heapq.merge([(x, False) for x in added], [(x, True) for x in removed]) arraytext, deltatext = m.fastdelta(self._mancache[p1][1], work) cachedelta = self.rev(p1), deltatext text = util.buffer(arraytext) n = self.addrevision(text, transaction, link, p1, p2, cachedelta) else: # The first parent manifest isn't already loaded, so we'll # just encode a fulltext of the manifest and pass that # through to the revlog layer, and let it handle the delta # process. 
if self._treeondisk: m1 = self.read(p1) m2 = self.read(p2) n = self._addtree(m, transaction, link, m1, m2) arraytext = None else: text = m.text(self._usemanifestv2) n = self.addrevision(text, transaction, link, p1, p2) arraytext = array.array('c', text) self._mancache[n] = (m, arraytext) return n def _addtree(self, m, transaction, link, m1, m2): # If the manifest is unchanged compared to one parent, # don't write a new revision if m.unmodifiedsince(m1) or m.unmodifiedsince(m2): return m.node() def writesubtree(subm, subp1, subp2): sublog = self.dirlog(subm.dir()) sublog.add(subm, transaction, link, subp1, subp2, None, None) m.writesubtrees(m1, m2, writesubtree) text = m.dirtext(self._usemanifestv2) # Double-check whether contents are unchanged to one parent if text == m1.dirtext(self._usemanifestv2): n = m1.node() elif text == m2.dirtext(self._usemanifestv2): n = m2.node() else: n = self.addrevision(text, transaction, link, m1.node(), m2.node()) # Save nodeid so parent manifest can calculate its nodeid m.setnode(n) return n def clearcaches(self): super(manifest, self).clearcaches() self._mancache.clear() self._dirlogcache = {'': self} mercurial-3.7.3/mercurial/repoview.py0000644000175000017500000003225312676531525017331 0ustar mpmmpm00000000000000# repoview.py - Filtered view of a localrepo object # # Copyright 2012 Pierre-Yves David # Logilab SA # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import copy import heapq import struct from .node import nullrev from . 
import ( error, obsolete, phases, tags as tagsmod, util, ) def hideablerevs(repo): """Revisions candidates to be hidden This is a standalone function to help extensions to wrap it.""" return obsolete.getrevs(repo, 'obsolete') def _getstatichidden(repo): """Revision to be hidden (disregarding dynamic blocker) To keep a consistent graph, we cannot hide any revisions with non-hidden descendants. This function computes the set of revisions that could be hidden while keeping the graph consistent. A second pass will be done to apply "dynamic blocker" like bookmarks or working directory parents. """ assert not repo.changelog.filteredrevs hidden = set(hideablerevs(repo)) if hidden: getphase = repo._phasecache.phase getparentrevs = repo.changelog.parentrevs # Skip heads which are public (guaranteed to not be hidden) heap = [-r for r in repo.changelog.headrevs() if getphase(repo, r)] heapq.heapify(heap) heappop = heapq.heappop heappush = heapq.heappush seen = set() # no need to init it with heads, they have no children while heap: rev = -heappop(heap) # All children have been processed so at that point, if no children # removed 'rev' from the 'hidden' set, 'rev' is going to be hidden. blocker = rev not in hidden for parent in getparentrevs(rev): if parent == nullrev: continue if blocker: # If visible, ensure parent will be visible too hidden.discard(parent) # - Avoid adding the same revision twice # - Skip nodes which are public (guaranteed to not be hidden) pre = len(seen) seen.add(parent) if pre < len(seen) and getphase(repo, rev): heappush(heap, -parent) return hidden def _getdynamicblockers(repo): """Non-cacheable revisions blocking hidden changesets from being filtered. Get revisions that will block hidden changesets and are likely to change, but unlikely to create hidden blockers. 
They won't be cached, so be careful with adding additional computation.""" cl = repo.changelog blockers = set() blockers.update([par.rev() for par in repo[None].parents()]) blockers.update([cl.rev(bm) for bm in repo._bookmarks.values()]) tags = {} tagsmod.readlocaltags(repo.ui, repo, tags, {}) if tags: rev, nodemap = cl.rev, cl.nodemap blockers.update(rev(t[0]) for t in tags.values() if t[0] in nodemap) return blockers cacheversion = 1 cachefile = 'cache/hidden' def cachehash(repo, hideable): """return sha1 hash of repository data to identify a valid cache. We calculate a sha1 of repo heads and the content of the obsstore and write it to the cache. Upon reading we can easily validate by checking the hash against the stored one and discard the cache in case the hashes don't match. """ h = util.sha1() h.update(''.join(repo.heads())) h.update(str(hash(frozenset(hideable)))) return h.digest() def _writehiddencache(cachefile, cachehash, hidden): """write hidden data to a cache file""" data = struct.pack('>%ii' % len(hidden), *sorted(hidden)) cachefile.write(struct.pack(">H", cacheversion)) cachefile.write(cachehash) cachefile.write(data) def trywritehiddencache(repo, hideable, hidden): """write cache of hidden changesets to disk Will not write the cache if a wlock cannot be obtained lazily. 
The cache consists of a head of 22byte: 2 byte version number of the cache 20 byte sha1 to validate the cache n*4 byte hidden revs """ wlock = fh = None try: wlock = repo.wlock(wait=False) # write cache to file newhash = cachehash(repo, hideable) fh = repo.vfs.open(cachefile, 'w+b', atomictemp=True) _writehiddencache(fh, newhash, hidden) except (IOError, OSError): repo.ui.debug('error writing hidden changesets cache\n') except error.LockHeld: repo.ui.debug('cannot obtain lock to write hidden changesets cache\n') finally: if fh: fh.close() if wlock: wlock.release() def tryreadcache(repo, hideable): """read a cache if the cache exists and is valid, otherwise returns None.""" hidden = fh = None try: if repo.vfs.exists(cachefile): fh = repo.vfs.open(cachefile, 'rb') version, = struct.unpack(">H", fh.read(2)) oldhash = fh.read(20) newhash = cachehash(repo, hideable) if (cacheversion, oldhash) == (version, newhash): # cache is valid, so we can start reading the hidden revs data = fh.read() count = len(data) / 4 hidden = frozenset(struct.unpack('>%ii' % count, data)) return hidden except struct.error: repo.ui.debug('corrupted hidden cache\n') # No need to fix the content as it will get rewritten return None except (IOError, OSError): repo.ui.debug('cannot read hidden cache\n') return None finally: if fh: fh.close() def computehidden(repo): """compute the set of hidden revision to filter During most operation hidden should be filtered.""" assert not repo.changelog.filteredrevs hidden = frozenset() hideable = hideablerevs(repo) if hideable: cl = repo.changelog hidden = tryreadcache(repo, hideable) if hidden is None: hidden = frozenset(_getstatichidden(repo)) trywritehiddencache(repo, hideable, hidden) # check if we have wd parents, bookmarks or tags pointing to hidden # changesets and remove those. 
dynamic = hidden & _getdynamicblockers(repo) if dynamic: blocked = cl.ancestors(dynamic, inclusive=True) hidden = frozenset(r for r in hidden if r not in blocked) return hidden def computeunserved(repo): """compute the set of revision that should be filtered when used a server Secret and hidden changeset should not pretend to be here.""" assert not repo.changelog.filteredrevs # fast path in simple case to avoid impact of non optimised code hiddens = filterrevs(repo, 'visible') if phases.hassecret(repo): cl = repo.changelog secret = phases.secret getphase = repo._phasecache.phase first = min(cl.rev(n) for n in repo._phasecache.phaseroots[secret]) revs = cl.revs(start=first) secrets = set(r for r in revs if getphase(repo, r) >= secret) return frozenset(hiddens | secrets) else: return hiddens def computemutable(repo): """compute the set of revision that should be filtered when used a server Secret and hidden changeset should not pretend to be here.""" assert not repo.changelog.filteredrevs # fast check to avoid revset call on huge repo if any(repo._phasecache.phaseroots[1:]): getphase = repo._phasecache.phase maymutable = filterrevs(repo, 'base') return frozenset(r for r in maymutable if getphase(repo, r)) return frozenset() def computeimpactable(repo): """Everything impactable by mutable revision The immutable filter still have some chance to get invalidated. This will happen when: - you garbage collect hidden changeset, - public phase is moved backward, - something is changed in the filtering (this could be fixed) This filter out any mutable changeset and any public changeset that may be impacted by something happening to a mutable revision. 
This is achieved by filtered everything with a revision number egal or higher than the first mutable changeset is filtered.""" assert not repo.changelog.filteredrevs cl = repo.changelog firstmutable = len(cl) for roots in repo._phasecache.phaseroots[1:]: if roots: firstmutable = min(firstmutable, min(cl.rev(r) for r in roots)) # protect from nullrev root firstmutable = max(0, firstmutable) return frozenset(xrange(firstmutable, len(cl))) # function to compute filtered set # # When adding a new filter you MUST update the table at: # mercurial.branchmap.subsettable # Otherwise your filter will have to recompute all its branches cache # from scratch (very slow). filtertable = {'visible': computehidden, 'served': computeunserved, 'immutable': computemutable, 'base': computeimpactable} def filterrevs(repo, filtername): """returns set of filtered revision for this filter name""" if filtername not in repo.filteredrevcache: func = filtertable[filtername] repo.filteredrevcache[filtername] = func(repo.unfiltered()) return repo.filteredrevcache[filtername] class repoview(object): """Provide a read/write view of a repo through a filtered changelog This object is used to access a filtered version of a repository without altering the original repository object itself. We can not alter the original object for two main reasons: - It prevents the use of a repo with multiple filters at the same time. In particular when multiple threads are involved. - It makes scope of the filtering harder to control. This object behaves very closely to the original repository. All attribute operations are done on the original repository: - An access to `repoview.someattr` actually returns `repo.someattr`, - A write to `repoview.someattr` actually sets value of `repo.someattr`, - A deletion of `repoview.someattr` actually drops `someattr` from `repo.__dict__`. The only exception is the `changelog` property. It is overridden to return a (surface) copy of `repo.changelog` with some revisions filtered. 
The `filtername` attribute of the view control the revisions that need to be filtered. (the fact the changelog is copied is an implementation detail). Unlike attributes, this object intercepts all method calls. This means that all methods are run on the `repoview` object with the filtered `changelog` property. For this purpose the simple `repoview` class must be mixed with the actual class of the repository. This ensures that the resulting `repoview` object have the very same methods than the repo object. This leads to the property below. repoview.method() --> repo.__class__.method(repoview) The inheritance has to be done dynamically because `repo` can be of any subclasses of `localrepo`. Eg: `bundlerepo` or `statichttprepo`. """ def __init__(self, repo, filtername): object.__setattr__(self, '_unfilteredrepo', repo) object.__setattr__(self, 'filtername', filtername) object.__setattr__(self, '_clcachekey', None) object.__setattr__(self, '_clcache', None) # not a propertycache on purpose we shall implement a proper cache later @property def changelog(self): """return a filtered version of the changeset this changelog must not be used for writing""" # some cache may be implemented later unfi = self._unfilteredrepo unfichangelog = unfi.changelog # bypass call to changelog.method unfiindex = unfichangelog.index unfilen = len(unfiindex) - 1 unfinode = unfiindex[unfilen - 1][7] revs = filterrevs(unfi, self.filtername) cl = self._clcache newkey = (unfilen, unfinode, hash(revs), unfichangelog._delayed) if cl is not None and newkey != self._clcachekey: cl = None # could have been made None by the previous if if cl is None: cl = copy.copy(unfichangelog) cl.filteredrevs = revs object.__setattr__(self, '_clcache', cl) object.__setattr__(self, '_clcachekey', newkey) return cl def unfiltered(self): """Return an unfiltered version of a repo""" return self._unfilteredrepo def filtered(self, name): """Return a filtered version of a repository""" if name == self.filtername: return 
self return self.unfiltered().filtered(name) # everything access are forwarded to the proxied repo def __getattr__(self, attr): return getattr(self._unfilteredrepo, attr) def __setattr__(self, attr, value): return setattr(self._unfilteredrepo, attr, value) def __delattr__(self, attr): return delattr(self._unfilteredrepo, attr) # The `requirements` attribute is initialized during __init__. But # __getattr__ won't be called as it also exists on the class. We need # explicit forwarding to main repo here @property def requirements(self): return self._unfilteredrepo.requirements mercurial-3.7.3/mercurial/similar.py0000644000175000017500000000716612676531525017136 0ustar mpmmpm00000000000000# similar.py - mechanisms for finding similar files # # Copyright 2005-2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import from .i18n import _ from . import ( bdiff, mdiff, util, ) def _findexactmatches(repo, added, removed): '''find renamed files that have no changes Takes a list of new filectxs and a list of removed filectxs, and yields (before, after) tuples of exact matches. ''' numfiles = len(added) + len(removed) # Get hashes of removed files. hashes = {} for i, fctx in enumerate(removed): repo.ui.progress(_('searching for exact renames'), i, total=numfiles) h = util.sha1(fctx.data()).digest() hashes[h] = fctx # For each added file, see if it corresponds to a removed file. 
for i, fctx in enumerate(added): repo.ui.progress(_('searching for exact renames'), i + len(removed), total=numfiles) h = util.sha1(fctx.data()).digest() if h in hashes: yield (hashes[h], fctx) # Done repo.ui.progress(_('searching for exact renames'), None) def _findsimilarmatches(repo, added, removed, threshold): '''find potentially renamed files based on similar file content Takes a list of new filectxs and a list of removed filectxs, and yields (before, after, score) tuples of partial matches. ''' copies = {} for i, r in enumerate(removed): repo.ui.progress(_('searching for similar files'), i, total=len(removed)) # lazily load text @util.cachefunc def data(): orig = r.data() return orig, mdiff.splitnewlines(orig) def score(text): orig, lines = data() # bdiff.blocks() returns blocks of matching lines # count the number of bytes in each equal = 0 matches = bdiff.blocks(text, orig) for x1, x2, y1, y2 in matches: for line in lines[y1:y2]: equal += len(line) lengths = len(text) + len(orig) return equal * 2.0 / lengths for a in added: bestscore = copies.get(a, (None, threshold))[1] myscore = score(a.data()) if myscore >= bestscore: copies[a] = (r, myscore) repo.ui.progress(_('searching'), None) for dest, v in copies.iteritems(): source, score = v yield source, dest, score def findrenames(repo, added, removed, threshold): '''find renamed files -- yields (before, after, score) tuples''' parentctx = repo['.'] workingctx = repo[None] # Zero length files will be frequently unrelated to each other, and # tracking the deletion/addition of such a file will probably cause more # harm than good. We strip them out here to avoid matching them later on. addedfiles = set([workingctx[fp] for fp in added if workingctx[fp].size() > 0]) removedfiles = set([parentctx[fp] for fp in removed if fp in parentctx and parentctx[fp].size() > 0]) # Find exact matches. 
for (a, b) in _findexactmatches(repo, sorted(addedfiles), sorted(removedfiles)): addedfiles.remove(b) yield (a.path(), b.path(), 1.0) # If the user requested similar files to be matched, search for them also. if threshold < 1.0: for (a, b, score) in _findsimilarmatches(repo, sorted(addedfiles), sorted(removedfiles), threshold): yield (a.path(), b.path(), score) mercurial-3.7.3/mercurial/scmutil.py0000644000175000017500000013310112676531525017143 0ustar mpmmpm00000000000000# scmutil.py - Mercurial core utility functions # # Copyright Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import Queue import contextlib import errno import glob import os import re import shutil import stat import tempfile import threading from .i18n import _ from .node import wdirrev from . import ( encoding, error, match as matchmod, osutil, pathutil, phases, revset, similar, util, ) if os.name == 'nt': from . import scmwindows as scmplatform else: from . import scmposix as scmplatform systemrcpath = scmplatform.systemrcpath userrcpath = scmplatform.userrcpath class status(tuple): '''Named tuple with a list of files per status. The 'deleted', 'unknown' and 'ignored' properties are only relevant to the working copy. 
''' __slots__ = () def __new__(cls, modified, added, removed, deleted, unknown, ignored, clean): return tuple.__new__(cls, (modified, added, removed, deleted, unknown, ignored, clean)) @property def modified(self): '''files that have been modified''' return self[0] @property def added(self): '''files that have been added''' return self[1] @property def removed(self): '''files that have been removed''' return self[2] @property def deleted(self): '''files that are in the dirstate, but have been deleted from the working copy (aka "missing") ''' return self[3] @property def unknown(self): '''files not in the dirstate that are not ignored''' return self[4] @property def ignored(self): '''files not in the dirstate that are ignored (by _dirignore())''' return self[5] @property def clean(self): '''files that have not been modified''' return self[6] def __repr__(self, *args, **kwargs): return (('') % self) def itersubrepos(ctx1, ctx2): """find subrepos in ctx1 or ctx2""" # Create a (subpath, ctx) mapping where we prefer subpaths from # ctx1. The subpaths from ctx2 are important when the .hgsub file # has been modified (in ctx2) but not yet committed (in ctx1). subpaths = dict.fromkeys(ctx2.substate, ctx2) subpaths.update(dict.fromkeys(ctx1.substate, ctx1)) missing = set() for subpath in ctx2.substate: if subpath not in ctx1.substate: del subpaths[subpath] missing.add(subpath) for subpath, ctx in sorted(subpaths.iteritems()): yield subpath, ctx.sub(subpath) # Yield an empty subrepo based on ctx1 for anything only in ctx2. That way, # status and diff will have an accurate result when it does # 'sub.{status|diff}(rev2)'. Otherwise, the ctx2 subrepo is compared # against itself. for subpath in missing: yield subpath, ctx2.nullsub(subpath, ctx1) def nochangesfound(ui, repo, excluded=None): '''Report no changes for push/pull, excluded is None or a list of nodes excluded from the push/pull. 
''' secretlist = [] if excluded: for n in excluded: if n not in repo: # discovery should not have included the filtered revision, # we have to explicitly exclude it until discovery is cleanup. continue ctx = repo[n] if ctx.phase() >= phases.secret and not ctx.extinct(): secretlist.append(n) if secretlist: ui.status(_("no changes found (ignored %d secret changesets)\n") % len(secretlist)) else: ui.status(_("no changes found\n")) def checknewlabel(repo, lbl, kind): # Do not use the "kind" parameter in ui output. # It makes strings difficult to translate. if lbl in ['tip', '.', 'null']: raise error.Abort(_("the name '%s' is reserved") % lbl) for c in (':', '\0', '\n', '\r'): if c in lbl: raise error.Abort(_("%r cannot be used in a name") % c) try: int(lbl) raise error.Abort(_("cannot use an integer as a name")) except ValueError: pass def checkfilename(f): '''Check that the filename f is an acceptable filename for a tracked file''' if '\r' in f or '\n' in f: raise error.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f) def checkportable(ui, f): '''Check if filename f is portable and warn or abort depending on config''' checkfilename(f) abort, warn = checkportabilityalert(ui) if abort or warn: msg = util.checkwinfilename(f) if msg: msg = "%s: %r" % (msg, f) if abort: raise error.Abort(msg) ui.warn(_("warning: %s\n") % msg) def checkportabilityalert(ui): '''check if the user's config requests nothing, a warning, or abort for non-portable filenames''' val = ui.config('ui', 'portablefilenames', 'warn') lval = val.lower() bval = util.parsebool(val) abort = os.name == 'nt' or lval == 'abort' warn = bval or lval == 'warn' if bval is None and not (warn or abort or lval == 'ignore'): raise error.ConfigError( _("ui.portablefilenames value is invalid ('%s')") % val) return abort, warn class casecollisionauditor(object): def __init__(self, ui, abort, dirstate): self._ui = ui self._abort = abort allfiles = '\0'.join(dirstate._map) self._loweredfiles = 
set(encoding.lower(allfiles).split('\0')) self._dirstate = dirstate # The purpose of _newfiles is so that we don't complain about # case collisions if someone were to call this object with the # same filename twice. self._newfiles = set() def __call__(self, f): if f in self._newfiles: return fl = encoding.lower(f) if fl in self._loweredfiles and f not in self._dirstate: msg = _('possible case-folding collision for %s') % f if self._abort: raise error.Abort(msg) self._ui.warn(_("warning: %s\n") % msg) self._loweredfiles.add(fl) self._newfiles.add(f) def filteredhash(repo, maxrev): """build hash of filtered revisions in the current repoview. Multiple caches perform up-to-date validation by checking that the tiprev and tipnode stored in the cache file match the current repository. However, this is not sufficient for validating repoviews because the set of revisions in the view may change without the repository tiprev and tipnode changing. This function hashes all the revs filtered from the view and returns that SHA-1 digest. 
""" cl = repo.changelog if not cl.filteredrevs: return None key = None revs = sorted(r for r in cl.filteredrevs if r <= maxrev) if revs: s = util.sha1() for rev in revs: s.update('%s;' % rev) key = s.digest() return key class abstractvfs(object): """Abstract base class; cannot be instantiated""" def __init__(self, *args, **kwargs): '''Prevent instantiation; don't call this from subclasses.''' raise NotImplementedError('attempted instantiating ' + str(type(self))) def tryread(self, path): '''gracefully return an empty string for missing files''' try: return self.read(path) except IOError as inst: if inst.errno != errno.ENOENT: raise return "" def tryreadlines(self, path, mode='rb'): '''gracefully return an empty array for missing files''' try: return self.readlines(path, mode=mode) except IOError as inst: if inst.errno != errno.ENOENT: raise return [] def open(self, path, mode="r", text=False, atomictemp=False, notindexed=False, backgroundclose=False): '''Open ``path`` file, which is relative to vfs root. Newly created directories are marked as "not to be indexed by the content indexing service", if ``notindexed`` is specified for "write" mode access. 
''' self.open = self.__call__ return self.__call__(path, mode, text, atomictemp, notindexed, backgroundclose=backgroundclose) def read(self, path): with self(path, 'rb') as fp: return fp.read() def readlines(self, path, mode='rb'): with self(path, mode=mode) as fp: return fp.readlines() def write(self, path, data): with self(path, 'wb') as fp: return fp.write(data) def writelines(self, path, data, mode='wb', notindexed=False): with self(path, mode=mode, notindexed=notindexed) as fp: return fp.writelines(data) def append(self, path, data): with self(path, 'ab') as fp: return fp.write(data) def basename(self, path): """return base element of a path (as os.path.basename would do) This exists to allow handling of strange encoding if needed.""" return os.path.basename(path) def chmod(self, path, mode): return os.chmod(self.join(path), mode) def dirname(self, path): """return dirname element of a path (as os.path.dirname would do) This exists to allow handling of strange encoding if needed.""" return os.path.dirname(path) def exists(self, path=None): return os.path.exists(self.join(path)) def fstat(self, fp): return util.fstat(fp) def isdir(self, path=None): return os.path.isdir(self.join(path)) def isfile(self, path=None): return os.path.isfile(self.join(path)) def islink(self, path=None): return os.path.islink(self.join(path)) def isfileorlink(self, path=None): '''return whether path is a regular file or a symlink Unlike isfile, this doesn't follow symlinks.''' try: st = self.lstat(path) except OSError: return False mode = st.st_mode return stat.S_ISREG(mode) or stat.S_ISLNK(mode) def reljoin(self, *paths): """join various elements of a path together (as os.path.join would do) The vfs base is not injected so that path stay relative. 
This exists to allow handling of strange encoding if needed.""" return os.path.join(*paths) def split(self, path): """split top-most element of a path (as os.path.split would do) This exists to allow handling of strange encoding if needed.""" return os.path.split(path) def lexists(self, path=None): return os.path.lexists(self.join(path)) def lstat(self, path=None): return os.lstat(self.join(path)) def listdir(self, path=None): return os.listdir(self.join(path)) def makedir(self, path=None, notindexed=True): return util.makedir(self.join(path), notindexed) def makedirs(self, path=None, mode=None): return util.makedirs(self.join(path), mode) def makelock(self, info, path): return util.makelock(info, self.join(path)) def mkdir(self, path=None): return os.mkdir(self.join(path)) def mkstemp(self, suffix='', prefix='tmp', dir=None, text=False): fd, name = tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=self.join(dir), text=text) dname, fname = util.split(name) if dir: return fd, os.path.join(dir, fname) else: return fd, fname def readdir(self, path=None, stat=None, skip=None): return osutil.listdir(self.join(path), stat, skip) def readlock(self, path): return util.readlock(self.join(path)) def rename(self, src, dst): return util.rename(self.join(src), self.join(dst)) def readlink(self, path): return os.readlink(self.join(path)) def removedirs(self, path=None): """Remove a leaf directory and all empty intermediate ones """ return util.removedirs(self.join(path)) def rmtree(self, path=None, ignore_errors=False, forcibly=False): """Remove a directory tree recursively If ``forcibly``, this tries to remove READ-ONLY files, too. 
""" if forcibly: def onerror(function, path, excinfo): if function is not os.remove: raise # read-only files cannot be unlinked under Windows s = os.stat(path) if (s.st_mode & stat.S_IWRITE) != 0: raise os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE) os.remove(path) else: onerror = None return shutil.rmtree(self.join(path), ignore_errors=ignore_errors, onerror=onerror) def setflags(self, path, l, x): return util.setflags(self.join(path), l, x) def stat(self, path=None): return os.stat(self.join(path)) def unlink(self, path=None): return util.unlink(self.join(path)) def unlinkpath(self, path=None, ignoremissing=False): return util.unlinkpath(self.join(path), ignoremissing) def utime(self, path=None, t=None): return os.utime(self.join(path), t) def walk(self, path=None, onerror=None): """Yield (dirpath, dirs, files) tuple for each directories under path ``dirpath`` is relative one from the root of this vfs. This uses ``os.sep`` as path separator, even you specify POSIX style ``path``. "The root of this vfs" is represented as empty ``dirpath``. """ root = os.path.normpath(self.join(None)) # when dirpath == root, dirpath[prefixlen:] becomes empty # because len(dirpath) < prefixlen. prefixlen = len(pathutil.normasprefix(root)) for dirpath, dirs, files in os.walk(self.join(path), onerror=onerror): yield (dirpath[prefixlen:], dirs, files) @contextlib.contextmanager def backgroundclosing(self, ui, expectedcount=-1): """Allow files to be closed asynchronously. When this context manager is active, ``backgroundclose`` can be passed to ``__call__``/``open`` to result in the file possibly being closed asynchronously, on a background thread. """ # This is an arbitrary restriction and could be changed if we ever # have a use case. 
vfs = getattr(self, 'vfs', self) if getattr(vfs, '_backgroundfilecloser', None): raise error.Abort('can only have 1 active background file closer') with backgroundfilecloser(ui, expectedcount=expectedcount) as bfc: try: vfs._backgroundfilecloser = bfc yield bfc finally: vfs._backgroundfilecloser = None class vfs(abstractvfs): '''Operate files relative to a base directory This class is used to hide the details of COW semantics and remote file access from higher level code. ''' def __init__(self, base, audit=True, expandpath=False, realpath=False): if expandpath: base = util.expandpath(base) if realpath: base = os.path.realpath(base) self.base = base self.mustaudit = audit self.createmode = None self._trustnlink = None @property def mustaudit(self): return self._audit @mustaudit.setter def mustaudit(self, onoff): self._audit = onoff if onoff: self.audit = pathutil.pathauditor(self.base) else: self.audit = util.always @util.propertycache def _cansymlink(self): return util.checklink(self.base) @util.propertycache def _chmod(self): return util.checkexec(self.base) def _fixfilemode(self, name): if self.createmode is None or not self._chmod: return os.chmod(name, self.createmode & 0o666) def __call__(self, path, mode="r", text=False, atomictemp=False, notindexed=False, backgroundclose=False): '''Open ``path`` file, which is relative to vfs root. Newly created directories are marked as "not to be indexed by the content indexing service", if ``notindexed`` is specified for "write" mode access. If ``backgroundclose`` is passed, the file may be closed asynchronously. It can only be used if the ``self.backgroundclosing()`` context manager is active. This should only be specified if the following criteria hold: 1. There is a potential for writing thousands of files. Unless you are writing thousands of files, the performance benefits of asynchronously closing files is not realized. 2. 
Files are opened exactly once for the ``backgroundclosing`` active duration and are therefore free of race conditions between closing a file on a background thread and reopening it. (If the file were opened multiple times, there could be unflushed data because the original file handle hasn't been flushed/closed yet.) ''' if self._audit: r = util.checkosfilename(path) if r: raise error.Abort("%s: %r" % (r, path)) self.audit(path) f = self.join(path) if not text and "b" not in mode: mode += "b" # for that other OS nlink = -1 if mode not in ('r', 'rb'): dirname, basename = util.split(f) # If basename is empty, then the path is malformed because it points # to a directory. Let the posixfile() call below raise IOError. if basename: if atomictemp: util.ensuredirs(dirname, self.createmode, notindexed) return util.atomictempfile(f, mode, self.createmode) try: if 'w' in mode: util.unlink(f) nlink = 0 else: # nlinks() may behave differently for files on Windows # shares if the file is open. with util.posixfile(f): nlink = util.nlinks(f) if nlink < 1: nlink = 2 # force mktempcopy (issue1922) except (OSError, IOError) as e: if e.errno != errno.ENOENT: raise nlink = 0 util.ensuredirs(dirname, self.createmode, notindexed) if nlink > 0: if self._trustnlink is None: self._trustnlink = nlink > 1 or util.checknlink(f) if nlink > 1 or not self._trustnlink: util.rename(util.mktempcopy(f), f) fp = util.posixfile(f, mode) if nlink == 0: self._fixfilemode(f) if backgroundclose: if not self._backgroundfilecloser: raise error.Abort('backgroundclose can only be used when a ' 'backgroundclosing context manager is active') fp = delayclosedfile(fp, self._backgroundfilecloser) return fp def symlink(self, src, dst): self.audit(dst) linkname = self.join(dst) try: os.unlink(linkname) except OSError: pass util.ensuredirs(os.path.dirname(linkname), self.createmode) if self._cansymlink: try: os.symlink(src, linkname) except OSError as err: raise OSError(err.errno, _('could not symlink to %r: %s') % 
(src, err.strerror), linkname) else: self.write(dst, src) def join(self, path, *insidef): if path: return os.path.join(self.base, path, *insidef) else: return self.base opener = vfs class auditvfs(object): def __init__(self, vfs): self.vfs = vfs @property def mustaudit(self): return self.vfs.mustaudit @mustaudit.setter def mustaudit(self, onoff): self.vfs.mustaudit = onoff class filtervfs(abstractvfs, auditvfs): '''Wrapper vfs for filtering filenames with a function.''' def __init__(self, vfs, filter): auditvfs.__init__(self, vfs) self._filter = filter def __call__(self, path, *args, **kwargs): return self.vfs(self._filter(path), *args, **kwargs) def join(self, path, *insidef): if path: return self.vfs.join(self._filter(self.vfs.reljoin(path, *insidef))) else: return self.vfs.join(path) filteropener = filtervfs class readonlyvfs(abstractvfs, auditvfs): '''Wrapper vfs preventing any writing.''' def __init__(self, vfs): auditvfs.__init__(self, vfs) def __call__(self, path, mode='r', *args, **kw): if mode not in ('r', 'rb'): raise error.Abort('this vfs is read only') return self.vfs(path, mode, *args, **kw) def join(self, path, *insidef): return self.vfs.join(path, *insidef) def walkrepos(path, followsym=False, seen_dirs=None, recurse=False): '''yield every hg repository under path, always recursively. 
The recurse flag will only control recursion into repo working dirs''' def errhandler(err): if err.filename == path: raise err samestat = getattr(os.path, 'samestat', None) if followsym and samestat is not None: def adddir(dirlst, dirname): match = False dirstat = os.stat(dirname) for lstdirstat in dirlst: if samestat(dirstat, lstdirstat): match = True break if not match: dirlst.append(dirstat) return not match else: followsym = False if (seen_dirs is None) and followsym: seen_dirs = [] adddir(seen_dirs, path) for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler): dirs.sort() if '.hg' in dirs: yield root # found a repository qroot = os.path.join(root, '.hg', 'patches') if os.path.isdir(os.path.join(qroot, '.hg')): yield qroot # we have a patch queue repo here if recurse: # avoid recursing inside the .hg directory dirs.remove('.hg') else: dirs[:] = [] # don't descend further elif followsym: newdirs = [] for d in dirs: fname = os.path.join(root, d) if adddir(seen_dirs, fname): if os.path.islink(fname): for hgname in walkrepos(fname, True, seen_dirs): yield hgname else: newdirs.append(d) dirs[:] = newdirs def osrcpath(): '''return default os-specific hgrc search path''' path = [] defaultpath = os.path.join(util.datapath, 'default.d') if os.path.isdir(defaultpath): for f, kind in osutil.listdir(defaultpath): if f.endswith('.rc'): path.append(os.path.join(defaultpath, f)) path.extend(systemrcpath()) path.extend(userrcpath()) path = [os.path.normpath(f) for f in path] return path _rcpath = None def rcpath(): '''return hgrc search path. if env var HGRCPATH is set, use it. for each item in path, if directory, use files ending in .rc, else use item. make HGRCPATH empty to only look in .hg/hgrc of current repo. 
if no HGRCPATH, use default os-specific path.''' global _rcpath if _rcpath is None: if 'HGRCPATH' in os.environ: _rcpath = [] for p in os.environ['HGRCPATH'].split(os.pathsep): if not p: continue p = util.expandpath(p) if os.path.isdir(p): for f, kind in osutil.listdir(p): if f.endswith('.rc'): _rcpath.append(os.path.join(p, f)) else: _rcpath.append(p) else: _rcpath = osrcpath() return _rcpath def intrev(rev): """Return integer for a given revision that can be used in comparison or arithmetic operation""" if rev is None: return wdirrev return rev def revsingle(repo, revspec, default='.'): if not revspec and revspec != 0: return repo[default] l = revrange(repo, [revspec]) if not l: raise error.Abort(_('empty revision set')) return repo[l.last()] def _pairspec(revspec): tree = revset.parse(revspec) tree = revset.optimize(tree, True)[1] # fix up "x^:y" -> "(x^):y" return tree and tree[0] in ('range', 'rangepre', 'rangepost', 'rangeall') def revpair(repo, revs): if not revs: return repo.dirstate.p1(), None l = revrange(repo, revs) if not l: first = second = None elif l.isascending(): first = l.min() second = l.max() elif l.isdescending(): first = l.max() second = l.min() else: first = l.first() second = l.last() if first is None: raise error.Abort(_('empty revision range')) if (first == second and len(revs) >= 2 and not all(revrange(repo, [r]) for r in revs)): raise error.Abort(_('empty revision on one side of range')) # if top-level is range expression, the result must always be a pair if first == second and len(revs) == 1 and not _pairspec(revs[0]): return repo.lookup(first), None return repo.lookup(first), repo.lookup(second) def revrange(repo, revs): """Yield revision as strings from a list of revision specifications.""" allspecs = [] for spec in revs: if isinstance(spec, int): spec = revset.formatspec('rev(%d)', spec) allspecs.append(spec) m = revset.matchany(repo.ui, allspecs, repo) return m(repo) def meaningfulparents(repo, ctx): """Return list of meaningful (or 
all if debug) parentrevs for rev. For merges (two non-nullrev revisions) both parents are meaningful. Otherwise the first parent revision is considered meaningful if it is not the preceding revision. """ parents = ctx.parents() if len(parents) > 1: return parents if repo.ui.debugflag: return [parents[0], repo['null']] if parents[0].rev() >= intrev(ctx.rev()) - 1: return [] return parents def expandpats(pats): '''Expand bare globs when running on windows. On posix we assume it already has already been done by sh.''' if not util.expandglobs: return list(pats) ret = [] for kindpat in pats: kind, pat = matchmod._patsplit(kindpat, None) if kind is None: try: globbed = glob.glob(pat) except re.error: globbed = [pat] if globbed: ret.extend(globbed) continue ret.append(kindpat) return ret def matchandpats(ctx, pats=(), opts=None, globbed=False, default='relpath', badfn=None): '''Return a matcher and the patterns that were used. The matcher will warn about bad matches, unless an alternate badfn callback is provided.''' if pats == ("",): pats = [] if opts is None: opts = {} if not globbed and default == 'relpath': pats = expandpats(pats or []) def bad(f, msg): ctx.repo().ui.warn("%s: %s\n" % (m.rel(f), msg)) if badfn is None: badfn = bad m = ctx.match(pats, opts.get('include'), opts.get('exclude'), default, listsubrepos=opts.get('subrepos'), badfn=badfn) if m.always(): pats = [] return m, pats def match(ctx, pats=(), opts=None, globbed=False, default='relpath', badfn=None): '''Return a matcher that will warn about bad matches.''' return matchandpats(ctx, pats, opts, globbed, default, badfn=badfn)[0] def matchall(repo): '''Return a matcher that will efficiently match everything.''' return matchmod.always(repo.root, repo.getcwd()) def matchfiles(repo, files, badfn=None): '''Return a matcher that will efficiently match exactly these files.''' return matchmod.exact(repo.root, repo.getcwd(), files, badfn=badfn) def origpath(ui, repo, filepath): '''customize where .orig files are 
created Fetch user defined path from config file: [ui] origbackuppath = Fall back to default (filepath) if not specified ''' origbackuppath = ui.config('ui', 'origbackuppath', None) if origbackuppath is None: return filepath + ".orig" filepathfromroot = os.path.relpath(filepath, start=repo.root) fullorigpath = repo.wjoin(origbackuppath, filepathfromroot) origbackupdir = repo.vfs.dirname(fullorigpath) if not repo.vfs.exists(origbackupdir): ui.note(_('creating directory: %s\n') % origbackupdir) util.makedirs(origbackupdir) return fullorigpath + ".orig" def addremove(repo, matcher, prefix, opts=None, dry_run=None, similarity=None): if opts is None: opts = {} m = matcher if dry_run is None: dry_run = opts.get('dry_run') if similarity is None: similarity = float(opts.get('similarity') or 0) ret = 0 join = lambda f: os.path.join(prefix, f) def matchessubrepo(matcher, subpath): if matcher.exact(subpath): return True for f in matcher.files(): if f.startswith(subpath): return True return False wctx = repo[None] for subpath in sorted(wctx.substate): if opts.get('subrepos') or matchessubrepo(m, subpath): sub = wctx.sub(subpath) try: submatch = matchmod.narrowmatcher(subpath, m) if sub.addremove(submatch, prefix, opts, dry_run, similarity): ret = 1 except error.LookupError: repo.ui.status(_("skipping missing subrepository: %s\n") % join(subpath)) rejected = [] def badfn(f, msg): if f in m.files(): m.bad(f, msg) rejected.append(f) badmatch = matchmod.badmatch(m, badfn) added, unknown, deleted, removed, forgotten = _interestingfiles(repo, badmatch) unknownset = set(unknown + forgotten) toprint = unknownset.copy() toprint.update(deleted) for abs in sorted(toprint): if repo.ui.verbose or not m.exact(abs): if abs in unknownset: status = _('adding %s\n') % m.uipath(abs) else: status = _('removing %s\n') % m.uipath(abs) repo.ui.status(status) renames = _findrenames(repo, m, added + unknown, removed + deleted, similarity) if not dry_run: _markchanges(repo, unknown + forgotten, 
deleted, renames) for f in rejected: if f in m.files(): return 1 return ret def marktouched(repo, files, similarity=0.0): '''Assert that files have somehow been operated upon. files are relative to the repo root.''' m = matchfiles(repo, files, badfn=lambda x, y: rejected.append(x)) rejected = [] added, unknown, deleted, removed, forgotten = _interestingfiles(repo, m) if repo.ui.verbose: unknownset = set(unknown + forgotten) toprint = unknownset.copy() toprint.update(deleted) for abs in sorted(toprint): if abs in unknownset: status = _('adding %s\n') % abs else: status = _('removing %s\n') % abs repo.ui.status(status) renames = _findrenames(repo, m, added + unknown, removed + deleted, similarity) _markchanges(repo, unknown + forgotten, deleted, renames) for f in rejected: if f in m.files(): return 1 return 0 def _interestingfiles(repo, matcher): '''Walk dirstate with matcher, looking for files that addremove would care about. This is different from dirstate.status because it doesn't care about whether files are modified or clean.''' added, unknown, deleted, removed, forgotten = [], [], [], [], [] audit_path = pathutil.pathauditor(repo.root) ctx = repo[None] dirstate = repo.dirstate walkresults = dirstate.walk(matcher, sorted(ctx.substate), True, False, full=False) for abs, st in walkresults.iteritems(): dstate = dirstate[abs] if dstate == '?' 
and audit_path.check(abs): unknown.append(abs) elif dstate != 'r' and not st: deleted.append(abs) elif dstate == 'r' and st: forgotten.append(abs) # for finding renames elif dstate == 'r' and not st: removed.append(abs) elif dstate == 'a': added.append(abs) return added, unknown, deleted, removed, forgotten def _findrenames(repo, matcher, added, removed, similarity): '''Find renames from removed files to added ones.''' renames = {} if similarity > 0: for old, new, score in similar.findrenames(repo, added, removed, similarity): if (repo.ui.verbose or not matcher.exact(old) or not matcher.exact(new)): repo.ui.status(_('recording removal of %s as rename to %s ' '(%d%% similar)\n') % (matcher.rel(old), matcher.rel(new), score * 100)) renames[new] = old return renames def _markchanges(repo, unknown, deleted, renames): '''Marks the files in unknown as added, the files in deleted as removed, and the files in renames as copied.''' wctx = repo[None] with repo.wlock(): wctx.forget(deleted) wctx.add(unknown) for new, old in renames.iteritems(): wctx.copy(old, new) def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None): """Update the dirstate to reflect the intent of copying src to dst. For different reasons it might not end with dst being marked as copied from src. """ origsrc = repo.dirstate.copied(src) or src if dst == origsrc: # copying back a copy? 
if repo.dirstate[dst] not in 'mn' and not dryrun: repo.dirstate.normallookup(dst) else: if repo.dirstate[origsrc] == 'a' and origsrc == src: if not ui.quiet: ui.warn(_("%s has not been committed yet, so no copy " "data will be stored for %s.\n") % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd))) if repo.dirstate[dst] in '?r' and not dryrun: wctx.add([dst]) elif not dryrun: wctx.copy(origsrc, dst) def readrequires(opener, supported): '''Reads and parses .hg/requires and checks if all entries found are in the list of supported features.''' requirements = set(opener.read("requires").splitlines()) missings = [] for r in requirements: if r not in supported: if not r or not r[0].isalnum(): raise error.RequirementError(_(".hg/requires file is corrupt")) missings.append(r) missings.sort() if missings: raise error.RequirementError( _("repository requires features unknown to this Mercurial: %s") % " ".join(missings), hint=_("see https://mercurial-scm.org/wiki/MissingRequirement" " for more information")) return requirements def writerequires(opener, requirements): with opener('requires', 'w') as fp: for r in sorted(requirements): fp.write("%s\n" % r) class filecachesubentry(object): def __init__(self, path, stat): self.path = path self.cachestat = None self._cacheable = None if stat: self.cachestat = filecachesubentry.stat(self.path) if self.cachestat: self._cacheable = self.cachestat.cacheable() else: # None means we don't know yet self._cacheable = None def refresh(self): if self.cacheable(): self.cachestat = filecachesubentry.stat(self.path) def cacheable(self): if self._cacheable is not None: return self._cacheable # we don't know yet, assume it is for now return True def changed(self): # no point in going further if we can't cache it if not self.cacheable(): return True newstat = filecachesubentry.stat(self.path) # we may not know if it's cacheable yet, check again now if newstat and self._cacheable is None: self._cacheable = newstat.cacheable() # check again if not 
self._cacheable: return True if self.cachestat != newstat: self.cachestat = newstat return True else: return False @staticmethod def stat(path): try: return util.cachestat(path) except OSError as e: if e.errno != errno.ENOENT: raise class filecacheentry(object): def __init__(self, paths, stat=True): self._entries = [] for path in paths: self._entries.append(filecachesubentry(path, stat)) def changed(self): '''true if any entry has changed''' for entry in self._entries: if entry.changed(): return True return False def refresh(self): for entry in self._entries: entry.refresh() class filecache(object): '''A property like decorator that tracks files under .hg/ for updates. Records stat info when called in _filecache. On subsequent calls, compares old stat info with new info, and recreates the object when any of the files changes, updating the new stat info in _filecache. Mercurial either atomic renames or appends for files under .hg, so to ensure the cache is reliable we need the filesystem to be able to tell us if a file has been replaced. If it can't, we fallback to recreating the object on every call (essentially the same behavior as propertycache). ''' def __init__(self, *paths): self.paths = paths def join(self, obj, fname): """Used to compute the runtime path of a cached file. Users should subclass filecache and provide their own version of this function to call the appropriate join function on 'obj' (an instance of the class that its member function was decorated). """ return obj.join(fname) def __call__(self, func): self.func = func self.name = func.__name__ return self def __get__(self, obj, type=None): # do we need to check if the file changed? 
if self.name in obj.__dict__: assert self.name in obj._filecache, self.name return obj.__dict__[self.name] entry = obj._filecache.get(self.name) if entry: if entry.changed(): entry.obj = self.func(obj) else: paths = [self.join(obj, path) for path in self.paths] # We stat -before- creating the object so our cache doesn't lie if # a writer modified between the time we read and stat entry = filecacheentry(paths, True) entry.obj = self.func(obj) obj._filecache[self.name] = entry obj.__dict__[self.name] = entry.obj return entry.obj def __set__(self, obj, value): if self.name not in obj._filecache: # we add an entry for the missing value because X in __dict__ # implies X in _filecache paths = [self.join(obj, path) for path in self.paths] ce = filecacheentry(paths, False) obj._filecache[self.name] = ce else: ce = obj._filecache[self.name] ce.obj = value # update cached copy obj.__dict__[self.name] = value # update copy returned by obj.x def __delete__(self, obj): try: del obj.__dict__[self.name] except KeyError: raise AttributeError(self.name) def _locksub(repo, lock, envvar, cmd, environ=None, *args, **kwargs): if lock is None: raise error.LockInheritanceContractViolation( 'lock can only be inherited while held') if environ is None: environ = {} with lock.inherit() as locker: environ[envvar] = locker return repo.ui.system(cmd, environ=environ, *args, **kwargs) def wlocksub(repo, cmd, *args, **kwargs): """run cmd as a subprocess that allows inheriting repo's wlock This can only be called while the wlock is held. 
This takes all the arguments that ui.system does, and returns the exit code of the subprocess.""" return _locksub(repo, repo.currentwlock(), 'HG_WLOCK_LOCKER', cmd, *args, **kwargs) def gdinitconfig(ui): """helper function to know if a repo should be created as general delta """ # experimental config: format.generaldelta return (ui.configbool('format', 'generaldelta', False) or ui.configbool('format', 'usegeneraldelta', True)) def gddeltaconfig(ui): """helper function to know if incoming delta should be optimised """ # experimental config: format.generaldelta return ui.configbool('format', 'generaldelta', False) class delayclosedfile(object): """Proxy for a file object whose close is delayed. Do not instantiate outside of the vfs layer. """ def __init__(self, fh, closer): object.__setattr__(self, '_origfh', fh) object.__setattr__(self, '_closer', closer) def __getattr__(self, attr): return getattr(self._origfh, attr) def __setattr__(self, attr, value): return setattr(self._origfh, attr, value) def __delattr__(self, attr): return delattr(self._origfh, attr) def __enter__(self): return self._origfh.__enter__() def __exit__(self, exc_type, exc_value, exc_tb): self._closer.close(self._origfh) def close(self): self._closer.close(self._origfh) class backgroundfilecloser(object): """Coordinates background closing of file handles on multiple threads.""" def __init__(self, ui, expectedcount=-1): self._running = False self._entered = False self._threads = [] self._threadexception = None # Only Windows/NTFS has slow file closing. So only enable by default # on that platform. But allow to be enabled elsewhere for testing. defaultenabled = os.name == 'nt' enabled = ui.configbool('worker', 'backgroundclose', defaultenabled) if not enabled: return # There is overhead to starting and stopping the background threads. # Don't do background processing unless the file count is large enough # to justify it. 
minfilecount = ui.configint('worker', 'backgroundcloseminfilecount', 2048) # FUTURE dynamically start background threads after minfilecount closes. # (We don't currently have any callers that don't know their file count) if expectedcount > 0 and expectedcount < minfilecount: return # Windows defaults to a limit of 512 open files. A buffer of 128 # should give us enough headway. maxqueue = ui.configint('worker', 'backgroundclosemaxqueue', 384) threadcount = ui.configint('worker', 'backgroundclosethreadcount', 4) ui.debug('starting %d threads for background file closing\n' % threadcount) self._queue = Queue.Queue(maxsize=maxqueue) self._running = True for i in range(threadcount): t = threading.Thread(target=self._worker, name='backgroundcloser') self._threads.append(t) t.start() def __enter__(self): self._entered = True return self def __exit__(self, exc_type, exc_value, exc_tb): self._running = False # Wait for threads to finish closing so open files don't linger for # longer than lifetime of context manager. for t in self._threads: t.join() def _worker(self): """Main routine for worker thread.""" while True: try: fh = self._queue.get(block=True, timeout=0.100) # Need to catch or the thread will terminate and # we could orphan file descriptors. try: fh.close() except Exception as e: # Stash so can re-raise from main thread later. self._threadexception = e except Queue.Empty: if not self._running: break def close(self, fh): """Schedule a file for closing.""" if not self._entered: raise error.Abort('can only call close() when context manager ' 'active') # If a background thread encountered an exception, raise now so we fail # fast. Otherwise we may potentially go on for minutes until the error # is acted on. if self._threadexception: e = self._threadexception self._threadexception = None raise e # If we're not actively running, close synchronously. 
if not self._running: fh.close() return self._queue.put(fh, block=True, timeout=None) mercurial-3.7.3/mercurial/dirs.c0000644000175000017500000001404412676531525016222 0ustar mpmmpm00000000000000/* dirs.c - dynamic directory diddling for dirstates Copyright 2013 Facebook This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. */ #define PY_SSIZE_T_CLEAN #include #include "util.h" /* * This is a multiset of directory names, built from the files that * appear in a dirstate or manifest. * * A few implementation notes: * * We modify Python integers for refcounting, but those integers are * never visible to Python code. * * We mutate strings in-place, but leave them immutable once they can * be seen by Python code. */ typedef struct { PyObject_HEAD PyObject *dict; } dirsObject; static inline Py_ssize_t _finddir(const char *path, Py_ssize_t pos) { while (pos != -1) { if (path[pos] == '/') break; pos -= 1; } return pos; } static int _addpath(PyObject *dirs, PyObject *path) { const char *cpath = PyString_AS_STRING(path); Py_ssize_t pos = PyString_GET_SIZE(path); PyObject *key = NULL; int ret = -1; while ((pos = _finddir(cpath, pos - 1)) != -1) { PyObject *val; /* It's likely that every prefix already has an entry in our dict. Try to avoid allocating and deallocating a string for each prefix we check. */ if (key != NULL) ((PyStringObject *)key)->ob_shash = -1; else { /* Force Python to not reuse a small shared string. */ key = PyString_FromStringAndSize(cpath, pos < 2 ? 2 : pos); if (key == NULL) goto bail; } PyString_GET_SIZE(key) = pos; PyString_AS_STRING(key)[pos] = '\0'; val = PyDict_GetItem(dirs, key); if (val != NULL) { PyInt_AS_LONG(val) += 1; break; } /* Force Python to not reuse a small shared int. 
*/ val = PyInt_FromLong(0x1eadbeef); if (val == NULL) goto bail; PyInt_AS_LONG(val) = 1; ret = PyDict_SetItem(dirs, key, val); Py_DECREF(val); if (ret == -1) goto bail; Py_CLEAR(key); } ret = 0; bail: Py_XDECREF(key); return ret; } static int _delpath(PyObject *dirs, PyObject *path) { char *cpath = PyString_AS_STRING(path); Py_ssize_t pos = PyString_GET_SIZE(path); PyObject *key = NULL; int ret = -1; while ((pos = _finddir(cpath, pos - 1)) != -1) { PyObject *val; key = PyString_FromStringAndSize(cpath, pos); if (key == NULL) goto bail; val = PyDict_GetItem(dirs, key); if (val == NULL) { PyErr_SetString(PyExc_ValueError, "expected a value, found none"); goto bail; } if (--PyInt_AS_LONG(val) <= 0) { if (PyDict_DelItem(dirs, key) == -1) goto bail; } else break; Py_CLEAR(key); } ret = 0; bail: Py_XDECREF(key); return ret; } static int dirs_fromdict(PyObject *dirs, PyObject *source, char skipchar) { PyObject *key, *value; Py_ssize_t pos = 0; while (PyDict_Next(source, &pos, &key, &value)) { if (!PyString_Check(key)) { PyErr_SetString(PyExc_TypeError, "expected string key"); return -1; } if (skipchar) { if (!dirstate_tuple_check(value)) { PyErr_SetString(PyExc_TypeError, "expected a dirstate tuple"); return -1; } if (((dirstateTupleObject *)value)->state == skipchar) continue; } if (_addpath(dirs, key) == -1) return -1; } return 0; } static int dirs_fromiter(PyObject *dirs, PyObject *source) { PyObject *iter, *item = NULL; int ret; iter = PyObject_GetIter(source); if (iter == NULL) return -1; while ((item = PyIter_Next(iter)) != NULL) { if (!PyString_Check(item)) { PyErr_SetString(PyExc_TypeError, "expected string"); break; } if (_addpath(dirs, item) == -1) break; Py_CLEAR(item); } ret = PyErr_Occurred() ? -1 : 0; Py_DECREF(iter); Py_XDECREF(item); return ret; } /* * Calculate a refcounted set of directory names for the files in a * dirstate. 
*/ static int dirs_init(dirsObject *self, PyObject *args) { PyObject *dirs = NULL, *source = NULL; char skipchar = 0; int ret = -1; self->dict = NULL; if (!PyArg_ParseTuple(args, "|Oc:__init__", &source, &skipchar)) return -1; dirs = PyDict_New(); if (dirs == NULL) return -1; if (source == NULL) ret = 0; else if (PyDict_Check(source)) ret = dirs_fromdict(dirs, source, skipchar); else if (skipchar) PyErr_SetString(PyExc_ValueError, "skip character is only supported " "with a dict source"); else ret = dirs_fromiter(dirs, source); if (ret == -1) Py_XDECREF(dirs); else self->dict = dirs; return ret; } PyObject *dirs_addpath(dirsObject *self, PyObject *args) { PyObject *path; if (!PyArg_ParseTuple(args, "O!:addpath", &PyString_Type, &path)) return NULL; if (_addpath(self->dict, path) == -1) return NULL; Py_RETURN_NONE; } static PyObject *dirs_delpath(dirsObject *self, PyObject *args) { PyObject *path; if (!PyArg_ParseTuple(args, "O!:delpath", &PyString_Type, &path)) return NULL; if (_delpath(self->dict, path) == -1) return NULL; Py_RETURN_NONE; } static int dirs_contains(dirsObject *self, PyObject *value) { return PyString_Check(value) ? 
PyDict_Contains(self->dict, value) : 0; } static void dirs_dealloc(dirsObject *self) { Py_XDECREF(self->dict); PyObject_Del(self); } static PyObject *dirs_iter(dirsObject *self) { return PyObject_GetIter(self->dict); } static PySequenceMethods dirs_sequence_methods; static PyMethodDef dirs_methods[] = { {"addpath", (PyCFunction)dirs_addpath, METH_VARARGS, "add a path"}, {"delpath", (PyCFunction)dirs_delpath, METH_VARARGS, "remove a path"}, {NULL} /* Sentinel */ }; static PyTypeObject dirsType = { PyObject_HEAD_INIT(NULL) }; void dirs_module_init(PyObject *mod) { dirs_sequence_methods.sq_contains = (objobjproc)dirs_contains; dirsType.tp_name = "parsers.dirs"; dirsType.tp_new = PyType_GenericNew; dirsType.tp_basicsize = sizeof(dirsObject); dirsType.tp_dealloc = (destructor)dirs_dealloc; dirsType.tp_as_sequence = &dirs_sequence_methods; dirsType.tp_flags = Py_TPFLAGS_DEFAULT; dirsType.tp_doc = "dirs"; dirsType.tp_iter = (getiterfunc)dirs_iter; dirsType.tp_methods = dirs_methods; dirsType.tp_init = (initproc)dirs_init; if (PyType_Ready(&dirsType) < 0) return; Py_INCREF(&dirsType); PyModule_AddObject(mod, "dirs", (PyObject *)&dirsType); } mercurial-3.7.3/mercurial/parsers.c0000644000175000017500000021110112676531525016731 0ustar mpmmpm00000000000000/* parsers.c - efficient content parsing Copyright 2008 Matt Mackall and others This software may be used and distributed according to the terms of the GNU General Public License, incorporated herein by reference. 
*/ #include #include #include #include #include "util.h" static char *versionerrortext = "Python minor version mismatch"; static int8_t hextable[256] = { -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, -1, -1, -1, -1, -1, -1, /* 0-9 */ -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, /* A-F */ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, /* a-f */ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1 }; static char lowertable[128] = { '\x00', '\x01', '\x02', '\x03', '\x04', '\x05', '\x06', '\x07', '\x08', '\x09', '\x0a', '\x0b', '\x0c', '\x0d', '\x0e', '\x0f', '\x10', '\x11', '\x12', '\x13', '\x14', '\x15', '\x16', '\x17', '\x18', '\x19', '\x1a', '\x1b', '\x1c', '\x1d', '\x1e', '\x1f', '\x20', '\x21', '\x22', '\x23', '\x24', '\x25', '\x26', '\x27', '\x28', '\x29', '\x2a', '\x2b', '\x2c', '\x2d', '\x2e', '\x2f', '\x30', '\x31', '\x32', '\x33', '\x34', '\x35', '\x36', '\x37', '\x38', '\x39', '\x3a', '\x3b', '\x3c', '\x3d', '\x3e', '\x3f', '\x40', '\x61', '\x62', '\x63', '\x64', '\x65', '\x66', '\x67', /* A-G */ '\x68', '\x69', '\x6a', '\x6b', '\x6c', '\x6d', '\x6e', '\x6f', /* H-O */ '\x70', '\x71', '\x72', '\x73', '\x74', '\x75', '\x76', '\x77', /* P-W */ '\x78', '\x79', '\x7a', /* X-Z */ 
'\x5b', '\x5c', '\x5d', '\x5e', '\x5f', '\x60', '\x61', '\x62', '\x63', '\x64', '\x65', '\x66', '\x67', '\x68', '\x69', '\x6a', '\x6b', '\x6c', '\x6d', '\x6e', '\x6f', '\x70', '\x71', '\x72', '\x73', '\x74', '\x75', '\x76', '\x77', '\x78', '\x79', '\x7a', '\x7b', '\x7c', '\x7d', '\x7e', '\x7f' }; static char uppertable[128] = { '\x00', '\x01', '\x02', '\x03', '\x04', '\x05', '\x06', '\x07', '\x08', '\x09', '\x0a', '\x0b', '\x0c', '\x0d', '\x0e', '\x0f', '\x10', '\x11', '\x12', '\x13', '\x14', '\x15', '\x16', '\x17', '\x18', '\x19', '\x1a', '\x1b', '\x1c', '\x1d', '\x1e', '\x1f', '\x20', '\x21', '\x22', '\x23', '\x24', '\x25', '\x26', '\x27', '\x28', '\x29', '\x2a', '\x2b', '\x2c', '\x2d', '\x2e', '\x2f', '\x30', '\x31', '\x32', '\x33', '\x34', '\x35', '\x36', '\x37', '\x38', '\x39', '\x3a', '\x3b', '\x3c', '\x3d', '\x3e', '\x3f', '\x40', '\x41', '\x42', '\x43', '\x44', '\x45', '\x46', '\x47', '\x48', '\x49', '\x4a', '\x4b', '\x4c', '\x4d', '\x4e', '\x4f', '\x50', '\x51', '\x52', '\x53', '\x54', '\x55', '\x56', '\x57', '\x58', '\x59', '\x5a', '\x5b', '\x5c', '\x5d', '\x5e', '\x5f', '\x60', '\x41', '\x42', '\x43', '\x44', '\x45', '\x46', '\x47', /* a-g */ '\x48', '\x49', '\x4a', '\x4b', '\x4c', '\x4d', '\x4e', '\x4f', /* h-o */ '\x50', '\x51', '\x52', '\x53', '\x54', '\x55', '\x56', '\x57', /* p-w */ '\x58', '\x59', '\x5a', /* x-z */ '\x7b', '\x7c', '\x7d', '\x7e', '\x7f' }; static inline int hexdigit(const char *p, Py_ssize_t off) { int8_t val = hextable[(unsigned char)p[off]]; if (val >= 0) { return val; } PyErr_SetString(PyExc_ValueError, "input contains non-hex character"); return 0; } /* * Turn a hex-encoded string into binary. 
*/ PyObject *unhexlify(const char *str, int len) { PyObject *ret; char *d; int i; ret = PyBytes_FromStringAndSize(NULL, len / 2); if (!ret) return NULL; d = PyBytes_AsString(ret); for (i = 0; i < len;) { int hi = hexdigit(str, i++); int lo = hexdigit(str, i++); *d++ = (hi << 4) | lo; } return ret; } static inline PyObject *_asciitransform(PyObject *str_obj, const char table[128], PyObject *fallback_fn) { char *str, *newstr; Py_ssize_t i, len; PyObject *newobj = NULL; PyObject *ret = NULL; str = PyBytes_AS_STRING(str_obj); len = PyBytes_GET_SIZE(str_obj); newobj = PyBytes_FromStringAndSize(NULL, len); if (!newobj) goto quit; newstr = PyBytes_AS_STRING(newobj); for (i = 0; i < len; i++) { char c = str[i]; if (c & 0x80) { if (fallback_fn != NULL) { ret = PyObject_CallFunctionObjArgs(fallback_fn, str_obj, NULL); } else { PyObject *err = PyUnicodeDecodeError_Create( "ascii", str, len, i, (i + 1), "unexpected code byte"); PyErr_SetObject(PyExc_UnicodeDecodeError, err); Py_XDECREF(err); } goto quit; } newstr[i] = table[(unsigned char)c]; } ret = newobj; Py_INCREF(ret); quit: Py_XDECREF(newobj); return ret; } static PyObject *asciilower(PyObject *self, PyObject *args) { PyObject *str_obj; if (!PyArg_ParseTuple(args, "O!:asciilower", &PyBytes_Type, &str_obj)) return NULL; return _asciitransform(str_obj, lowertable, NULL); } static PyObject *asciiupper(PyObject *self, PyObject *args) { PyObject *str_obj; if (!PyArg_ParseTuple(args, "O!:asciiupper", &PyBytes_Type, &str_obj)) return NULL; return _asciitransform(str_obj, uppertable, NULL); } static inline PyObject *_dict_new_presized(Py_ssize_t expected_size) { /* _PyDict_NewPresized expects a minused parameter, but it actually creates a dictionary that's the nearest power of two bigger than the parameter. For example, with the initial minused = 1000, the dictionary created has size 1024. 
Of course in a lot of cases that can be greater than the maximum load factor Python's dict object expects (= 2/3), so as soon as we cross the threshold we'll resize anyway. So create a dictionary that's at least 3/2 the size. */ return _PyDict_NewPresized(((1 + expected_size) / 2) * 3); } static PyObject *dict_new_presized(PyObject *self, PyObject *args) { Py_ssize_t expected_size; if (!PyArg_ParseTuple(args, "n:make_presized_dict", &expected_size)) return NULL; return _dict_new_presized(expected_size); } static PyObject *make_file_foldmap(PyObject *self, PyObject *args) { PyObject *dmap, *spec_obj, *normcase_fallback; PyObject *file_foldmap = NULL; enum normcase_spec spec; PyObject *k, *v; dirstateTupleObject *tuple; Py_ssize_t pos = 0; const char *table; if (!PyArg_ParseTuple(args, "O!O!O!:make_file_foldmap", &PyDict_Type, &dmap, &PyInt_Type, &spec_obj, &PyFunction_Type, &normcase_fallback)) goto quit; spec = (int)PyInt_AS_LONG(spec_obj); switch (spec) { case NORMCASE_LOWER: table = lowertable; break; case NORMCASE_UPPER: table = uppertable; break; case NORMCASE_OTHER: table = NULL; break; default: PyErr_SetString(PyExc_TypeError, "invalid normcasespec"); goto quit; } /* Add some more entries to deal with additions outside this function. 
*/ file_foldmap = _dict_new_presized((PyDict_Size(dmap) / 10) * 11); if (file_foldmap == NULL) goto quit; while (PyDict_Next(dmap, &pos, &k, &v)) { if (!dirstate_tuple_check(v)) { PyErr_SetString(PyExc_TypeError, "expected a dirstate tuple"); goto quit; } tuple = (dirstateTupleObject *)v; if (tuple->state != 'r') { PyObject *normed; if (table != NULL) { normed = _asciitransform(k, table, normcase_fallback); } else { normed = PyObject_CallFunctionObjArgs( normcase_fallback, k, NULL); } if (normed == NULL) goto quit; if (PyDict_SetItem(file_foldmap, normed, k) == -1) { Py_DECREF(normed); goto quit; } Py_DECREF(normed); } } return file_foldmap; quit: Py_XDECREF(file_foldmap); return NULL; } /* * This code assumes that a manifest is stitched together with newline * ('\n') characters. */ static PyObject *parse_manifest(PyObject *self, PyObject *args) { PyObject *mfdict, *fdict; char *str, *start, *end; int len; if (!PyArg_ParseTuple(args, "O!O!s#:parse_manifest", &PyDict_Type, &mfdict, &PyDict_Type, &fdict, &str, &len)) goto quit; start = str; end = str + len; while (start < end) { PyObject *file = NULL, *node = NULL; PyObject *flags = NULL; char *zero = NULL, *newline = NULL; ptrdiff_t nlen; zero = memchr(start, '\0', end - start); if (!zero) { PyErr_SetString(PyExc_ValueError, "manifest entry has no separator"); goto quit; } newline = memchr(zero + 1, '\n', end - (zero + 1)); if (!newline) { PyErr_SetString(PyExc_ValueError, "manifest contains trailing garbage"); goto quit; } file = PyBytes_FromStringAndSize(start, zero - start); if (!file) goto bail; nlen = newline - zero - 1; node = unhexlify(zero + 1, nlen > 40 ? 
40 : (int)nlen); if (!node) goto bail; if (nlen > 40) { flags = PyBytes_FromStringAndSize(zero + 41, nlen - 40); if (!flags) goto bail; if (PyDict_SetItem(fdict, file, flags) == -1) goto bail; } if (PyDict_SetItem(mfdict, file, node) == -1) goto bail; start = newline + 1; Py_XDECREF(flags); Py_XDECREF(node); Py_XDECREF(file); continue; bail: Py_XDECREF(flags); Py_XDECREF(node); Py_XDECREF(file); goto quit; } Py_INCREF(Py_None); return Py_None; quit: return NULL; } static inline dirstateTupleObject *make_dirstate_tuple(char state, int mode, int size, int mtime) { dirstateTupleObject *t = PyObject_New(dirstateTupleObject, &dirstateTupleType); if (!t) return NULL; t->state = state; t->mode = mode; t->size = size; t->mtime = mtime; return t; } static PyObject *dirstate_tuple_new(PyTypeObject *subtype, PyObject *args, PyObject *kwds) { /* We do all the initialization here and not a tp_init function because * dirstate_tuple is immutable. */ dirstateTupleObject *t; char state; int size, mode, mtime; if (!PyArg_ParseTuple(args, "ciii", &state, &mode, &size, &mtime)) return NULL; t = (dirstateTupleObject *)subtype->tp_alloc(subtype, 1); if (!t) return NULL; t->state = state; t->mode = mode; t->size = size; t->mtime = mtime; return (PyObject *)t; } static void dirstate_tuple_dealloc(PyObject *o) { PyObject_Del(o); } static Py_ssize_t dirstate_tuple_length(PyObject *o) { return 4; } static PyObject *dirstate_tuple_item(PyObject *o, Py_ssize_t i) { dirstateTupleObject *t = (dirstateTupleObject *)o; switch (i) { case 0: return PyBytes_FromStringAndSize(&t->state, 1); case 1: return PyInt_FromLong(t->mode); case 2: return PyInt_FromLong(t->size); case 3: return PyInt_FromLong(t->mtime); default: PyErr_SetString(PyExc_IndexError, "index out of range"); return NULL; } } static PySequenceMethods dirstate_tuple_sq = { dirstate_tuple_length, /* sq_length */ 0, /* sq_concat */ 0, /* sq_repeat */ dirstate_tuple_item, /* sq_item */ 0, /* sq_ass_item */ 0, /* sq_contains */ 0, /* 
sq_inplace_concat */ 0 /* sq_inplace_repeat */ }; PyTypeObject dirstateTupleType = { PyVarObject_HEAD_INIT(NULL, 0) "dirstate_tuple", /* tp_name */ sizeof(dirstateTupleObject),/* tp_basicsize */ 0, /* tp_itemsize */ (destructor)dirstate_tuple_dealloc, /* tp_dealloc */ 0, /* tp_print */ 0, /* tp_getattr */ 0, /* tp_setattr */ 0, /* tp_compare */ 0, /* tp_repr */ 0, /* tp_as_number */ &dirstate_tuple_sq, /* tp_as_sequence */ 0, /* tp_as_mapping */ 0, /* tp_hash */ 0, /* tp_call */ 0, /* tp_str */ 0, /* tp_getattro */ 0, /* tp_setattro */ 0, /* tp_as_buffer */ Py_TPFLAGS_DEFAULT, /* tp_flags */ "dirstate tuple", /* tp_doc */ 0, /* tp_traverse */ 0, /* tp_clear */ 0, /* tp_richcompare */ 0, /* tp_weaklistoffset */ 0, /* tp_iter */ 0, /* tp_iternext */ 0, /* tp_methods */ 0, /* tp_members */ 0, /* tp_getset */ 0, /* tp_base */ 0, /* tp_dict */ 0, /* tp_descr_get */ 0, /* tp_descr_set */ 0, /* tp_dictoffset */ 0, /* tp_init */ 0, /* tp_alloc */ dirstate_tuple_new, /* tp_new */ }; static PyObject *parse_dirstate(PyObject *self, PyObject *args) { PyObject *dmap, *cmap, *parents = NULL, *ret = NULL; PyObject *fname = NULL, *cname = NULL, *entry = NULL; char state, *cur, *str, *cpos; int mode, size, mtime; unsigned int flen, len, pos = 40; int readlen; if (!PyArg_ParseTuple(args, "O!O!s#:parse_dirstate", &PyDict_Type, &dmap, &PyDict_Type, &cmap, &str, &readlen)) goto quit; len = readlen; /* read parents */ if (len < 40) { PyErr_SetString( PyExc_ValueError, "too little data for parents"); goto quit; } parents = Py_BuildValue("s#s#", str, 20, str + 20, 20); if (!parents) goto quit; /* read filenames */ while (pos >= 40 && pos < len) { if (pos + 17 > len) { PyErr_SetString(PyExc_ValueError, "overflow in dirstate"); goto quit; } cur = str + pos; /* unpack header */ state = *cur; mode = getbe32(cur + 1); size = getbe32(cur + 5); mtime = getbe32(cur + 9); flen = getbe32(cur + 13); pos += 17; cur += 17; if (flen > len - pos) { PyErr_SetString(PyExc_ValueError, "overflow in 
dirstate"); /* completes the "overflow in dirstate" message opened on the previous line */
			goto quit;
		}

		/* build the immutable (state, mode, size, mtime) record */
		entry = (PyObject *)make_dirstate_tuple(state, mode,
							size, mtime);

		/* a NUL inside the name field means the entry also carries
		 * a copy source: "<name>\0<copy source>" */
		cpos = memchr(cur, 0, flen);
		if (cpos) {
			fname = PyBytes_FromStringAndSize(cur, cpos - cur);
			cname = PyBytes_FromStringAndSize(cpos + 1,
							  flen - (cpos - cur) - 1);
			if (!fname || !cname ||
			    PyDict_SetItem(cmap, fname, cname) == -1 ||
			    PyDict_SetItem(dmap, fname, entry) == -1)
				goto quit;
			Py_DECREF(cname);
		} else {
			fname = PyBytes_FromStringAndSize(cur, flen);
			if (!fname ||
			    PyDict_SetItem(dmap, fname, entry) == -1)
				goto quit;
		}
		Py_DECREF(fname);
		Py_DECREF(entry);
		/* clear so the quit path does not double-free on a later
		 * iteration's failure */
		fname = cname = entry = NULL;
		pos += flen;
	}

	ret = parents;
	Py_INCREF(ret);
quit:
	Py_XDECREF(fname);
	Py_XDECREF(cname);
	Py_XDECREF(entry);
	Py_XDECREF(parents);
	return ret;
}

/*
 * Build a set of non-normal entries from the dirstate dmap
 *
 * dmap maps filename -> dirstate tuple.  The result is the set of
 * filenames whose state is not 'n' or whose mtime is unset (-1).
 */
static PyObject *nonnormalentries(PyObject *self, PyObject *args)
{
	PyObject *dmap, *nonnset = NULL, *fname, *v;
	Py_ssize_t pos;

	if (!PyArg_ParseTuple(args, "O!:nonnormalentries",
			      &PyDict_Type, &dmap))
		goto bail;

	nonnset = PySet_New(NULL);
	if (nonnset == NULL)
		goto bail;

	pos = 0;
	while (PyDict_Next(dmap, &pos, &fname, &v)) {
		dirstateTupleObject *t;
		if (!dirstate_tuple_check(v)) {
			PyErr_SetString(PyExc_TypeError,
					"expected a dirstate tuple");
			goto bail;
		}
		t = (dirstateTupleObject *)v;

		/* "normal" entries with a known mtime are the only ones
		 * excluded from the set */
		if (t->state == 'n' && t->mtime != -1)
			continue;
		if (PySet_Add(nonnset, fname) == -1)
			goto bail;
	}
	return nonnset;
bail:
	Py_XDECREF(nonnset);
	return NULL;
}

/*
 * Efficiently pack a dirstate object into its on-disk format.
*/ static PyObject *pack_dirstate(PyObject *self, PyObject *args) { PyObject *packobj = NULL; PyObject *map, *copymap, *pl, *mtime_unset = NULL; Py_ssize_t nbytes, pos, l; PyObject *k, *v = NULL, *pn; char *p, *s; int now; if (!PyArg_ParseTuple(args, "O!O!Oi:pack_dirstate", &PyDict_Type, &map, &PyDict_Type, ©map, &pl, &now)) return NULL; if (!PySequence_Check(pl) || PySequence_Size(pl) != 2) { PyErr_SetString(PyExc_TypeError, "expected 2-element sequence"); return NULL; } /* Figure out how much we need to allocate. */ for (nbytes = 40, pos = 0; PyDict_Next(map, &pos, &k, &v);) { PyObject *c; if (!PyString_Check(k)) { PyErr_SetString(PyExc_TypeError, "expected string key"); goto bail; } nbytes += PyString_GET_SIZE(k) + 17; c = PyDict_GetItem(copymap, k); if (c) { if (!PyString_Check(c)) { PyErr_SetString(PyExc_TypeError, "expected string key"); goto bail; } nbytes += PyString_GET_SIZE(c) + 1; } } packobj = PyString_FromStringAndSize(NULL, nbytes); if (packobj == NULL) goto bail; p = PyString_AS_STRING(packobj); pn = PySequence_ITEM(pl, 0); if (PyString_AsStringAndSize(pn, &s, &l) == -1 || l != 20) { PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash"); goto bail; } memcpy(p, s, l); p += 20; pn = PySequence_ITEM(pl, 1); if (PyString_AsStringAndSize(pn, &s, &l) == -1 || l != 20) { PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash"); goto bail; } memcpy(p, s, l); p += 20; for (pos = 0; PyDict_Next(map, &pos, &k, &v); ) { dirstateTupleObject *tuple; char state; int mode, size, mtime; Py_ssize_t len, l; PyObject *o; char *t; if (!dirstate_tuple_check(v)) { PyErr_SetString(PyExc_TypeError, "expected a dirstate tuple"); goto bail; } tuple = (dirstateTupleObject *)v; state = tuple->state; mode = tuple->mode; size = tuple->size; mtime = tuple->mtime; if (state == 'n' && mtime == now) { /* See pure/parsers.py:pack_dirstate for why we do * this. 
*/ mtime = -1; mtime_unset = (PyObject *)make_dirstate_tuple( state, mode, size, mtime); if (!mtime_unset) goto bail; if (PyDict_SetItem(map, k, mtime_unset) == -1) goto bail; Py_DECREF(mtime_unset); mtime_unset = NULL; } *p++ = state; putbe32((uint32_t)mode, p); putbe32((uint32_t)size, p + 4); putbe32((uint32_t)mtime, p + 8); t = p + 12; p += 16; len = PyString_GET_SIZE(k); memcpy(p, PyString_AS_STRING(k), len); p += len; o = PyDict_GetItem(copymap, k); if (o) { *p++ = '\0'; l = PyString_GET_SIZE(o); memcpy(p, PyString_AS_STRING(o), l); p += l; len += l + 1; } putbe32((uint32_t)len, t); } pos = p - PyString_AS_STRING(packobj); if (pos != nbytes) { PyErr_Format(PyExc_SystemError, "bad dirstate size: %ld != %ld", (long)pos, (long)nbytes); goto bail; } return packobj; bail: Py_XDECREF(mtime_unset); Py_XDECREF(packobj); Py_XDECREF(v); return NULL; } /* * A base-16 trie for fast node->rev mapping. * * Positive value is index of the next node in the trie * Negative value is a leaf: -(rev + 1) * Zero is empty */ typedef struct { int children[16]; } nodetree; /* * This class has two behaviors. * * When used in a list-like way (with integer keys), we decode an * entry in a RevlogNG index file on demand. Our last entry is a * sentinel, always a nullid. We have limited support for * integer-keyed insert and delete, only at elements right before the * sentinel. * * With string keys, we lazily perform a reverse mapping from node to * rev, using a base-16 trie. */ typedef struct { PyObject_HEAD /* Type-specific fields go here. 
*/ PyObject *data; /* raw bytes of index */ PyObject **cache; /* cached tuples */ const char **offsets; /* populated on demand */ Py_ssize_t raw_length; /* original number of elements */ Py_ssize_t length; /* current number of elements */ PyObject *added; /* populated on demand */ PyObject *headrevs; /* cache, invalidated on changes */ PyObject *filteredrevs;/* filtered revs set */ nodetree *nt; /* base-16 trie */ unsigned ntlength; /* # nodes in use */ unsigned ntcapacity; /* # nodes allocated */ int ntdepth; /* maximum depth of tree */ int ntsplits; /* # splits performed */ int ntrev; /* last rev scanned */ int ntlookups; /* # lookups */ int ntmisses; /* # lookups that miss the cache */ int inlined; } indexObject; static Py_ssize_t index_length(const indexObject *self) { if (self->added == NULL) return self->length; return self->length + PyList_GET_SIZE(self->added); } static PyObject *nullentry; static const char nullid[20]; static Py_ssize_t inline_scan(indexObject *self, const char **offsets); #if LONG_MAX == 0x7fffffffL static char *tuple_format = "Kiiiiiis#"; #else static char *tuple_format = "kiiiiiis#"; #endif /* A RevlogNG v1 index entry is 64 bytes long. */ static const long v1_hdrsize = 64; /* * Return a pointer to the beginning of a RevlogNG record. 
*/ static const char *index_deref(indexObject *self, Py_ssize_t pos) { if (self->inlined && pos > 0) { if (self->offsets == NULL) { self->offsets = malloc(self->raw_length * sizeof(*self->offsets)); if (self->offsets == NULL) return (const char *)PyErr_NoMemory(); inline_scan(self, self->offsets); } return self->offsets[pos]; } return PyString_AS_STRING(self->data) + pos * v1_hdrsize; } static inline int index_get_parents(indexObject *self, Py_ssize_t rev, int *ps, int maxrev) { if (rev >= self->length - 1) { PyObject *tuple = PyList_GET_ITEM(self->added, rev - self->length + 1); ps[0] = (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 5)); ps[1] = (int)PyInt_AS_LONG(PyTuple_GET_ITEM(tuple, 6)); } else { const char *data = index_deref(self, rev); ps[0] = getbe32(data + 24); ps[1] = getbe32(data + 28); } /* If index file is corrupted, ps[] may point to invalid revisions. So * there is a risk of buffer overflow to trust them unconditionally. */ if (ps[0] > maxrev || ps[1] > maxrev) { PyErr_SetString(PyExc_ValueError, "parent out of range"); return -1; } return 0; } /* * RevlogNG format (all in big endian, data may be inlined): * 6 bytes: offset * 2 bytes: flags * 4 bytes: compressed length * 4 bytes: uncompressed length * 4 bytes: base revision * 4 bytes: link revision * 4 bytes: parent 1 revision * 4 bytes: parent 2 revision * 32 bytes: nodeid (only 20 bytes used) */ static PyObject *index_get(indexObject *self, Py_ssize_t pos) { uint64_t offset_flags; int comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2; const char *c_node_id; const char *data; Py_ssize_t length = index_length(self); PyObject *entry; if (pos < 0) pos += length; if (pos < 0 || pos >= length) { PyErr_SetString(PyExc_IndexError, "revlog index out of range"); return NULL; } if (pos == length - 1) { Py_INCREF(nullentry); return nullentry; } if (pos >= self->length - 1) { PyObject *obj; obj = PyList_GET_ITEM(self->added, pos - self->length + 1); Py_INCREF(obj); return obj; } if (self->cache) { if 
(self->cache[pos]) { Py_INCREF(self->cache[pos]); return self->cache[pos]; } } else { self->cache = calloc(self->raw_length, sizeof(PyObject *)); if (self->cache == NULL) return PyErr_NoMemory(); } data = index_deref(self, pos); if (data == NULL) return NULL; offset_flags = getbe32(data + 4); if (pos == 0) /* mask out version number for the first entry */ offset_flags &= 0xFFFF; else { uint32_t offset_high = getbe32(data); offset_flags |= ((uint64_t)offset_high) << 32; } comp_len = getbe32(data + 8); uncomp_len = getbe32(data + 12); base_rev = getbe32(data + 16); link_rev = getbe32(data + 20); parent_1 = getbe32(data + 24); parent_2 = getbe32(data + 28); c_node_id = data + 32; entry = Py_BuildValue(tuple_format, offset_flags, comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2, c_node_id, 20); if (entry) { PyObject_GC_UnTrack(entry); Py_INCREF(entry); } self->cache[pos] = entry; return entry; } /* * Return the 20-byte SHA of the node corresponding to the given rev. */ static const char *index_node(indexObject *self, Py_ssize_t pos) { Py_ssize_t length = index_length(self); const char *data; if (pos == length - 1 || pos == INT_MAX) return nullid; if (pos >= length) return NULL; if (pos >= self->length - 1) { PyObject *tuple, *str; tuple = PyList_GET_ITEM(self->added, pos - self->length + 1); str = PyTuple_GetItem(tuple, 7); return str ? PyString_AS_STRING(str) : NULL; } data = index_deref(self, pos); return data ? 
data + 32 : NULL; /* completes index_node: node id lives at record offset 32 */
}

static int nt_insert(indexObject *self, const char *node, int rev);

/*
 * Validate that obj is a 20-byte string; on success store its buffer
 * and length through node/nodelen and return 0, otherwise set
 * ValueError and return -1.
 */
static int node_check(PyObject *obj, char **node, Py_ssize_t *nodelen)
{
	if (PyString_AsStringAndSize(obj, node, nodelen) == -1)
		return -1;
	if (*nodelen == 20)
		return 0;
	PyErr_SetString(PyExc_ValueError, "20-byte hash required");
	return -1;
}

/*
 * Append an 8-tuple entry to the index.  Only insertion at the slot
 * just before the nullid sentinel (index -1) is supported.
 */
static PyObject *index_insert(indexObject *self, PyObject *args)
{
	PyObject *obj;
	char *node;
	int index;
	Py_ssize_t len, nodelen;

	if (!PyArg_ParseTuple(args, "iO", &index, &obj))
		return NULL;

	if (!PyTuple_Check(obj) || PyTuple_GET_SIZE(obj) != 8) {
		PyErr_SetString(PyExc_TypeError, "8-tuple required");
		return NULL;
	}

	/* item 7 of the entry tuple is the node hash */
	if (node_check(PyTuple_GET_ITEM(obj, 7), &node, &nodelen) == -1)
		return NULL;

	len = index_length(self);

	if (index < 0)
		index += len;

	if (index != len - 1) {
		PyErr_SetString(PyExc_IndexError,
				"insert only supported at index -1");
		return NULL;
	}

	/* self->added is created lazily on first insert */
	if (self->added == NULL) {
		self->added = PyList_New(0);
		if (self->added == NULL)
			return NULL;
	}

	if (PyList_Append(self->added, obj) == -1)
		return NULL;

	/* keep the node->rev trie in sync if it has been built */
	if (self->nt)
		nt_insert(self, node, index);

	/* any insertion invalidates the cached head list */
	Py_CLEAR(self->headrevs);
	Py_RETURN_NONE;
}

/* Release every lazily-built cache attached to the index. */
static void _index_clearcaches(indexObject *self)
{
	if (self->cache) {
		Py_ssize_t i;

		for (i = 0; i < self->raw_length; i++)
			Py_CLEAR(self->cache[i]);
		free(self->cache);
		self->cache = NULL;
	}
	if (self->offsets) {
		free(self->offsets);
		self->offsets = NULL;
	}
	if (self->nt) {
		free(self->nt);
		self->nt = NULL;
	}
	Py_CLEAR(self->headrevs);
}

static PyObject *index_clearcaches(indexObject *self)
{
	_index_clearcaches(self);
	/* also reset the node-trie bookkeeping counters */
	self->ntlength = self->ntcapacity = 0;
	self->ntdepth = self->ntsplits = 0;
	self->ntrev = -1;
	self->ntlookups = self->ntmisses = 0;
	Py_RETURN_NONE;
}

/* Return a dict of statistics describing the index and its caches. */
static PyObject *index_stats(indexObject *self)
{
	PyObject *obj = PyDict_New();
	PyObject *t = NULL;

	if (obj == NULL)
		return NULL;

	/* store self-><__n> in obj under key __d, bailing on failure */
#define istat(__n, __d) \
	t = PyInt_FromSsize_t(self->__n); \
	if (!t) \
		goto bail; \
	if (PyDict_SetItemString(obj, __d, t) == -1) \
		goto bail; \
	Py_DECREF(t);

	if
(self->added) { Py_ssize_t len = PyList_GET_SIZE(self->added); t = PyInt_FromSsize_t(len); if (!t) goto bail; if (PyDict_SetItemString(obj, "index entries added", t) == -1) goto bail; Py_DECREF(t); } if (self->raw_length != self->length - 1) istat(raw_length, "revs on disk"); istat(length, "revs in memory"); istat(ntcapacity, "node trie capacity"); istat(ntdepth, "node trie depth"); istat(ntlength, "node trie count"); istat(ntlookups, "node trie lookups"); istat(ntmisses, "node trie misses"); istat(ntrev, "node trie last rev scanned"); istat(ntsplits, "node trie splits"); #undef istat return obj; bail: Py_XDECREF(obj); Py_XDECREF(t); return NULL; } /* * When we cache a list, we want to be sure the caller can't mutate * the cached copy. */ static PyObject *list_copy(PyObject *list) { Py_ssize_t len = PyList_GET_SIZE(list); PyObject *newlist = PyList_New(len); Py_ssize_t i; if (newlist == NULL) return NULL; for (i = 0; i < len; i++) { PyObject *obj = PyList_GET_ITEM(list, i); Py_INCREF(obj); PyList_SET_ITEM(newlist, i, obj); } return newlist; } static int check_filter(PyObject *filter, Py_ssize_t arg) { if (filter) { PyObject *arglist, *result; int isfiltered; arglist = Py_BuildValue("(n)", arg); if (!arglist) { return -1; } result = PyEval_CallObject(filter, arglist); Py_DECREF(arglist); if (!result) { return -1; } /* PyObject_IsTrue returns 1 if true, 0 if false, -1 if error, * same as this function, so we can just return it directly.*/ isfiltered = PyObject_IsTrue(result); Py_DECREF(result); return isfiltered; } else { return 0; } } static Py_ssize_t add_roots_get_min(indexObject *self, PyObject *list, Py_ssize_t marker, char *phases) { PyObject *iter = NULL; PyObject *iter_item = NULL; Py_ssize_t min_idx = index_length(self) + 1; long iter_item_long; if (PyList_GET_SIZE(list) != 0) { iter = PyObject_GetIter(list); if (iter == NULL) return -2; while ((iter_item = PyIter_Next(iter))) { iter_item_long = PyInt_AS_LONG(iter_item); Py_DECREF(iter_item); if 
(iter_item_long < min_idx) min_idx = iter_item_long; phases[iter_item_long] = marker; } Py_DECREF(iter); } return min_idx; } static inline void set_phase_from_parents(char *phases, int parent_1, int parent_2, Py_ssize_t i) { if (parent_1 >= 0 && phases[parent_1] > phases[i]) phases[i] = phases[parent_1]; if (parent_2 >= 0 && phases[parent_2] > phases[i]) phases[i] = phases[parent_2]; } static PyObject *reachableroots2(indexObject *self, PyObject *args) { /* Input */ long minroot; PyObject *includepatharg = NULL; int includepath = 0; /* heads and roots are lists */ PyObject *heads = NULL; PyObject *roots = NULL; PyObject *reachable = NULL; PyObject *val; Py_ssize_t len = index_length(self) - 1; long revnum; Py_ssize_t k; Py_ssize_t i; Py_ssize_t l; int r; int parents[2]; /* Internal data structure: * tovisit: array of length len+1 (all revs + nullrev), filled upto lentovisit * revstates: array of length len+1 (all revs + nullrev) */ int *tovisit = NULL; long lentovisit = 0; enum { RS_SEEN = 1, RS_ROOT = 2, RS_REACHABLE = 4 }; char *revstates = NULL; /* Get arguments */ if (!PyArg_ParseTuple(args, "lO!O!O!", &minroot, &PyList_Type, &heads, &PyList_Type, &roots, &PyBool_Type, &includepatharg)) goto bail; if (includepatharg == Py_True) includepath = 1; /* Initialize return set */ reachable = PyList_New(0); if (reachable == NULL) goto bail; /* Initialize internal datastructures */ tovisit = (int *)malloc((len + 1) * sizeof(int)); if (tovisit == NULL) { PyErr_NoMemory(); goto bail; } revstates = (char *)calloc(len + 1, 1); if (revstates == NULL) { PyErr_NoMemory(); goto bail; } l = PyList_GET_SIZE(roots); for (i = 0; i < l; i++) { revnum = PyInt_AsLong(PyList_GET_ITEM(roots, i)); if (revnum == -1 && PyErr_Occurred()) goto bail; /* If root is out of range, e.g. wdir(), it must be unreachable * from heads. So we can just ignore it. 
*/ if (revnum + 1 < 0 || revnum + 1 >= len + 1) continue; revstates[revnum + 1] |= RS_ROOT; } /* Populate tovisit with all the heads */ l = PyList_GET_SIZE(heads); for (i = 0; i < l; i++) { revnum = PyInt_AsLong(PyList_GET_ITEM(heads, i)); if (revnum == -1 && PyErr_Occurred()) goto bail; if (revnum + 1 < 0 || revnum + 1 >= len + 1) { PyErr_SetString(PyExc_IndexError, "head out of range"); goto bail; } if (!(revstates[revnum + 1] & RS_SEEN)) { tovisit[lentovisit++] = (int)revnum; revstates[revnum + 1] |= RS_SEEN; } } /* Visit the tovisit list and find the reachable roots */ k = 0; while (k < lentovisit) { /* Add the node to reachable if it is a root*/ revnum = tovisit[k++]; if (revstates[revnum + 1] & RS_ROOT) { revstates[revnum + 1] |= RS_REACHABLE; val = PyInt_FromLong(revnum); if (val == NULL) goto bail; r = PyList_Append(reachable, val); Py_DECREF(val); if (r < 0) goto bail; if (includepath == 0) continue; } /* Add its parents to the list of nodes to visit */ if (revnum == -1) continue; r = index_get_parents(self, revnum, parents, (int)len - 1); if (r < 0) goto bail; for (i = 0; i < 2; i++) { if (!(revstates[parents[i] + 1] & RS_SEEN) && parents[i] >= minroot) { tovisit[lentovisit++] = parents[i]; revstates[parents[i] + 1] |= RS_SEEN; } } } /* Find all the nodes in between the roots we found and the heads * and add them to the reachable set */ if (includepath == 1) { long minidx = minroot; if (minidx < 0) minidx = 0; for (i = minidx; i < len; i++) { if (!(revstates[i + 1] & RS_SEEN)) continue; r = index_get_parents(self, i, parents, (int)len - 1); /* Corrupted index file, error is set from * index_get_parents */ if (r < 0) goto bail; if (((revstates[parents[0] + 1] | revstates[parents[1] + 1]) & RS_REACHABLE) && !(revstates[i + 1] & RS_REACHABLE)) { revstates[i + 1] |= RS_REACHABLE; val = PyInt_FromLong(i); if (val == NULL) goto bail; r = PyList_Append(reachable, val); Py_DECREF(val); if (r < 0) goto bail; } } } free(revstates); free(tovisit); return reachable; 
bail: Py_XDECREF(reachable); free(revstates); free(tovisit); return NULL; } static PyObject *compute_phases_map_sets(indexObject *self, PyObject *args) { PyObject *roots = Py_None; PyObject *ret = NULL; PyObject *phaseslist = NULL; PyObject *phaseroots = NULL; PyObject *phaseset = NULL; PyObject *phasessetlist = NULL; PyObject *rev = NULL; Py_ssize_t len = index_length(self) - 1; Py_ssize_t numphase = 0; Py_ssize_t minrevallphases = 0; Py_ssize_t minrevphase = 0; Py_ssize_t i = 0; char *phases = NULL; long phase; if (!PyArg_ParseTuple(args, "O", &roots)) goto done; if (roots == NULL || !PyList_Check(roots)) goto done; phases = calloc(len, 1); /* phase per rev: {0: public, 1: draft, 2: secret} */ if (phases == NULL) { PyErr_NoMemory(); goto done; } /* Put the phase information of all the roots in phases */ numphase = PyList_GET_SIZE(roots)+1; minrevallphases = len + 1; phasessetlist = PyList_New(numphase); if (phasessetlist == NULL) goto done; PyList_SET_ITEM(phasessetlist, 0, Py_None); Py_INCREF(Py_None); for (i = 0; i < numphase-1; i++) { phaseroots = PyList_GET_ITEM(roots, i); phaseset = PySet_New(NULL); if (phaseset == NULL) goto release; PyList_SET_ITEM(phasessetlist, i+1, phaseset); if (!PyList_Check(phaseroots)) goto release; minrevphase = add_roots_get_min(self, phaseroots, i+1, phases); if (minrevphase == -2) /* Error from add_roots_get_min */ goto release; minrevallphases = MIN(minrevallphases, minrevphase); } /* Propagate the phase information from the roots to the revs */ if (minrevallphases != -1) { int parents[2]; for (i = minrevallphases; i < len; i++) { if (index_get_parents(self, i, parents, (int)len - 1) < 0) goto release; set_phase_from_parents(phases, parents[0], parents[1], i); } } /* Transform phase list to a python list */ phaseslist = PyList_New(len); if (phaseslist == NULL) goto release; for (i = 0; i < len; i++) { PyObject *phaseval; phase = phases[i]; /* We only store the sets of phase for non public phase, the public phase * is computed 
as a difference */ if (phase != 0) { phaseset = PyList_GET_ITEM(phasessetlist, phase); rev = PyInt_FromLong(i); if (rev == NULL) goto release; PySet_Add(phaseset, rev); Py_XDECREF(rev); } phaseval = PyInt_FromLong(phase); if (phaseval == NULL) goto release; PyList_SET_ITEM(phaseslist, i, phaseval); } ret = PyTuple_Pack(2, phaseslist, phasessetlist); release: Py_XDECREF(phaseslist); Py_XDECREF(phasessetlist); done: free(phases); return ret; } static PyObject *index_headrevs(indexObject *self, PyObject *args) { Py_ssize_t i, j, len; char *nothead = NULL; PyObject *heads = NULL; PyObject *filter = NULL; PyObject *filteredrevs = Py_None; if (!PyArg_ParseTuple(args, "|O", &filteredrevs)) { return NULL; } if (self->headrevs && filteredrevs == self->filteredrevs) return list_copy(self->headrevs); Py_DECREF(self->filteredrevs); self->filteredrevs = filteredrevs; Py_INCREF(filteredrevs); if (filteredrevs != Py_None) { filter = PyObject_GetAttrString(filteredrevs, "__contains__"); if (!filter) { PyErr_SetString(PyExc_TypeError, "filteredrevs has no attribute __contains__"); goto bail; } } len = index_length(self) - 1; heads = PyList_New(0); if (heads == NULL) goto bail; if (len == 0) { PyObject *nullid = PyInt_FromLong(-1); if (nullid == NULL || PyList_Append(heads, nullid) == -1) { Py_XDECREF(nullid); goto bail; } goto done; } nothead = calloc(len, 1); if (nothead == NULL) { PyErr_NoMemory(); goto bail; } for (i = 0; i < len; i++) { int isfiltered; int parents[2]; isfiltered = check_filter(filter, i); if (isfiltered == -1) { PyErr_SetString(PyExc_TypeError, "unable to check filter"); goto bail; } if (isfiltered) { nothead[i] = 1; continue; } if (index_get_parents(self, i, parents, (int)len - 1) < 0) goto bail; for (j = 0; j < 2; j++) { if (parents[j] >= 0) nothead[parents[j]] = 1; } } for (i = 0; i < len; i++) { PyObject *head; if (nothead[i]) continue; head = PyInt_FromSsize_t(i); if (head == NULL || PyList_Append(heads, head) == -1) { Py_XDECREF(head); goto bail; } } 
done: self->headrevs = heads; Py_XDECREF(filter); free(nothead); return list_copy(self->headrevs); bail: Py_XDECREF(filter); Py_XDECREF(heads); free(nothead); return NULL; } static inline int nt_level(const char *node, Py_ssize_t level) { int v = node[level>>1]; if (!(level & 1)) v >>= 4; return v & 0xf; } /* * Return values: * * -4: match is ambiguous (multiple candidates) * -2: not found * rest: valid rev */ static int nt_find(indexObject *self, const char *node, Py_ssize_t nodelen, int hex) { int (*getnybble)(const char *, Py_ssize_t) = hex ? hexdigit : nt_level; int level, maxlevel, off; if (nodelen == 20 && node[0] == '\0' && memcmp(node, nullid, 20) == 0) return -1; if (self->nt == NULL) return -2; if (hex) maxlevel = nodelen > 40 ? 40 : (int)nodelen; else maxlevel = nodelen > 20 ? 40 : ((int)nodelen * 2); for (level = off = 0; level < maxlevel; level++) { int k = getnybble(node, level); nodetree *n = &self->nt[off]; int v = n->children[k]; if (v < 0) { const char *n; Py_ssize_t i; v = -(v + 1); n = index_node(self, v); if (n == NULL) return -2; for (i = level; i < maxlevel; i++) if (getnybble(node, i) != nt_level(n, i)) return -2; return v; } if (v == 0) return -2; off = v; } /* multiple matches against an ambiguous prefix */ return -4; } static int nt_new(indexObject *self) { if (self->ntlength == self->ntcapacity) { if (self->ntcapacity >= INT_MAX / (sizeof(nodetree) * 2)) { PyErr_SetString(PyExc_MemoryError, "overflow in nt_new"); return -1; } self->ntcapacity *= 2; self->nt = realloc(self->nt, self->ntcapacity * sizeof(nodetree)); if (self->nt == NULL) { PyErr_SetString(PyExc_MemoryError, "out of memory"); return -1; } memset(&self->nt[self->ntlength], 0, sizeof(nodetree) * (self->ntcapacity - self->ntlength)); } return self->ntlength++; } static int nt_insert(indexObject *self, const char *node, int rev) { int level = 0; int off = 0; while (level < 40) { int k = nt_level(node, level); nodetree *n; int v; n = &self->nt[off]; v = n->children[k]; if (v == 
0) { n->children[k] = -rev - 1; return 0; } if (v < 0) { const char *oldnode = index_node(self, -(v + 1)); int noff; if (!oldnode || !memcmp(oldnode, node, 20)) { n->children[k] = -rev - 1; return 0; } noff = nt_new(self); if (noff == -1) return -1; /* self->nt may have been changed by realloc */ self->nt[off].children[k] = noff; off = noff; n = &self->nt[off]; n->children[nt_level(oldnode, ++level)] = v; if (level > self->ntdepth) self->ntdepth = level; self->ntsplits += 1; } else { level += 1; off = v; } } return -1; } static int nt_init(indexObject *self) { if (self->nt == NULL) { if ((size_t)self->raw_length > INT_MAX / sizeof(nodetree)) { PyErr_SetString(PyExc_ValueError, "overflow in nt_init"); return -1; } self->ntcapacity = self->raw_length < 4 ? 4 : (int)self->raw_length / 2; self->nt = calloc(self->ntcapacity, sizeof(nodetree)); if (self->nt == NULL) { PyErr_NoMemory(); return -1; } self->ntlength = 1; self->ntrev = (int)index_length(self) - 1; self->ntlookups = 1; self->ntmisses = 0; if (nt_insert(self, nullid, INT_MAX) == -1) return -1; } return 0; } /* * Return values: * * -3: error (exception set) * -2: not found (no exception set) * rest: valid rev */ static int index_find_node(indexObject *self, const char *node, Py_ssize_t nodelen) { int rev; self->ntlookups++; rev = nt_find(self, node, nodelen, 0); if (rev >= -1) return rev; if (nt_init(self) == -1) return -3; /* * For the first handful of lookups, we scan the entire index, * and cache only the matching nodes. This optimizes for cases * like "hg tip", where only a few nodes are accessed. * * After that, we cache every node we visit, using a single * scan amortized over multiple lookups. This gives the best * bulk performance, e.g. for "hg log". */ if (self->ntmisses++ < 4) { for (rev = self->ntrev - 1; rev >= 0; rev--) { const char *n = index_node(self, rev); if (n == NULL) return -2; if (memcmp(node, n, nodelen > 20 ? 
20 : nodelen) == 0) { if (nt_insert(self, n, rev) == -1) return -3; break; } } } else { for (rev = self->ntrev - 1; rev >= 0; rev--) { const char *n = index_node(self, rev); if (n == NULL) { self->ntrev = rev + 1; return -2; } if (nt_insert(self, n, rev) == -1) { self->ntrev = rev + 1; return -3; } if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) { break; } } self->ntrev = rev; } if (rev >= 0) return rev; return -2; } static void raise_revlog_error(void) { PyObject *mod = NULL, *dict = NULL, *errclass = NULL; mod = PyImport_ImportModule("mercurial.error"); if (mod == NULL) { goto cleanup; } dict = PyModule_GetDict(mod); if (dict == NULL) { goto cleanup; } Py_INCREF(dict); errclass = PyDict_GetItemString(dict, "RevlogError"); if (errclass == NULL) { PyErr_SetString(PyExc_SystemError, "could not find RevlogError"); goto cleanup; } /* value of exception is ignored by callers */ PyErr_SetString(errclass, "RevlogError"); cleanup: Py_XDECREF(dict); Py_XDECREF(mod); } static PyObject *index_getitem(indexObject *self, PyObject *value) { char *node; Py_ssize_t nodelen; int rev; if (PyInt_Check(value)) return index_get(self, PyInt_AS_LONG(value)); if (node_check(value, &node, &nodelen) == -1) return NULL; rev = index_find_node(self, node, nodelen); if (rev >= -1) return PyInt_FromLong(rev); if (rev == -2) raise_revlog_error(); return NULL; } static int nt_partialmatch(indexObject *self, const char *node, Py_ssize_t nodelen) { int rev; if (nt_init(self) == -1) return -3; if (self->ntrev > 0) { /* ensure that the radix tree is fully populated */ for (rev = self->ntrev - 1; rev >= 0; rev--) { const char *n = index_node(self, rev); if (n == NULL) return -2; if (nt_insert(self, n, rev) == -1) return -3; } self->ntrev = rev; } return nt_find(self, node, nodelen, 1); } static PyObject *index_partialmatch(indexObject *self, PyObject *args) { const char *fullnode; int nodelen; char *node; int rev, i; if (!PyArg_ParseTuple(args, "s#", &node, &nodelen)) return NULL; if (nodelen < 
4) { PyErr_SetString(PyExc_ValueError, "key too short"); return NULL; } if (nodelen > 40) { PyErr_SetString(PyExc_ValueError, "key too long"); return NULL; } for (i = 0; i < nodelen; i++) hexdigit(node, i); if (PyErr_Occurred()) { /* input contains non-hex characters */ PyErr_Clear(); Py_RETURN_NONE; } rev = nt_partialmatch(self, node, nodelen); switch (rev) { case -4: raise_revlog_error(); case -3: return NULL; case -2: Py_RETURN_NONE; case -1: return PyString_FromStringAndSize(nullid, 20); } fullnode = index_node(self, rev); if (fullnode == NULL) { PyErr_Format(PyExc_IndexError, "could not access rev %d", rev); return NULL; } return PyString_FromStringAndSize(fullnode, 20); } static PyObject *index_m_get(indexObject *self, PyObject *args) { Py_ssize_t nodelen; PyObject *val; char *node; int rev; if (!PyArg_ParseTuple(args, "O", &val)) return NULL; if (node_check(val, &node, &nodelen) == -1) return NULL; rev = index_find_node(self, node, nodelen); if (rev == -3) return NULL; if (rev == -2) Py_RETURN_NONE; return PyInt_FromLong(rev); } static int index_contains(indexObject *self, PyObject *value) { char *node; Py_ssize_t nodelen; if (PyInt_Check(value)) { long rev = PyInt_AS_LONG(value); return rev >= -1 && rev < index_length(self); } if (node_check(value, &node, &nodelen) == -1) return -1; switch (index_find_node(self, node, nodelen)) { case -3: return -1; case -2: return 0; default: return 1; } } typedef uint64_t bitmask; /* * Given a disjoint set of revs, return all candidates for the * greatest common ancestor. 
In revset notation, this is the set * "heads(::a and ::b and ...)" */ static PyObject *find_gca_candidates(indexObject *self, const int *revs, int revcount) { const bitmask allseen = (1ull << revcount) - 1; const bitmask poison = 1ull << revcount; PyObject *gca = PyList_New(0); int i, v, interesting; int maxrev = -1; bitmask sp; bitmask *seen; if (gca == NULL) return PyErr_NoMemory(); for (i = 0; i < revcount; i++) { if (revs[i] > maxrev) maxrev = revs[i]; } seen = calloc(sizeof(*seen), maxrev + 1); if (seen == NULL) { Py_DECREF(gca); return PyErr_NoMemory(); } for (i = 0; i < revcount; i++) seen[revs[i]] = 1ull << i; interesting = revcount; for (v = maxrev; v >= 0 && interesting; v--) { bitmask sv = seen[v]; int parents[2]; if (!sv) continue; if (sv < poison) { interesting -= 1; if (sv == allseen) { PyObject *obj = PyInt_FromLong(v); if (obj == NULL) goto bail; if (PyList_Append(gca, obj) == -1) { Py_DECREF(obj); goto bail; } sv |= poison; for (i = 0; i < revcount; i++) { if (revs[i] == v) goto done; } } } if (index_get_parents(self, v, parents, maxrev) < 0) goto bail; for (i = 0; i < 2; i++) { int p = parents[i]; if (p == -1) continue; sp = seen[p]; if (sv < poison) { if (sp == 0) { seen[p] = sv; interesting++; } else if (sp != sv) seen[p] |= sv; } else { if (sp && sp < poison) interesting--; seen[p] = sv; } } } done: free(seen); return gca; bail: free(seen); Py_XDECREF(gca); return NULL; } /* * Given a disjoint set of revs, return the subset with the longest * path to the root. 
*/ static PyObject *find_deepest(indexObject *self, PyObject *revs) { const Py_ssize_t revcount = PyList_GET_SIZE(revs); static const Py_ssize_t capacity = 24; int *depth, *interesting = NULL; int i, j, v, ninteresting; PyObject *dict = NULL, *keys = NULL; long *seen = NULL; int maxrev = -1; long final; if (revcount > capacity) { PyErr_Format(PyExc_OverflowError, "bitset size (%ld) > capacity (%ld)", (long)revcount, (long)capacity); return NULL; } for (i = 0; i < revcount; i++) { int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i)); if (n > maxrev) maxrev = n; } depth = calloc(sizeof(*depth), maxrev + 1); if (depth == NULL) return PyErr_NoMemory(); seen = calloc(sizeof(*seen), maxrev + 1); if (seen == NULL) { PyErr_NoMemory(); goto bail; } interesting = calloc(sizeof(*interesting), 2 << revcount); if (interesting == NULL) { PyErr_NoMemory(); goto bail; } if (PyList_Sort(revs) == -1) goto bail; for (i = 0; i < revcount; i++) { int n = (int)PyInt_AsLong(PyList_GET_ITEM(revs, i)); long b = 1l << i; depth[n] = 1; seen[n] = b; interesting[b] = 1; } ninteresting = (int)revcount; for (v = maxrev; v >= 0 && ninteresting > 1; v--) { int dv = depth[v]; int parents[2]; long sv; if (dv == 0) continue; sv = seen[v]; if (index_get_parents(self, v, parents, maxrev) < 0) goto bail; for (i = 0; i < 2; i++) { int p = parents[i]; long sp; int dp; if (p == -1) continue; dp = depth[p]; sp = seen[p]; if (dp <= dv) { depth[p] = dv + 1; if (sp != sv) { interesting[sv] += 1; seen[p] = sv; if (sp) { interesting[sp] -= 1; if (interesting[sp] == 0) ninteresting -= 1; } } } else if (dv == dp - 1) { long nsp = sp | sv; if (nsp == sp) continue; seen[p] = nsp; interesting[sp] -= 1; if (interesting[sp] == 0 && interesting[nsp] > 0) ninteresting -= 1; interesting[nsp] += 1; } } interesting[sv] -= 1; if (interesting[sv] == 0) ninteresting -= 1; } final = 0; j = ninteresting; for (i = 0; i < (int)(2 << revcount) && j > 0; i++) { if (interesting[i] == 0) continue; final |= i; j -= 1; } if (final == 0) 
{ keys = PyList_New(0); goto bail; } dict = PyDict_New(); if (dict == NULL) goto bail; for (i = 0; i < revcount; i++) { PyObject *key; if ((final & (1 << i)) == 0) continue; key = PyList_GET_ITEM(revs, i); Py_INCREF(key); Py_INCREF(Py_None); if (PyDict_SetItem(dict, key, Py_None) == -1) { Py_DECREF(key); Py_DECREF(Py_None); goto bail; } } keys = PyDict_Keys(dict); bail: free(depth); free(seen); free(interesting); Py_XDECREF(dict); return keys; } /* * Given a (possibly overlapping) set of revs, return all the * common ancestors heads: heads(::args[0] and ::a[1] and ...) */ static PyObject *index_commonancestorsheads(indexObject *self, PyObject *args) { PyObject *ret = NULL; Py_ssize_t argcount, i, len; bitmask repeat = 0; int revcount = 0; int *revs; argcount = PySequence_Length(args); revs = malloc(argcount * sizeof(*revs)); if (argcount > 0 && revs == NULL) return PyErr_NoMemory(); len = index_length(self) - 1; for (i = 0; i < argcount; i++) { static const int capacity = 24; PyObject *obj = PySequence_GetItem(args, i); bitmask x; long val; if (!PyInt_Check(obj)) { PyErr_SetString(PyExc_TypeError, "arguments must all be ints"); Py_DECREF(obj); goto bail; } val = PyInt_AsLong(obj); Py_DECREF(obj); if (val == -1) { ret = PyList_New(0); goto done; } if (val < 0 || val >= len) { PyErr_SetString(PyExc_IndexError, "index out of range"); goto bail; } /* this cheesy bloom filter lets us avoid some more * expensive duplicate checks in the common set-is-disjoint * case */ x = 1ull << (val & 0x3f); if (repeat & x) { int k; for (k = 0; k < revcount; k++) { if (val == revs[k]) goto duplicate; } } else repeat |= x; if (revcount >= capacity) { PyErr_Format(PyExc_OverflowError, "bitset size (%d) > capacity (%d)", revcount, capacity); goto bail; } revs[revcount++] = (int)val; duplicate:; } if (revcount == 0) { ret = PyList_New(0); goto done; } if (revcount == 1) { PyObject *obj; ret = PyList_New(1); if (ret == NULL) goto bail; obj = PyInt_FromLong(revs[0]); if (obj == NULL) goto 
bail; PyList_SET_ITEM(ret, 0, obj); goto done; } ret = find_gca_candidates(self, revs, revcount); if (ret == NULL) goto bail; done: free(revs); return ret; bail: free(revs); Py_XDECREF(ret); return NULL; } /* * Given a (possibly overlapping) set of revs, return the greatest * common ancestors: those with the longest path to the root. */ static PyObject *index_ancestors(indexObject *self, PyObject *args) { PyObject *ret; PyObject *gca = index_commonancestorsheads(self, args); if (gca == NULL) return NULL; if (PyList_GET_SIZE(gca) <= 1) { return gca; } ret = find_deepest(self, gca); Py_DECREF(gca); return ret; } /* * Invalidate any trie entries introduced by added revs. */ static void nt_invalidate_added(indexObject *self, Py_ssize_t start) { Py_ssize_t i, len = PyList_GET_SIZE(self->added); for (i = start; i < len; i++) { PyObject *tuple = PyList_GET_ITEM(self->added, i); PyObject *node = PyTuple_GET_ITEM(tuple, 7); nt_insert(self, PyString_AS_STRING(node), -1); } if (start == 0) Py_CLEAR(self->added); } /* * Delete a numeric range of revs, which must be at the end of the * range, but exclude the sentinel nullid entry. 
*/ static int index_slice_del(indexObject *self, PyObject *item) { Py_ssize_t start, stop, step, slicelength; Py_ssize_t length = index_length(self); int ret = 0; if (PySlice_GetIndicesEx((PySliceObject*)item, length, &start, &stop, &step, &slicelength) < 0) return -1; if (slicelength <= 0) return 0; if ((step < 0 && start < stop) || (step > 0 && start > stop)) stop = start; if (step < 0) { stop = start + 1; start = stop + step*(slicelength - 1) - 1; step = -step; } if (step != 1) { PyErr_SetString(PyExc_ValueError, "revlog index delete requires step size of 1"); return -1; } if (stop != length - 1) { PyErr_SetString(PyExc_IndexError, "revlog index deletion indices are invalid"); return -1; } if (start < self->length - 1) { if (self->nt) { Py_ssize_t i; for (i = start + 1; i < self->length - 1; i++) { const char *node = index_node(self, i); if (node) nt_insert(self, node, -1); } if (self->added) nt_invalidate_added(self, 0); if (self->ntrev > start) self->ntrev = (int)start; } self->length = start + 1; if (start < self->raw_length) { if (self->cache) { Py_ssize_t i; for (i = start; i < self->raw_length; i++) Py_CLEAR(self->cache[i]); } self->raw_length = start; } goto done; } if (self->nt) { nt_invalidate_added(self, start - self->length + 1); if (self->ntrev > start) self->ntrev = (int)start; } if (self->added) ret = PyList_SetSlice(self->added, start - self->length + 1, PyList_GET_SIZE(self->added), NULL); done: Py_CLEAR(self->headrevs); return ret; } /* * Supported ops: * * slice deletion * string assignment (extend node->rev mapping) * string deletion (shrink node->rev mapping) */ static int index_assign_subscript(indexObject *self, PyObject *item, PyObject *value) { char *node; Py_ssize_t nodelen; long rev; if (PySlice_Check(item) && value == NULL) return index_slice_del(self, item); if (node_check(item, &node, &nodelen) == -1) return -1; if (value == NULL) return self->nt ? 
nt_insert(self, node, -1) : 0; rev = PyInt_AsLong(value); if (rev > INT_MAX || rev < 0) { if (!PyErr_Occurred()) PyErr_SetString(PyExc_ValueError, "rev out of range"); return -1; } if (nt_init(self) == -1) return -1; return nt_insert(self, node, (int)rev); } /* * Find all RevlogNG entries in an index that has inline data. Update * the optional "offsets" table with those entries. */ static Py_ssize_t inline_scan(indexObject *self, const char **offsets) { const char *data = PyString_AS_STRING(self->data); Py_ssize_t pos = 0; Py_ssize_t end = PyString_GET_SIZE(self->data); long incr = v1_hdrsize; Py_ssize_t len = 0; while (pos + v1_hdrsize <= end && pos >= 0) { uint32_t comp_len; /* 3rd element of header is length of compressed inline data */ comp_len = getbe32(data + pos + 8); incr = v1_hdrsize + comp_len; if (offsets) offsets[len] = data + pos; len++; pos += incr; } if (pos != end) { if (!PyErr_Occurred()) PyErr_SetString(PyExc_ValueError, "corrupt index file"); return -1; } return len; } static int index_init(indexObject *self, PyObject *args) { PyObject *data_obj, *inlined_obj; Py_ssize_t size; /* Initialize before argument-checking to avoid index_dealloc() crash. 
*/ self->raw_length = 0; self->added = NULL; self->cache = NULL; self->data = NULL; self->headrevs = NULL; self->filteredrevs = Py_None; Py_INCREF(Py_None); self->nt = NULL; self->offsets = NULL; if (!PyArg_ParseTuple(args, "OO", &data_obj, &inlined_obj)) return -1; if (!PyString_Check(data_obj)) { PyErr_SetString(PyExc_TypeError, "data is not a string"); return -1; } size = PyString_GET_SIZE(data_obj); self->inlined = inlined_obj && PyObject_IsTrue(inlined_obj); self->data = data_obj; self->ntlength = self->ntcapacity = 0; self->ntdepth = self->ntsplits = 0; self->ntlookups = self->ntmisses = 0; self->ntrev = -1; Py_INCREF(self->data); if (self->inlined) { Py_ssize_t len = inline_scan(self, NULL); if (len == -1) goto bail; self->raw_length = len; self->length = len + 1; } else { if (size % v1_hdrsize) { PyErr_SetString(PyExc_ValueError, "corrupt index file"); goto bail; } self->raw_length = size / v1_hdrsize; self->length = self->raw_length + 1; } return 0; bail: return -1; } static PyObject *index_nodemap(indexObject *self) { Py_INCREF(self); return (PyObject *)self; } static void index_dealloc(indexObject *self) { _index_clearcaches(self); Py_XDECREF(self->filteredrevs); Py_XDECREF(self->data); Py_XDECREF(self->added); PyObject_Del(self); } static PySequenceMethods index_sequence_methods = { (lenfunc)index_length, /* sq_length */ 0, /* sq_concat */ 0, /* sq_repeat */ (ssizeargfunc)index_get, /* sq_item */ 0, /* sq_slice */ 0, /* sq_ass_item */ 0, /* sq_ass_slice */ (objobjproc)index_contains, /* sq_contains */ }; static PyMappingMethods index_mapping_methods = { (lenfunc)index_length, /* mp_length */ (binaryfunc)index_getitem, /* mp_subscript */ (objobjargproc)index_assign_subscript, /* mp_ass_subscript */ }; static PyMethodDef index_methods[] = { {"ancestors", (PyCFunction)index_ancestors, METH_VARARGS, "return the gca set of the given revs"}, {"commonancestorsheads", (PyCFunction)index_commonancestorsheads, METH_VARARGS, "return the heads of the common 
ancestors of the given revs"}, {"clearcaches", (PyCFunction)index_clearcaches, METH_NOARGS, "clear the index caches"}, {"get", (PyCFunction)index_m_get, METH_VARARGS, "get an index entry"}, {"computephasesmapsets", (PyCFunction)compute_phases_map_sets, METH_VARARGS, "compute phases"}, {"reachableroots2", (PyCFunction)reachableroots2, METH_VARARGS, "reachableroots"}, {"headrevs", (PyCFunction)index_headrevs, METH_VARARGS, "get head revisions"}, /* Can do filtering since 3.2 */ {"headrevsfiltered", (PyCFunction)index_headrevs, METH_VARARGS, "get filtered head revisions"}, /* Can always do filtering */ {"insert", (PyCFunction)index_insert, METH_VARARGS, "insert an index entry"}, {"partialmatch", (PyCFunction)index_partialmatch, METH_VARARGS, "match a potentially ambiguous node ID"}, {"stats", (PyCFunction)index_stats, METH_NOARGS, "stats for the index"}, {NULL} /* Sentinel */ }; static PyGetSetDef index_getset[] = { {"nodemap", (getter)index_nodemap, NULL, "nodemap", NULL}, {NULL} /* Sentinel */ }; static PyTypeObject indexType = { PyObject_HEAD_INIT(NULL) 0, /* ob_size */ "parsers.index", /* tp_name */ sizeof(indexObject), /* tp_basicsize */ 0, /* tp_itemsize */ (destructor)index_dealloc, /* tp_dealloc */ 0, /* tp_print */ 0, /* tp_getattr */ 0, /* tp_setattr */ 0, /* tp_compare */ 0, /* tp_repr */ 0, /* tp_as_number */ &index_sequence_methods, /* tp_as_sequence */ &index_mapping_methods, /* tp_as_mapping */ 0, /* tp_hash */ 0, /* tp_call */ 0, /* tp_str */ 0, /* tp_getattro */ 0, /* tp_setattro */ 0, /* tp_as_buffer */ Py_TPFLAGS_DEFAULT, /* tp_flags */ "revlog index", /* tp_doc */ 0, /* tp_traverse */ 0, /* tp_clear */ 0, /* tp_richcompare */ 0, /* tp_weaklistoffset */ 0, /* tp_iter */ 0, /* tp_iternext */ index_methods, /* tp_methods */ 0, /* tp_members */ index_getset, /* tp_getset */ 0, /* tp_base */ 0, /* tp_dict */ 0, /* tp_descr_get */ 0, /* tp_descr_set */ 0, /* tp_dictoffset */ (initproc)index_init, /* tp_init */ 0, /* tp_alloc */ }; /* * returns a tuple of 
the form (index, index, cache) with elements as * follows: * * index: an index object that lazily parses RevlogNG records * cache: if data is inlined, a tuple (index_file_content, 0), else None * * added complications are for backwards compatibility */ static PyObject *parse_index2(PyObject *self, PyObject *args) { PyObject *tuple = NULL, *cache = NULL; indexObject *idx; int ret; idx = PyObject_New(indexObject, &indexType); if (idx == NULL) goto bail; ret = index_init(idx, args); if (ret == -1) goto bail; if (idx->inlined) { cache = Py_BuildValue("iO", 0, idx->data); if (cache == NULL) goto bail; } else { cache = Py_None; Py_INCREF(cache); } tuple = Py_BuildValue("NN", idx, cache); if (!tuple) goto bail; return tuple; bail: Py_XDECREF(idx); Py_XDECREF(cache); Py_XDECREF(tuple); return NULL; } #define BUMPED_FIX 1 #define USING_SHA_256 2 #define FM1_HEADER_SIZE (4 + 8 + 2 + 2 + 1 + 1 + 1) static PyObject *readshas( const char *source, unsigned char num, Py_ssize_t hashwidth) { int i; PyObject *list = PyTuple_New(num); if (list == NULL) { return NULL; } for (i = 0; i < num; i++) { PyObject *hash = PyString_FromStringAndSize(source, hashwidth); if (hash == NULL) { Py_DECREF(list); return NULL; } PyTuple_SET_ITEM(list, i, hash); source += hashwidth; } return list; } static PyObject *fm1readmarker(const char *databegin, const char *dataend, uint32_t *msize) { const char *data = databegin; const char *meta; double mtime; int16_t tz; uint16_t flags; unsigned char nsuccs, nparents, nmetadata; Py_ssize_t hashwidth = 20; PyObject *prec = NULL, *parents = NULL, *succs = NULL; PyObject *metadata = NULL, *ret = NULL; int i; if (data + FM1_HEADER_SIZE > dataend) { goto overflow; } *msize = getbe32(data); data += 4; mtime = getbefloat64(data); data += 8; tz = getbeint16(data); data += 2; flags = getbeuint16(data); data += 2; if (flags & USING_SHA_256) { hashwidth = 32; } nsuccs = (unsigned char)(*data++); nparents = (unsigned char)(*data++); nmetadata = (unsigned char)(*data++); 
if (databegin + *msize > dataend) { goto overflow; } dataend = databegin + *msize; /* narrow down to marker size */ if (data + hashwidth > dataend) { goto overflow; } prec = PyString_FromStringAndSize(data, hashwidth); data += hashwidth; if (prec == NULL) { goto bail; } if (data + nsuccs * hashwidth > dataend) { goto overflow; } succs = readshas(data, nsuccs, hashwidth); if (succs == NULL) { goto bail; } data += nsuccs * hashwidth; if (nparents == 1 || nparents == 2) { if (data + nparents * hashwidth > dataend) { goto overflow; } parents = readshas(data, nparents, hashwidth); if (parents == NULL) { goto bail; } data += nparents * hashwidth; } else { parents = Py_None; } if (data + 2 * nmetadata > dataend) { goto overflow; } meta = data + (2 * nmetadata); metadata = PyTuple_New(nmetadata); if (metadata == NULL) { goto bail; } for (i = 0; i < nmetadata; i++) { PyObject *tmp, *left = NULL, *right = NULL; Py_ssize_t leftsize = (unsigned char)(*data++); Py_ssize_t rightsize = (unsigned char)(*data++); if (meta + leftsize + rightsize > dataend) { goto overflow; } left = PyString_FromStringAndSize(meta, leftsize); meta += leftsize; right = PyString_FromStringAndSize(meta, rightsize); meta += rightsize; tmp = PyTuple_New(2); if (!left || !right || !tmp) { Py_XDECREF(left); Py_XDECREF(right); Py_XDECREF(tmp); goto bail; } PyTuple_SET_ITEM(tmp, 0, left); PyTuple_SET_ITEM(tmp, 1, right); PyTuple_SET_ITEM(metadata, i, tmp); } ret = Py_BuildValue("(OOHO(di)O)", prec, succs, flags, metadata, mtime, (int)tz * 60, parents); goto bail; /* return successfully */ overflow: PyErr_SetString(PyExc_ValueError, "overflow in obsstore"); bail: Py_XDECREF(prec); Py_XDECREF(succs); Py_XDECREF(metadata); if (parents != Py_None) Py_XDECREF(parents); return ret; } static PyObject *fm1readmarkers(PyObject *self, PyObject *args) { const char *data, *dataend; int datalen; Py_ssize_t offset, stop; PyObject *markers = NULL; if (!PyArg_ParseTuple(args, "s#nn", &data, &datalen, &offset, &stop)) { 
return NULL; } dataend = data + datalen; data += offset; markers = PyList_New(0); if (!markers) { return NULL; } while (offset < stop) { uint32_t msize; int error; PyObject *record = fm1readmarker(data, dataend, &msize); if (!record) { goto bail; } error = PyList_Append(markers, record); Py_DECREF(record); if (error) { goto bail; } data += msize; offset += msize; } return markers; bail: Py_DECREF(markers); return NULL; } static char parsers_doc[] = "Efficient content parsing."; PyObject *encodedir(PyObject *self, PyObject *args); PyObject *pathencode(PyObject *self, PyObject *args); PyObject *lowerencode(PyObject *self, PyObject *args); static PyMethodDef methods[] = { {"pack_dirstate", pack_dirstate, METH_VARARGS, "pack a dirstate\n"}, {"nonnormalentries", nonnormalentries, METH_VARARGS, "create a set containing non-normal entries of given dirstate\n"}, {"parse_manifest", parse_manifest, METH_VARARGS, "parse a manifest\n"}, {"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"}, {"parse_index2", parse_index2, METH_VARARGS, "parse a revlog index\n"}, {"asciilower", asciilower, METH_VARARGS, "lowercase an ASCII string\n"}, {"asciiupper", asciiupper, METH_VARARGS, "uppercase an ASCII string\n"}, {"dict_new_presized", dict_new_presized, METH_VARARGS, "construct a dict with an expected size\n"}, {"make_file_foldmap", make_file_foldmap, METH_VARARGS, "make file foldmap\n"}, {"encodedir", encodedir, METH_VARARGS, "encodedir a path\n"}, {"pathencode", pathencode, METH_VARARGS, "fncache-encode a path\n"}, {"lowerencode", lowerencode, METH_VARARGS, "lower-encode a path\n"}, {"fm1readmarkers", fm1readmarkers, METH_VARARGS, "parse v1 obsolete markers\n"}, {NULL, NULL} }; void dirs_module_init(PyObject *mod); void manifest_module_init(PyObject *mod); static void module_init(PyObject *mod) { /* This module constant has two purposes. First, it lets us unit test * the ImportError raised without hard-coding any error text. 
This * means we can change the text in the future without breaking tests, * even across changesets without a recompile. Second, its presence * can be used to determine whether the version-checking logic is * present, which also helps in testing across changesets without a * recompile. Note that this means the pure-Python version of parsers * should not have this module constant. */ PyModule_AddStringConstant(mod, "versionerrortext", versionerrortext); dirs_module_init(mod); manifest_module_init(mod); indexType.tp_new = PyType_GenericNew; if (PyType_Ready(&indexType) < 0 || PyType_Ready(&dirstateTupleType) < 0) return; Py_INCREF(&indexType); PyModule_AddObject(mod, "index", (PyObject *)&indexType); Py_INCREF(&dirstateTupleType); PyModule_AddObject(mod, "dirstatetuple", (PyObject *)&dirstateTupleType); nullentry = Py_BuildValue("iiiiiiis#", 0, 0, 0, -1, -1, -1, -1, nullid, 20); if (nullentry) PyObject_GC_UnTrack(nullentry); } static int check_python_version(void) { PyObject *sys = PyImport_ImportModule("sys"), *ver; long hexversion; if (!sys) return -1; ver = PyObject_GetAttrString(sys, "hexversion"); Py_DECREF(sys); if (!ver) return -1; hexversion = PyInt_AsLong(ver); Py_DECREF(ver); /* sys.hexversion is a 32-bit number by default, so the -1 case * should only occur in unusual circumstances (e.g. if sys.hexversion * is manually set to an invalid value). 
*/ if ((hexversion == -1) || (hexversion >> 16 != PY_VERSION_HEX >> 16)) { PyErr_Format(PyExc_ImportError, "%s: The Mercurial extension " "modules were compiled with Python " PY_VERSION ", but " "Mercurial is currently using Python with sys.hexversion=%ld: " "Python %s\n at: %s", versionerrortext, hexversion, Py_GetVersion(), Py_GetProgramFullPath()); return -1; } return 0; } #ifdef IS_PY3K static struct PyModuleDef parsers_module = { PyModuleDef_HEAD_INIT, "parsers", parsers_doc, -1, methods }; PyMODINIT_FUNC PyInit_parsers(void) { PyObject *mod; if (check_python_version() == -1) return; mod = PyModule_Create(&parsers_module); module_init(mod); return mod; } #else PyMODINIT_FUNC initparsers(void) { PyObject *mod; if (check_python_version() == -1) return; mod = Py_InitModule3("parsers", methods, parsers_doc); module_init(mod); } #endif mercurial-3.7.3/mercurial/tagmerge.py0000644000175000017500000002650012676531525017262 0ustar mpmmpm00000000000000# tagmerge.py - merge .hgtags files # # Copyright 2014 Angel Ezquerra # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. # This module implements an automatic merge algorithm for mercurial's tag files # # The tagmerge algorithm implemented in this module is able to resolve most # merge conflicts that currently would trigger a .hgtags merge conflict. The # only case that it does not (and cannot) handle is that in which two tags point # to different revisions on each merge parent _and_ their corresponding tag # histories have the same rank (i.e. the same length). In all other cases the # merge algorithm will choose the revision belonging to the parent with the # highest ranked tag history. The merged tag history is the combination of both # tag histories (special care is taken to try to combine common tag histories # where possible). 
# # In addition to actually merging the tags from two parents, taking into # account the base, the algorithm also tries to minimize the difference # between the merged tag file and the first parent's tag file (i.e. it tries to # make the merged tag order as as similar as possible to the first parent's tag # file order). # # The algorithm works as follows: # 1. read the tags from p1, p2 and the base # - when reading the p1 tags, also get the line numbers associated to each # tag node (these will be used to sort the merged tags in a way that # minimizes the diff to p1). Ignore the file numbers when reading p2 and # the base # 2. recover the "lost tags" (i.e. those that are found in the base but not on # p1 or p2) and add them back to p1 and/or p2 # - at this point the only tags that are on p1 but not on p2 are those new # tags that were introduced in p1. Same thing for the tags that are on p2 # but not on p2 # 3. take all tags that are only on p1 or only on p2 (but not on the base) # - Note that these are the tags that were introduced between base and p1 # and between base and p2, possibly on separate clones # 4. for each tag found both on p1 and p2 perform the following merge algorithm: # - the tags conflict if their tag "histories" have the same "rank" (i.e. # length) AND the last (current) tag is NOT the same # - for non conflicting tags: # - choose which are the high and the low ranking nodes # - the high ranking list of nodes is the one that is longer. 
# In case of draw favor p1 # - the merged node list is made of 3 parts: # - first the nodes that are common to the beginning of both # the low and the high ranking nodes # - second the non common low ranking nodes # - finally the non common high ranking nodes (with the last # one being the merged tag node) # - note that this is equivalent to putting the whole low ranking # node list first, followed by the non common high ranking nodes # - note that during the merge we keep the "node line numbers", which will # be used when writing the merged tags to the tag file # 5. write the merged tags taking into account to their positions in the first # parent (i.e. try to keep the relative ordering of the nodes that come # from p1). This minimizes the diff between the merged and the p1 tag files # This is done by using the following algorithm # - group the nodes for a given tag that must be written next to each other # - A: nodes that come from consecutive lines on p1 # - B: nodes that come from p2 (i.e. whose associated line number is # None) and are next to one of the a nodes in A # - each group is associated with a line number coming from p1 # - generate a "tag block" for each of the groups # - a tag block is a set of consecutive "node tag" lines belonging to # the same tag and which will be written next to each other on the # merged tags file # - sort the "tag blocks" according to their associated number line # - put blocks whose nodes come all from p2 first # - write the tag blocks in the sorted order from __future__ import absolute_import import operator from .i18n import _ from .node import ( hex, nullid, ) from .import ( tags as tagsmod, util, ) hexnullid = hex(nullid) def readtagsformerge(ui, repo, lines, fn='', keeplinenums=False): '''read the .hgtags file into a structure that is suitable for merging Depending on the keeplinenums flag, clear the line numbers associated with each tag. 
This is done because only the line numbers of the first parent are useful for merging. ''' filetags = tagsmod._readtaghist(ui, repo, lines, fn=fn, recode=None, calcnodelines=True)[1] for tagname, taginfo in filetags.items(): if not keeplinenums: for el in taginfo: el[1] = None return filetags def grouptagnodesbyline(tagnodes): ''' Group nearby nodes (i.e. those that must be written next to each other) The input is a list of [node, position] pairs, corresponding to a given tag The position is the line number where the node was found on the first parent .hgtags file, or None for those nodes that came from the base or the second parent .hgtags files. This function groups those [node, position] pairs, returning a list of groups of nodes that must be written next to each other because their positions are consecutive or have no position preference (because their position is None). The result is a list of [position, [consecutive node list]] ''' firstlinenum = None for hexnode, linenum in tagnodes: firstlinenum = linenum if firstlinenum is not None: break if firstlinenum is None: return [[None, [el[0] for el in tagnodes]]] tagnodes[0][1] = firstlinenum groupednodes = [[firstlinenum, []]] prevlinenum = firstlinenum for hexnode, linenum in tagnodes: if linenum is not None and linenum - prevlinenum > 1: groupednodes.append([linenum, []]) groupednodes[-1][1].append(hexnode) if linenum is not None: prevlinenum = linenum return groupednodes def writemergedtags(repo, mergedtags): ''' write the merged tags while trying to minimize the diff to the first parent This function uses the ordering info stored on the merged tags dict to generate an .hgtags file which is correct (in the sense that its contents correspond to the result of the tag merge) while also being as close as possible to the first parent's .hgtags file. 
''' # group the node-tag pairs that must be written next to each other for tname, taglist in mergedtags.items(): mergedtags[tname] = grouptagnodesbyline(taglist) # convert the grouped merged tags dict into a format that resembles the # final .hgtags file (i.e. a list of blocks of 'node tag' pairs) def taglist2string(tlist, tname): return '\n'.join(['%s %s' % (hexnode, tname) for hexnode in tlist]) finaltags = [] for tname, tags in mergedtags.items(): for block in tags: block[1] = taglist2string(block[1], tname) finaltags += tags # the tag groups are linked to a "position" that can be used to sort them # before writing them # the position is calculated to ensure that the diff of the merged .hgtags # file to the first parent's .hgtags file is as small as possible finaltags.sort(key=operator.itemgetter(0)) # finally we can join the sorted groups to get the final contents of the # merged .hgtags file, and then write it to disk mergedtagstring = '\n'.join([tags for rank, tags in finaltags if tags]) fp = repo.wfile('.hgtags', 'wb') fp.write(mergedtagstring + '\n') fp.close() def singletagmerge(p1nodes, p2nodes): ''' merge the nodes corresponding to a single tag Note that the inputs are lists of node-linenum pairs (i.e. not just lists of nodes) ''' if not p2nodes: return p1nodes if not p1nodes: return p2nodes # there is no conflict unless both tags point to different revisions # and have a non identical tag history p1currentnode = p1nodes[-1][0] p2currentnode = p2nodes[-1][0] if p1currentnode != p2currentnode and len(p1nodes) == len(p2nodes): # cannot merge two tags with same rank pointing to different nodes return None # which are the highest ranking (hr) / lowest ranking (lr) nodes? 
if len(p1nodes) >= len(p2nodes): hrnodes, lrnodes = p1nodes, p2nodes else: hrnodes, lrnodes = p2nodes, p1nodes # the lowest ranking nodes will be written first, followed by the highest # ranking nodes # to avoid unwanted tag rank explosion we try to see if there are some # common nodes that can be written only once commonidx = len(lrnodes) for n in range(len(lrnodes)): if hrnodes[n][0] != lrnodes[n][0]: commonidx = n break lrnodes[n][1] = p1nodes[n][1] # the merged node list has 3 parts: # - common nodes # - non common lowest ranking nodes # - non common highest ranking nodes # note that the common nodes plus the non common lowest ranking nodes is the # whole list of lr nodes return lrnodes + hrnodes[commonidx:] def merge(repo, fcd, fco, fca): ''' Merge the tags of two revisions, taking into account the base tags Try to minimize the diff between the merged tags and the first parent tags ''' ui = repo.ui # read the p1, p2 and base tags # only keep the line numbers for the p1 tags p1tags = readtagsformerge( ui, repo, fcd.data().splitlines(), fn="p1 tags", keeplinenums=True) p2tags = readtagsformerge( ui, repo, fco.data().splitlines(), fn="p2 tags", keeplinenums=False) basetags = readtagsformerge( ui, repo, fca.data().splitlines(), fn="base tags", keeplinenums=False) # recover the list of "lost tags" (i.e. 
those that were found on the base # revision but not on one of the revisions being merged) basetagset = set(basetags) for n, pntags in enumerate((p1tags, p2tags)): pntagset = set(pntags) pnlosttagset = basetagset - pntagset for t in pnlosttagset: pntags[t] = basetags[t] if pntags[t][-1][0] != hexnullid: pntags[t].append([hexnullid, None]) conflictedtags = [] # for reporting purposes mergedtags = util.sortdict(p1tags) # sortdict does not implement iteritems() for tname, p2nodes in p2tags.items(): if tname not in mergedtags: mergedtags[tname] = p2nodes continue p1nodes = mergedtags[tname] mergednodes = singletagmerge(p1nodes, p2nodes) if mergednodes is None: conflictedtags.append(tname) continue mergedtags[tname] = mergednodes if conflictedtags: numconflicts = len(conflictedtags) ui.warn(_('automatic .hgtags merge failed\n' 'the following %d tags are in conflict: %s\n') % (numconflicts, ', '.join(sorted(conflictedtags)))) return True, 1 writemergedtags(repo, mergedtags) ui.note(_('.hgtags merged successfully\n')) return False, 0 mercurial-3.7.3/mercurial/dagutil.py0000644000175000017500000002024512676531525017120 0ustar mpmmpm00000000000000# dagutil.py - dag utilities for mercurial # # Copyright 2010 Benoit Boissinot # and Peter Arrenbrecht # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import from .i18n import _ from .node import nullrev class basedag(object): '''generic interface for DAGs terms: "ix" (short for index) identifies a nodes internally, "id" identifies one externally. All params are ixs unless explicitly suffixed otherwise. Pluralized params are lists or sets. 
''' def __init__(self): self._inverse = None def nodeset(self): '''set of all node ixs''' raise NotImplementedError def heads(self): '''list of head ixs''' raise NotImplementedError def parents(self, ix): '''list of parents ixs of ix''' raise NotImplementedError def inverse(self): '''inverse DAG, where parents becomes children, etc.''' raise NotImplementedError def ancestorset(self, starts, stops=None): ''' set of all ancestors of starts (incl), but stop walk at stops (excl) ''' raise NotImplementedError def descendantset(self, starts, stops=None): ''' set of all descendants of starts (incl), but stop walk at stops (excl) ''' return self.inverse().ancestorset(starts, stops) def headsetofconnecteds(self, ixs): ''' subset of connected list of ixs so that no node has a descendant in it By "connected list" we mean that if an ancestor and a descendant are in the list, then so is at least one path connecting them. ''' raise NotImplementedError def externalize(self, ix): '''return a node id''' return self._externalize(ix) def externalizeall(self, ixs): '''return a list of (or set if given a set) of node ids''' ids = self._externalizeall(ixs) if isinstance(ixs, set): return set(ids) return list(ids) def internalize(self, id): '''return a node ix''' return self._internalize(id) def internalizeall(self, ids, filterunknown=False): '''return a list of (or set if given a set) of node ixs''' ixs = self._internalizeall(ids, filterunknown) if isinstance(ids, set): return set(ixs) return list(ixs) class genericdag(basedag): '''generic implementations for DAGs''' def ancestorset(self, starts, stops=None): if stops: stops = set(stops) else: stops = set() seen = set() pending = list(starts) while pending: n = pending.pop() if n not in seen and n not in stops: seen.add(n) pending.extend(self.parents(n)) return seen def headsetofconnecteds(self, ixs): hds = set(ixs) if not hds: return hds for n in ixs: for p in self.parents(n): hds.discard(p) assert hds return hds class 
revlogbaseddag(basedag): '''generic dag interface to a revlog''' def __init__(self, revlog, nodeset): basedag.__init__(self) self._revlog = revlog self._heads = None self._nodeset = nodeset def nodeset(self): return self._nodeset def heads(self): if self._heads is None: self._heads = self._getheads() return self._heads def _externalize(self, ix): return self._revlog.index[ix][7] def _externalizeall(self, ixs): idx = self._revlog.index return [idx[i][7] for i in ixs] def _internalize(self, id): ix = self._revlog.rev(id) if ix == nullrev: raise LookupError(id, self._revlog.indexfile, _('nullid')) return ix def _internalizeall(self, ids, filterunknown): rl = self._revlog if filterunknown: return [r for r in map(rl.nodemap.get, ids) if (r is not None and r != nullrev and r not in rl.filteredrevs)] return map(self._internalize, ids) class revlogdag(revlogbaseddag): '''dag interface to a revlog''' def __init__(self, revlog): revlogbaseddag.__init__(self, revlog, set(revlog)) def _getheads(self): return [r for r in self._revlog.headrevs() if r != nullrev] def parents(self, ix): rlog = self._revlog idx = rlog.index revdata = idx[ix] prev = revdata[5] if prev != nullrev: prev2 = revdata[6] if prev2 == nullrev: return [prev] return [prev, prev2] prev2 = revdata[6] if prev2 != nullrev: return [prev2] return [] def inverse(self): if self._inverse is None: self._inverse = inverserevlogdag(self) return self._inverse def ancestorset(self, starts, stops=None): rlog = self._revlog idx = rlog.index if stops: stops = set(stops) else: stops = set() seen = set() pending = list(starts) while pending: rev = pending.pop() if rev not in seen and rev not in stops: seen.add(rev) revdata = idx[rev] for i in [5, 6]: prev = revdata[i] if prev != nullrev: pending.append(prev) return seen def headsetofconnecteds(self, ixs): if not ixs: return set() rlog = self._revlog idx = rlog.index headrevs = set(ixs) for rev in ixs: revdata = idx[rev] for i in [5, 6]: prev = revdata[i] if prev != nullrev: 
headrevs.discard(prev) assert headrevs return headrevs def linearize(self, ixs): '''linearize and topologically sort a list of revisions The linearization process tries to create long runs of revs where a child rev comes immediately after its first parent. This is done by visiting the heads of the given revs in inverse topological order, and for each visited rev, visiting its second parent, then its first parent, then adding the rev itself to the output list. ''' sorted = [] visit = list(self.headsetofconnecteds(ixs)) visit.sort(reverse=True) finished = set() while visit: cur = visit.pop() if cur < 0: cur = -cur - 1 if cur not in finished: sorted.append(cur) finished.add(cur) else: visit.append(-cur - 1) visit += [p for p in self.parents(cur) if p in ixs and p not in finished] assert len(sorted) == len(ixs) return sorted class inverserevlogdag(revlogbaseddag, genericdag): '''inverse of an existing revlog dag; see revlogdag.inverse()''' def __init__(self, orig): revlogbaseddag.__init__(self, orig._revlog, orig._nodeset) self._orig = orig self._children = {} self._roots = [] self._walkfrom = len(self._revlog) - 1 def _walkto(self, walkto): rev = self._walkfrom cs = self._children roots = self._roots idx = self._revlog.index while rev >= walkto: data = idx[rev] isroot = True for prev in [data[5], data[6]]: # parent revs if prev != nullrev: cs.setdefault(prev, []).append(rev) isroot = False if isroot: roots.append(rev) rev -= 1 self._walkfrom = rev def _getheads(self): self._walkto(nullrev) return self._roots def parents(self, ix): if ix is None: return [] if ix <= self._walkfrom: self._walkto(ix) return self._children.get(ix, []) def inverse(self): return self._orig mercurial-3.7.3/mercurial/graphmod.py0000644000175000017500000005022412676531525017270 0ustar mpmmpm00000000000000# Revision graph generator for Mercurial # # Copyright 2008 Dirkjan Ochtman # Copyright 2007 Joel Rosdahl # # This software may be used and distributed according to the terms of the # GNU 
General Public License version 2 or any later version. """supports walking the history as DAGs suitable for graphical output The most basic format we use is that of:: (id, type, data, [parentids]) The node and parent ids are arbitrary integers which identify a node in the context of the graph returned. Type is a constant specifying the node type. Data depends on type. """ from __future__ import absolute_import import heapq from .node import nullrev from . import ( revset, util, ) CHANGESET = 'C' def groupbranchiter(revs, parentsfunc, firstbranch=()): """Yield revisions from heads to roots one (topo) branch at a time. This function aims to be used by a graph generator that wishes to minimize the number of parallel branches and their interleaving. Example iteration order (numbers show the "true" order in a changelog): o 4 | o 1 | | o 3 | | | o 2 |/ o 0 Note that the ancestors of merges are understood by the current algorithm to be on the same branch. This means no reordering will occur behind a merge. """ ### Quick summary of the algorithm # # This function is based around a "retention" principle. We keep revisions # in memory until we are ready to emit a whole branch that immediately # "merges" into an existing one. This reduces the number of parallel # branches with interleaved revisions. # # During iteration revs are split into two groups: # A) revision already emitted # B) revision in "retention". They are stored as different subgroups. # # for each REV, we do the following logic: # # 1) if REV is a parent of (A), we will emit it. If there is a # retention group ((B) above) that is blocked on REV being # available, we emit all the revisions out of that retention # group first. # # 2) else, we'll search for a subgroup in (B) awaiting for REV to be # available, if such subgroup exist, we add REV to it and the subgroup is # now awaiting for REV.parents() to be available. # # 3) finally if no such group existed in (B), we create a new subgroup. 
# # # To bootstrap the algorithm, we emit the tipmost revision (which # puts it in group (A) from above). revs.sort(reverse=True) # Set of parents of revision that have been emitted. They can be considered # unblocked as the graph generator is already aware of them so there is no # need to delay the revisions that reference them. # # If someone wants to prioritize a branch over the others, pre-filling this # set will force all other branches to wait until this branch is ready to be # emitted. unblocked = set(firstbranch) # list of groups waiting to be displayed, each group is defined by: # # (revs: lists of revs waiting to be displayed, # blocked: set of that cannot be displayed before those in 'revs') # # The second value ('blocked') correspond to parents of any revision in the # group ('revs') that is not itself contained in the group. The main idea # of this algorithm is to delay as much as possible the emission of any # revision. This means waiting for the moment we are about to display # these parents to display the revs in a group. # # This first implementation is smart until it encounters a merge: it will # emit revs as soon as any parent is about to be emitted and can grow an # arbitrary number of revs in 'blocked'. In practice this mean we properly # retains new branches but gives up on any special ordering for ancestors # of merges. The implementation can be improved to handle this better. # # The first subgroup is special. It corresponds to all the revision that # were already emitted. The 'revs' lists is expected to be empty and the # 'blocked' set contains the parents revisions of already emitted revision. # # You could pre-seed the set of groups[0] to a specific # changesets to select what the first emitted branch should be. 
groups = [([], unblocked)] pendingheap = [] pendingset = set() heapq.heapify(pendingheap) heappop = heapq.heappop heappush = heapq.heappush for currentrev in revs: # Heap works with smallest element, we want highest so we invert if currentrev not in pendingset: heappush(pendingheap, -currentrev) pendingset.add(currentrev) # iterates on pending rev until after the current rev have been # processed. rev = None while rev != currentrev: rev = -heappop(pendingheap) pendingset.remove(rev) # Seek for a subgroup blocked, waiting for the current revision. matching = [i for i, g in enumerate(groups) if rev in g[1]] if matching: # The main idea is to gather together all sets that are blocked # on the same revision. # # Groups are merged when a common blocking ancestor is # observed. For example, given two groups: # # revs [5, 4] waiting for 1 # revs [3, 2] waiting for 1 # # These two groups will be merged when we process # 1. In theory, we could have merged the groups when # we added 2 to the group it is now in (we could have # noticed the groups were both blocked on 1 then), but # the way it works now makes the algorithm simpler. # # We also always keep the oldest subgroup first. We can # probably improve the behavior by having the longest set # first. That way, graph algorithms could minimise the length # of parallel lines their drawing. This is currently not done. targetidx = matching.pop(0) trevs, tparents = groups[targetidx] for i in matching: gr = groups[i] trevs.extend(gr[0]) tparents |= gr[1] # delete all merged subgroups (except the one we kept) # (starting from the last subgroup for performance and # sanity reasons) for i in reversed(matching): del groups[i] else: # This is a new head. We create a new subgroup for it. targetidx = len(groups) groups.append(([], set([rev]))) gr = groups[targetidx] # We now add the current nodes to this subgroups. This is done # after the subgroup merging because all elements from a subgroup # that relied on this rev must precede it. 
# # we also update the set to include the parents of the # new nodes. if rev == currentrev: # only display stuff in rev gr[0].append(rev) gr[1].remove(rev) parents = [p for p in parentsfunc(rev) if p > nullrev] gr[1].update(parents) for p in parents: if p not in pendingset: pendingset.add(p) heappush(pendingheap, -p) # Look for a subgroup to display # # When unblocked is empty (if clause), we were not waiting for any # revisions during the first iteration (if no priority was given) or # if we emitted a whole disconnected set of the graph (reached a # root). In that case we arbitrarily take the oldest known # subgroup. The heuristic could probably be better. # # Otherwise (elif clause) if the subgroup is blocked on # a revision we just emitted, we can safely emit it as # well. if not unblocked: if len(groups) > 1: # display other subset targetidx = 1 gr = groups[1] elif not gr[1] & unblocked: gr = None if gr is not None: # update the set of awaited revisions with the one from the # subgroup unblocked |= gr[1] # output all revisions in the subgroup for r in gr[0]: yield r # delete the subgroup that you just output # unless it is groups[0] in which case you just empty it. if targetidx: del groups[targetidx] else: gr[0][:] = [] # Check if we have some subgroup waiting for revisions we are not going to # iterate over for g in groups: for r in g[0]: yield r def dagwalker(repo, revs): """cset DAG generator yielding (id, CHANGESET, ctx, [parentids]) tuples This generator function walks through revisions (which should be ordered from bigger to lower). It returns a tuple for each node. The node and parent ids are arbitrary integers which identify a node in the context of the graph returned. 
""" if not revs: return gpcache = {} if repo.ui.configbool('experimental', 'graph-group-branches', False): firstbranch = () firstbranchrevset = repo.ui.config( 'experimental', 'graph-group-branches.firstbranch', '') if firstbranchrevset: firstbranch = repo.revs(firstbranchrevset) parentrevs = repo.changelog.parentrevs revs = groupbranchiter(revs, parentrevs, firstbranch) revs = revset.baseset(revs) for rev in revs: ctx = repo[rev] parents = sorted(set([p.rev() for p in ctx.parents() if p.rev() in revs])) mpars = [p.rev() for p in ctx.parents() if p.rev() != nullrev and p.rev() not in parents] for mpar in mpars: gp = gpcache.get(mpar) if gp is None: # precompute slow query as we know reachableroots() goes # through all revs (issue4782) if not isinstance(revs, revset.baseset): revs = revset.baseset(revs) gp = gpcache[mpar] = revset.reachableroots(repo, revs, [mpar]) if not gp: parents.append(mpar) else: parents.extend(g for g in gp if g not in parents) yield (ctx.rev(), CHANGESET, ctx, parents) def nodes(repo, nodes): """cset DAG generator yielding (id, CHANGESET, ctx, [parentids]) tuples This generator function walks the given nodes. It only returns parents that are in nodes, too. """ include = set(nodes) for node in nodes: ctx = repo[node] parents = set([p.rev() for p in ctx.parents() if p.node() in include]) yield (ctx.rev(), CHANGESET, ctx, sorted(parents)) def colored(dag, repo): """annotates a DAG with colored edge information For each DAG node this function emits tuples:: (id, type, data, (col, color), [(col, nextcol, color)]) with the following new elements: - Tuple (col, color) with column and color index for the current node - A list of tuples indicating the edges between the current node and its parents. """ seen = [] colors = {} newcolor = 1 config = {} for key, val in repo.ui.configitems('graph'): if '.' 
in key: branch, setting = key.rsplit('.', 1) # Validation if setting == "width" and val.isdigit(): config.setdefault(branch, {})[setting] = int(val) elif setting == "color" and val.isalnum(): config.setdefault(branch, {})[setting] = val if config: getconf = util.lrucachefunc( lambda rev: config.get(repo[rev].branch(), {})) else: getconf = lambda rev: {} for (cur, type, data, parents) in dag: # Compute seen and next if cur not in seen: seen.append(cur) # new head colors[cur] = newcolor newcolor += 1 col = seen.index(cur) color = colors.pop(cur) next = seen[:] # Add parents to next addparents = [p for p in parents if p not in next] next[col:col + 1] = addparents # Set colors for the parents for i, p in enumerate(addparents): if not i: colors[p] = color else: colors[p] = newcolor newcolor += 1 # Add edges to the graph edges = [] for ecol, eid in enumerate(seen): if eid in next: bconf = getconf(eid) edges.append(( ecol, next.index(eid), colors[eid], bconf.get('width', -1), bconf.get('color', ''))) elif eid == cur: for p in parents: bconf = getconf(p) edges.append(( ecol, next.index(p), color, bconf.get('width', -1), bconf.get('color', ''))) # Yield and move on yield (cur, type, data, (col, color), edges) seen = next def asciiedges(type, char, lines, seen, rev, parents): """adds edge info to changelog DAG walk suitable for ascii()""" if rev not in seen: seen.append(rev) nodeidx = seen.index(rev) knownparents = [] newparents = [] for parent in parents: if parent in seen: knownparents.append(parent) else: newparents.append(parent) ncols = len(seen) nextseen = seen[:] nextseen[nodeidx:nodeidx + 1] = newparents edges = [(nodeidx, nextseen.index(p)) for p in knownparents if p != nullrev] while len(newparents) > 2: # ascii() only knows how to add or remove a single column between two # calls. Nodes with more than two parents break this constraint so we # introduce intermediate expansion lines to grow the active node list # slowly. 
edges.append((nodeidx, nodeidx)) edges.append((nodeidx, nodeidx + 1)) nmorecols = 1 yield (type, char, lines, (nodeidx, edges, ncols, nmorecols)) char = '\\' lines = [] nodeidx += 1 ncols += 1 edges = [] del newparents[0] if len(newparents) > 0: edges.append((nodeidx, nodeidx)) if len(newparents) > 1: edges.append((nodeidx, nodeidx + 1)) nmorecols = len(nextseen) - ncols seen[:] = nextseen yield (type, char, lines, (nodeidx, edges, ncols, nmorecols)) def _fixlongrightedges(edges): for (i, (start, end)) in enumerate(edges): if end > start: edges[i] = (start, end + 1) def _getnodelineedgestail( node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail): if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0: # Still going in the same non-vertical direction. if n_columns_diff == -1: start = max(node_index + 1, p_node_index) tail = ["|", " "] * (start - node_index - 1) tail.extend(["/", " "] * (n_columns - start)) return tail else: return ["\\", " "] * (n_columns - node_index - 1) else: return ["|", " "] * (n_columns - node_index - 1) def _drawedges(edges, nodeline, interline): for (start, end) in edges: if start == end + 1: interline[2 * end + 1] = "/" elif start == end - 1: interline[2 * start + 1] = "\\" elif start == end: interline[2 * start] = "|" else: if 2 * end >= len(nodeline): continue nodeline[2 * end] = "+" if start > end: (start, end) = (end, start) for i in range(2 * start + 1, 2 * end): if nodeline[i] != "+": nodeline[i] = "-" def _getpaddingline(ni, n_columns, edges): line = [] line.extend(["|", " "] * ni) if (ni, ni - 1) in edges or (ni, ni) in edges: # (ni, ni - 1) (ni, ni) # | | | | | | | | # +---o | | o---+ # | | c | | c | | # | |/ / | |/ / # | | | | | | c = "|" else: c = " " line.extend([c, " "]) line.extend(["|", " "] * (n_columns - ni - 1)) return line def asciistate(): """returns the initial value for the "state" argument to ascii()""" return [0, 0] def ascii(ui, state, type, char, text, coldata): """prints an ASCII graph 
of the DAG takes the following arguments (one call per node in the graph): - ui to write to - Somewhere to keep the needed state in (init to asciistate()) - Column of the current node in the set of ongoing edges. - Type indicator of node data, usually 'C' for changesets. - Payload: (char, lines): - Character to use as node's symbol. - List of lines to display as the node's text. - Edges; a list of (col, next_col) indicating the edges between the current node and its parents. - Number of columns (ongoing edges) in the current revision. - The difference between the number of columns (ongoing edges) in the next revision and the number of columns (ongoing edges) in the current revision. That is: -1 means one column removed; 0 means no columns added or removed; 1 means one column added. """ idx, edges, ncols, coldiff = coldata assert -2 < coldiff < 2 if coldiff == -1: # Transform # # | | | | | | # o | | into o---+ # |X / |/ / # | | | | _fixlongrightedges(edges) # add_padding_line says whether to rewrite # # | | | | | | | | # | o---+ into | o---+ # | / / | | | # <--- padding line # o | | | / / # o | | add_padding_line = (len(text) > 2 and coldiff == -1 and [x for (x, y) in edges if x + 1 < y]) # fix_nodeline_tail says whether to rewrite # # | | o | | | | o | | # | | |/ / | | |/ / # | o | | into | o / / # <--- fixed nodeline tail # | |/ / | |/ / # o | | o | | fix_nodeline_tail = len(text) <= 2 and not add_padding_line # nodeline is the line containing the node character (typically o) nodeline = ["|", " "] * idx nodeline.extend([char, " "]) nodeline.extend( _getnodelineedgestail(idx, state[1], ncols, coldiff, state[0], fix_nodeline_tail)) # shift_interline is the line containing the non-vertical # edges between this entry and the next shift_interline = ["|", " "] * idx if coldiff == -1: n_spaces = 1 edge_ch = "/" elif coldiff == 0: n_spaces = 2 edge_ch = "|" else: n_spaces = 3 edge_ch = "\\" shift_interline.extend(n_spaces * [" "]) shift_interline.extend([edge_ch, " "] * 
(ncols - idx - 1)) # draw edges from the current node to its parents _drawedges(edges, nodeline, shift_interline) # lines is the list of all graph lines to print lines = [nodeline] if add_padding_line: lines.append(_getpaddingline(idx, ncols, edges)) lines.append(shift_interline) # make sure that there are as many graph lines as there are # log strings while len(text) < len(lines): text.append("") if len(lines) < len(text): extra_interline = ["|", " "] * (ncols + coldiff) while len(lines) < len(text): lines.append(extra_interline) # print lines indentation_level = max(ncols, ncols + coldiff) for (line, logstr) in zip(lines, text): ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr) ui.write(ln.rstrip() + '\n') # ... and start over state[0] = coldiff state[1] = idx mercurial-3.7.3/mercurial/default.d/0000755000175000017500000000000012676531544016761 5ustar mpmmpm00000000000000mercurial-3.7.3/mercurial/default.d/mergetools.rc0000644000175000017500000001352412676531525021473 0ustar mpmmpm00000000000000# Some default global settings for common merge tools [merge-tools] kdiff3.args=--auto --L1 base --L2 local --L3 other $base $local $other -o $output kdiff3.regkey=Software\KDiff3 kdiff3.regkeyalt=Software\Wow6432Node\KDiff3 kdiff3.regappend=\kdiff3.exe kdiff3.fixeol=True kdiff3.gui=True kdiff3.diffargs=--L1 $plabel1 --L2 $clabel $parent $child gvimdiff.args=--nofork -d -g -O $local $other $base gvimdiff.regkey=Software\Vim\GVim gvimdiff.regkeyalt=Software\Wow6432Node\Vim\GVim gvimdiff.regname=path gvimdiff.priority=-9 gvimdiff.diffargs=--nofork -d -g -O $parent $child vimdiff.args=$local $other $base -c 'redraw | echomsg "hg merge conflict, type \":cq\" to abort vimdiff"' vimdiff.check=changed vimdiff.priority=-10 merge.check=conflicts merge.priority=-100 gpyfm.gui=True meld.gui=True meld.args=--label='local' $local --label='merged' $base --label='other' $other -o $output meld.check=changed meld.diffargs=-a --label=$plabel1 $parent --label=$clabel $child 
tkdiff.args=$local $other -a $base -o $output tkdiff.gui=True tkdiff.priority=-8 tkdiff.diffargs=-L $plabel1 $parent -L $clabel $child xxdiff.args=--show-merged-pane --exit-with-merge-status --title1 local --title2 base --title3 other --merged-filename $output --merge $local $base $other xxdiff.gui=True xxdiff.priority=-8 xxdiff.diffargs=--title1 $plabel1 $parent --title2 $clabel $child diffmerge.regkey=Software\SourceGear\SourceGear DiffMerge\ diffmerge.regkeyalt=Software\Wow6432Node\SourceGear\SourceGear DiffMerge\ diffmerge.regname=Location diffmerge.priority=-7 diffmerge.args=-nosplash -merge -title1=local -title2=merged -title3=other $local $base $other -result=$output diffmerge.check=changed diffmerge.gui=True diffmerge.diffargs=--nosplash --title1=$plabel1 --title2=$clabel $parent $child p4merge.args=$base $local $other $output p4merge.regkey=Software\Perforce\Environment p4merge.regkeyalt=Software\Wow6432Node\Perforce\Environment p4merge.regname=P4INSTROOT p4merge.regappend=\p4merge.exe p4merge.gui=True p4merge.priority=-8 p4merge.diffargs=$parent $child p4mergeosx.executable = /Applications/p4merge.app/Contents/MacOS/p4merge p4mergeosx.args = $base $local $other $output p4mergeosx.gui = True p4mergeosx.priority=-8 p4mergeosx.diffargs=$parent $child tortoisemerge.args=/base:$base /mine:$local /theirs:$other /merged:$output tortoisemerge.regkey=Software\TortoiseSVN tortoisemerge.regkeyalt=Software\Wow6432Node\TortoiseSVN tortoisemerge.check=changed tortoisemerge.gui=True tortoisemerge.priority=-8 tortoisemerge.diffargs=/base:$parent /mine:$child /basename:$plabel1 /minename:$clabel ecmerge.args=$base $local $other --mode=merge3 --title0=base --title1=local --title2=other --to=$output ecmerge.regkey=Software\Elli\xc3\xa9 Computing\Merge ecmerge.regkeyalt=Software\Wow6432Node\Elli\xc3\xa9 Computing\Merge ecmerge.gui=True ecmerge.diffargs=$parent $child --mode=diff2 --title1=$plabel1 --title2=$clabel # editmerge is a small script shipped in contrib. 
# It needs this config otherwise it behaves the same as internal:local editmerge.args=$output editmerge.check=changed editmerge.premerge=keep filemerge.executable=/Developer/Applications/Utilities/FileMerge.app/Contents/MacOS/FileMerge filemerge.args=-left $other -right $local -ancestor $base -merge $output filemerge.gui=True filemergexcode.executable=/Applications/Xcode.app/Contents/Applications/FileMerge.app/Contents/MacOS/FileMerge filemergexcode.args=-left $other -right $local -ancestor $base -merge $output filemergexcode.gui=True ; Windows version of Beyond Compare beyondcompare3.args=$local $other $base $output /ro /lefttitle=local /centertitle=base /righttitle=other /automerge /reviewconflicts /solo beyondcompare3.regkey=Software\Scooter Software\Beyond Compare 3 beyondcompare3.regname=ExePath beyondcompare3.gui=True beyondcompare3.priority=-2 beyondcompare3.diffargs=/lro /lefttitle=$plabel1 /righttitle=$clabel /solo /expandall $parent $child ; Linux version of Beyond Compare bcompare.args=$local $other $base -mergeoutput=$output -ro -lefttitle=parent1 -centertitle=base -righttitle=parent2 -outputtitle=merged -automerge -reviewconflicts -solo bcompare.gui=True bcompare.priority=-1 bcompare.diffargs=-lro -lefttitle=$plabel1 -righttitle=$clabel -solo -expandall $parent $child ; OS X version of Beyond Compare bcomposx.executable = /Applications/Beyond Compare.app/Contents/MacOS/bcomp bcomposx.args=$local $other $base -mergeoutput=$output -ro -lefttitle=parent1 -centertitle=base -righttitle=parent2 -outputtitle=merged -automerge -reviewconflicts -solo bcomposx.gui=True bcomposx.priority=-1 bcomposx.diffargs=-lro -lefttitle=$plabel1 -righttitle=$clabel -solo -expandall $parent $child winmerge.args=/e /x /wl /ub /dl other /dr local $other $local $output winmerge.regkey=Software\Thingamahoochie\WinMerge winmerge.regkeyalt=Software\Wow6432Node\Thingamahoochie\WinMerge\ winmerge.regname=Executable winmerge.check=changed winmerge.gui=True winmerge.priority=-10 
winmerge.diffargs=/r /e /x /ub /wl /dl $plabel1 /dr $clabel $parent $child araxis.regkey=SOFTWARE\Classes\TypeLib\{46799e0a-7bd1-4330-911c-9660bb964ea2}\7.0\HELPDIR araxis.regappend=\ConsoleCompare.exe araxis.priority=-2 araxis.args=/3 /a2 /wait /merge /title1:"Other" /title2:"Base" /title3:"Local :"$local $other $base $local $output araxis.checkconflict=True araxis.binary=True araxis.gui=True araxis.diffargs=/2 /wait /title1:$plabel1 /title2:$clabel $parent $child diffuse.priority=-3 diffuse.args=$local $base $other diffuse.gui=True diffuse.diffargs=$parent $child UltraCompare.regkey=Software\Microsoft\Windows\CurrentVersion\App Paths\UC.exe UltraCompare.regkeyalt=Software\Wow6432Node\Microsoft\Windows\CurrentVersion\App Paths\UC.exe UltraCompare.args = $base $local $other -title1 base -title3 other UltraCompare.priority = -2 UltraCompare.gui = True UltraCompare.binary = True UltraCompare.check = conflicts,changed UltraCompare.diffargs=$child $parent -title1 $clabel -title2 $plabel1 mercurial-3.7.3/mercurial/hg.py0000644000175000017500000010052312676531525016063 0ustar mpmmpm00000000000000# hg.py - repository classes for mercurial # # Copyright 2005-2007 Matt Mackall # Copyright 2006 Vadim Gelfer # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import errno import os import shutil from .i18n import _ from .node import nullid from . 
import ( bookmarks, bundlerepo, cmdutil, discovery, error, exchange, extensions, httppeer, localrepo, lock, merge as mergemod, node, phases, repoview, scmutil, sshpeer, statichttprepo, ui as uimod, unionrepo, url, util, verify as verifymod, ) release = lock.release def _local(path): path = util.expandpath(util.urllocalpath(path)) return (os.path.isfile(path) and bundlerepo or localrepo) def addbranchrevs(lrepo, other, branches, revs): peer = other.peer() # a courtesy to callers using a localrepo for other hashbranch, branches = branches if not hashbranch and not branches: x = revs or None if util.safehasattr(revs, 'first'): y = revs.first() elif revs: y = revs[0] else: y = None return x, y if revs: revs = list(revs) else: revs = [] if not peer.capable('branchmap'): if branches: raise error.Abort(_("remote branch lookup not supported")) revs.append(hashbranch) return revs, revs[0] branchmap = peer.branchmap() def primary(branch): if branch == '.': if not lrepo: raise error.Abort(_("dirstate branch not accessible")) branch = lrepo.dirstate.branch() if branch in branchmap: revs.extend(node.hex(r) for r in reversed(branchmap[branch])) return True else: return False for branch in branches: if not primary(branch): raise error.RepoLookupError(_("unknown branch '%s'") % branch) if hashbranch: if not primary(hashbranch): revs.append(hashbranch) return revs, revs[0] def parseurl(path, branches=None): '''parse url#branch, returning (url, (branch, branches))''' u = util.url(path) branch = None if u.fragment: branch = u.fragment u.fragment = None return str(u), (branch, branches or []) schemes = { 'bundle': bundlerepo, 'union': unionrepo, 'file': _local, 'http': httppeer, 'https': httppeer, 'ssh': sshpeer, 'static-http': statichttprepo, } def _peerlookup(path): u = util.url(path) scheme = u.scheme or 'file' thing = schemes.get(scheme) or schemes['file'] try: return thing(path) except TypeError: # we can't test callable(thing) because 'thing' can be an unloaded # module that 
implements __call__ if not util.safehasattr(thing, 'instance'): raise return thing def islocal(repo): '''return true if repo (or path pointing to repo) is local''' if isinstance(repo, str): try: return _peerlookup(repo).islocal(repo) except AttributeError: return False return repo.local() def openpath(ui, path): '''open path with open if local, url.open if remote''' pathurl = util.url(path, parsequery=False, parsefragment=False) if pathurl.islocal(): return util.posixfile(pathurl.localpath(), 'rb') else: return url.open(ui, path) # a list of (ui, repo) functions called for wire peer initialization wirepeersetupfuncs = [] def _peerorrepo(ui, path, create=False): """return a repository object for the specified path""" obj = _peerlookup(path).instance(ui, path, create) ui = getattr(obj, "ui", ui) for name, module in extensions.extensions(ui): hook = getattr(module, 'reposetup', None) if hook: hook(ui, obj) if not obj.local(): for f in wirepeersetupfuncs: f(ui, obj) return obj def repository(ui, path='', create=False): """return a repository object for the specified path""" peer = _peerorrepo(ui, path, create) repo = peer.local() if not repo: raise error.Abort(_("repository '%s' is not local") % (path or peer.url())) return repo.filtered('visible') def peer(uiorrepo, opts, path, create=False): '''return a repository peer for the specified path''' rui = remoteui(uiorrepo, opts) return _peerorrepo(rui, path, create).peer() def defaultdest(source): '''return default destination of clone if none is given >>> defaultdest('foo') 'foo' >>> defaultdest('/foo/bar') 'bar' >>> defaultdest('/') '' >>> defaultdest('') '' >>> defaultdest('http://example.org/') '' >>> defaultdest('http://example.org/foo/') 'foo' ''' path = util.url(source).path if not path: return '' return os.path.basename(os.path.normpath(path)) def share(ui, source, dest=None, update=True, bookmarks=True): '''create a shared repository''' if not islocal(source): raise error.Abort(_('can only share local 
repositories')) if not dest: dest = defaultdest(source) else: dest = ui.expandpath(dest) if isinstance(source, str): origsource = ui.expandpath(source) source, branches = parseurl(origsource) srcrepo = repository(ui, source) rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None) else: srcrepo = source.local() origsource = source = srcrepo.url() checkout = None sharedpath = srcrepo.sharedpath # if our source is already sharing destwvfs = scmutil.vfs(dest, realpath=True) destvfs = scmutil.vfs(os.path.join(destwvfs.base, '.hg'), realpath=True) if destvfs.lexists(): raise error.Abort(_('destination already exists')) if not destwvfs.isdir(): destwvfs.mkdir() destvfs.makedir() requirements = '' try: requirements = srcrepo.vfs.read('requires') except IOError as inst: if inst.errno != errno.ENOENT: raise requirements += 'shared\n' destvfs.write('requires', requirements) destvfs.write('sharedpath', sharedpath) r = repository(ui, destwvfs.base) postshare(srcrepo, r, bookmarks=bookmarks) _postshareupdate(r, update, checkout=checkout) def postshare(sourcerepo, destrepo, bookmarks=True): """Called after a new shared repo is created. The new repo only has a requirements file and pointer to the source. This function configures additional shared data. Extensions can wrap this function and write additional entries to destrepo/.hg/shared to indicate additional pieces of data to be shared. """ default = sourcerepo.ui.config('paths', 'default') if default: fp = destrepo.vfs("hgrc", "w", text=True) fp.write("[paths]\n") fp.write("default = %s\n" % default) fp.close() if bookmarks: fp = destrepo.vfs('shared', 'w') fp.write('bookmarks\n') fp.close() def _postshareupdate(repo, update, checkout=None): """Maybe perform a working directory update after a shared repo is created. ``update`` can be a boolean or a revision to update to. 
""" if not update: return repo.ui.status(_("updating working directory\n")) if update is not True: checkout = update for test in (checkout, 'default', 'tip'): if test is None: continue try: uprev = repo.lookup(test) break except error.RepoLookupError: continue _update(repo, uprev) def copystore(ui, srcrepo, destpath): '''copy files from store of srcrepo in destpath returns destlock ''' destlock = None try: hardlink = None num = 0 closetopic = [None] def prog(topic, pos): if pos is None: closetopic[0] = topic else: ui.progress(topic, pos + num) srcpublishing = srcrepo.publishing() srcvfs = scmutil.vfs(srcrepo.sharedpath) dstvfs = scmutil.vfs(destpath) for f in srcrepo.store.copylist(): if srcpublishing and f.endswith('phaseroots'): continue dstbase = os.path.dirname(f) if dstbase and not dstvfs.exists(dstbase): dstvfs.mkdir(dstbase) if srcvfs.exists(f): if f.endswith('data'): # 'dstbase' may be empty (e.g. revlog format 0) lockfile = os.path.join(dstbase, "lock") # lock to avoid premature writing to the target destlock = lock.lock(dstvfs, lockfile) hardlink, n = util.copyfiles(srcvfs.join(f), dstvfs.join(f), hardlink, progress=prog) num += n if hardlink: ui.debug("linked %d files\n" % num) if closetopic[0]: ui.progress(closetopic[0], None) else: ui.debug("copied %d files\n" % num) if closetopic[0]: ui.progress(closetopic[0], None) return destlock except: # re-raises release(destlock) raise def clonewithshare(ui, peeropts, sharepath, source, srcpeer, dest, pull=False, rev=None, update=True, stream=False): """Perform a clone using a shared repo. The store for the repository will be located at /.hg. The specified revisions will be cloned or pulled from "source". A shared repo will be created at "dest" and a working copy will be created if "update" is True. 
""" revs = None if rev: if not srcpeer.capable('lookup'): raise error.Abort(_("src repository does not support " "revision lookup and so doesn't " "support clone by revision")) revs = [srcpeer.lookup(r) for r in rev] # Obtain a lock before checking for or cloning the pooled repo otherwise # 2 clients may race creating or populating it. pooldir = os.path.dirname(sharepath) # lock class requires the directory to exist. try: util.makedir(pooldir, False) except OSError as e: if e.errno != errno.EEXIST: raise poolvfs = scmutil.vfs(pooldir) basename = os.path.basename(sharepath) with lock.lock(poolvfs, '%s.lock' % basename): if os.path.exists(sharepath): ui.status(_('(sharing from existing pooled repository %s)\n') % basename) else: ui.status(_('(sharing from new pooled repository %s)\n') % basename) # Always use pull mode because hardlinks in share mode don't work # well. Never update because working copies aren't necessary in # share mode. clone(ui, peeropts, source, dest=sharepath, pull=True, rev=rev, update=False, stream=stream) sharerepo = repository(ui, path=sharepath) share(ui, sharerepo, dest=dest, update=False, bookmarks=False) # We need to perform a pull against the dest repo to fetch bookmarks # and other non-store data that isn't shared by default. In the case of # non-existing shared repo, this means we pull from the remote twice. This # is a bit weird. But at the time it was implemented, there wasn't an easy # way to pull just non-changegroup data. destrepo = repository(ui, path=dest) exchange.pull(destrepo, srcpeer, heads=revs) _postshareupdate(destrepo, update) return srcpeer, peer(ui, peeropts, dest) def clone(ui, peeropts, source, dest=None, pull=False, rev=None, update=True, stream=False, branch=None, shareopts=None): """Make a copy of an existing repository. Create a copy of an existing repository in a new directory. The source and destination are URLs, as passed to the repository function. 
Returns a pair of repository peers, the source and newly created destination. The location of the source is added to the new repository's .hg/hgrc file, as the default to be used for future pulls and pushes. If an exception is raised, the partly cloned/updated destination repository will be deleted. Arguments: source: repository object or URL dest: URL of destination repository to create (defaults to base name of source repository) pull: always pull from source repository, even in local case or if the server prefers streaming stream: stream raw data uncompressed from repository (fast over LAN, slow over WAN) rev: revision to clone up to (implies pull=True) update: update working directory after clone completes, if destination is local repository (True means update to default rev, anything else is treated as a revision) branch: branches to clone shareopts: dict of options to control auto sharing behavior. The "pool" key activates auto sharing mode and defines the directory for stores. The "mode" key determines how to construct the directory name of the shared repository. "identity" means the name is derived from the node of the first changeset in the repository. "remote" means the name is derived from the remote's path/URL. Defaults to "identity." 
""" if isinstance(source, str): origsource = ui.expandpath(source) source, branch = parseurl(origsource, branch) srcpeer = peer(ui, peeropts, source) else: srcpeer = source.peer() # in case we were called with a localrepo branch = (None, branch or []) origsource = source = srcpeer.url() rev, checkout = addbranchrevs(srcpeer, srcpeer, branch, rev) if dest is None: dest = defaultdest(source) if dest: ui.status(_("destination directory: %s\n") % dest) else: dest = ui.expandpath(dest) dest = util.urllocalpath(dest) source = util.urllocalpath(source) if not dest: raise error.Abort(_("empty destination path is not valid")) destvfs = scmutil.vfs(dest, expandpath=True) if destvfs.lexists(): if not destvfs.isdir(): raise error.Abort(_("destination '%s' already exists") % dest) elif destvfs.listdir(): raise error.Abort(_("destination '%s' is not empty") % dest) shareopts = shareopts or {} sharepool = shareopts.get('pool') sharenamemode = shareopts.get('mode') if sharepool and islocal(dest): sharepath = None if sharenamemode == 'identity': # Resolve the name from the initial changeset in the remote # repository. This returns nullid when the remote is empty. It # raises RepoLookupError if revision 0 is filtered or otherwise # not available. If we fail to resolve, sharing is not enabled. 
try: rootnode = srcpeer.lookup('0') if rootnode != node.nullid: sharepath = os.path.join(sharepool, node.hex(rootnode)) else: ui.status(_('(not using pooled storage: ' 'remote appears to be empty)\n')) except error.RepoLookupError: ui.status(_('(not using pooled storage: ' 'unable to resolve identity of remote)\n')) elif sharenamemode == 'remote': sharepath = os.path.join(sharepool, util.sha1(source).hexdigest()) else: raise error.Abort('unknown share naming mode: %s' % sharenamemode) if sharepath: return clonewithshare(ui, peeropts, sharepath, source, srcpeer, dest, pull=pull, rev=rev, update=update, stream=stream) srclock = destlock = cleandir = None srcrepo = srcpeer.local() try: abspath = origsource if islocal(origsource): abspath = os.path.abspath(util.urllocalpath(origsource)) if islocal(dest): cleandir = dest copy = False if (srcrepo and srcrepo.cancopy() and islocal(dest) and not phases.hassecret(srcrepo)): copy = not pull and not rev if copy: try: # we use a lock here because if we race with commit, we # can end up with extra data in the cloned revlogs that's # not pointed to by changesets, thus causing verify to # fail srclock = srcrepo.lock(wait=False) except error.LockError: copy = False if copy: srcrepo.hook('preoutgoing', throw=True, source='clone') hgdir = os.path.realpath(os.path.join(dest, ".hg")) if not os.path.exists(dest): os.mkdir(dest) else: # only clean up directories we create ourselves cleandir = hgdir try: destpath = hgdir util.makedir(destpath, notindexed=True) except OSError as inst: if inst.errno == errno.EEXIST: cleandir = None raise error.Abort(_("destination '%s' already exists") % dest) raise destlock = copystore(ui, srcrepo, destpath) # copy bookmarks over srcbookmarks = srcrepo.join('bookmarks') dstbookmarks = os.path.join(destpath, 'bookmarks') if os.path.exists(srcbookmarks): util.copyfile(srcbookmarks, dstbookmarks) # Recomputing branch cache might be slow on big repos, # so just copy it def copybranchcache(fname): 
srcbranchcache = srcrepo.join('cache/%s' % fname) dstbranchcache = os.path.join(dstcachedir, fname) if os.path.exists(srcbranchcache): if not os.path.exists(dstcachedir): os.mkdir(dstcachedir) util.copyfile(srcbranchcache, dstbranchcache) dstcachedir = os.path.join(destpath, 'cache') # In local clones we're copying all nodes, not just served # ones. Therefore copy all branch caches over. copybranchcache('branch2') for cachename in repoview.filtertable: copybranchcache('branch2-%s' % cachename) # we need to re-init the repo after manually copying the data # into it destpeer = peer(srcrepo, peeropts, dest) srcrepo.hook('outgoing', source='clone', node=node.hex(node.nullid)) else: try: destpeer = peer(srcrepo or ui, peeropts, dest, create=True) # only pass ui when no srcrepo except OSError as inst: if inst.errno == errno.EEXIST: cleandir = None raise error.Abort(_("destination '%s' already exists") % dest) raise revs = None if rev: if not srcpeer.capable('lookup'): raise error.Abort(_("src repository does not support " "revision lookup and so doesn't " "support clone by revision")) revs = [srcpeer.lookup(r) for r in rev] checkout = revs[0] local = destpeer.local() if local: if not stream: if pull: stream = False else: stream = None # internal config: ui.quietbookmarkmove quiet = local.ui.backupconfig('ui', 'quietbookmarkmove') try: local.ui.setconfig( 'ui', 'quietbookmarkmove', True, 'clone') exchange.pull(local, srcpeer, revs, streamclonerequested=stream) finally: local.ui.restoreconfig(quiet) elif srcrepo: exchange.push(srcrepo, destpeer, revs=revs, bookmarks=srcrepo._bookmarks.keys()) else: raise error.Abort(_("clone from remote to remote not supported") ) cleandir = None destrepo = destpeer.local() if destrepo: template = uimod.samplehgrcs['cloned'] fp = destrepo.vfs("hgrc", "w", text=True) u = util.url(abspath) u.passwd = None defaulturl = str(u) fp.write(template % defaulturl) fp.close() destrepo.ui.setconfig('paths', 'default', defaulturl, 'clone') if update: 
if update is not True: checkout = srcpeer.lookup(update) uprev = None status = None if checkout is not None: try: uprev = destrepo.lookup(checkout) except error.RepoLookupError: if update is not True: try: uprev = destrepo.lookup(update) except error.RepoLookupError: pass if uprev is None: try: uprev = destrepo._bookmarks['@'] update = '@' bn = destrepo[uprev].branch() if bn == 'default': status = _("updating to bookmark @\n") else: status = (_("updating to bookmark @ on branch %s\n") % bn) except KeyError: try: uprev = destrepo.branchtip('default') except error.RepoLookupError: uprev = destrepo.lookup('tip') if not status: bn = destrepo[uprev].branch() status = _("updating to branch %s\n") % bn destrepo.ui.status(status) _update(destrepo, uprev) if update in destrepo._bookmarks: bookmarks.activate(destrepo, update) finally: release(srclock, destlock) if cleandir is not None: shutil.rmtree(cleandir, True) if srcpeer is not None: srcpeer.close() return srcpeer, destpeer def _showstats(repo, stats, quietempty=False): if quietempty and not any(stats): return repo.ui.status(_("%d files updated, %d files merged, " "%d files removed, %d files unresolved\n") % stats) def updaterepo(repo, node, overwrite): """Update the working directory to node. 
When overwrite is set, changes are clobbered, merged else returns stats (see pydoc mercurial.merge.applyupdates)""" return mergemod.update(repo, node, False, overwrite, labels=['working copy', 'destination']) def update(repo, node, quietempty=False): """update the working directory to node, merging linear changes""" stats = updaterepo(repo, node, False) _showstats(repo, stats, quietempty) if stats[3]: repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n")) return stats[3] > 0 # naming conflict in clone() _update = update def clean(repo, node, show_stats=True, quietempty=False): """forcibly switch the working directory to node, clobbering changes""" stats = updaterepo(repo, node, True) util.unlinkpath(repo.join('graftstate'), ignoremissing=True) if show_stats: _showstats(repo, stats, quietempty) return stats[3] > 0 def merge(repo, node, force=None, remind=True): """Branch merge with node, resolving changes. Return true if any unresolved conflicts.""" stats = mergemod.update(repo, node, True, force) _showstats(repo, stats) if stats[3]: repo.ui.status(_("use 'hg resolve' to retry unresolved file merges " "or 'hg update -C .' to abandon\n")) elif remind: repo.ui.status(_("(branch merge, don't forget to commit)\n")) return stats[3] > 0 def _incoming(displaychlist, subreporecurse, ui, repo, source, opts, buffered=False): """ Helper for incoming / gincoming. displaychlist gets called with (remoterepo, incomingchangesetlist, displayer) parameters, and is supposed to contain only code that can't be unified. 
""" source, branches = parseurl(ui.expandpath(source), opts.get('branch')) other = peer(repo, opts, source) ui.status(_('comparing with %s\n') % util.hidepassword(source)) revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev')) if revs: revs = [other.lookup(rev) for rev in revs] other, chlist, cleanupfn = bundlerepo.getremotechanges(ui, repo, other, revs, opts["bundle"], opts["force"]) try: if not chlist: ui.status(_("no changes found\n")) return subreporecurse() displayer = cmdutil.show_changeset(ui, other, opts, buffered) displaychlist(other, chlist, displayer) displayer.close() finally: cleanupfn() subreporecurse() return 0 # exit code is zero since we found incoming changes def incoming(ui, repo, source, opts): def subreporecurse(): ret = 1 if opts.get('subrepos'): ctx = repo[None] for subpath in sorted(ctx.substate): sub = ctx.sub(subpath) ret = min(ret, sub.incoming(ui, source, opts)) return ret def display(other, chlist, displayer): limit = cmdutil.loglimit(opts) if opts.get('newest_first'): chlist.reverse() count = 0 for n in chlist: if limit is not None and count >= limit: break parents = [p for p in other.changelog.parents(n) if p != nullid] if opts.get('no_merges') and len(parents) == 2: continue count += 1 displayer.show(other[n]) return _incoming(display, subreporecurse, ui, repo, source, opts) def _outgoing(ui, repo, dest, opts): dest = ui.expandpath(dest or 'default-push', dest or 'default') dest, branches = parseurl(dest, opts.get('branch')) ui.status(_('comparing with %s\n') % util.hidepassword(dest)) revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev')) if revs: revs = [repo.lookup(rev) for rev in scmutil.revrange(repo, revs)] other = peer(repo, opts, dest) outgoing = discovery.findcommonoutgoing(repo.unfiltered(), other, revs, force=opts.get('force')) o = outgoing.missing if not o: scmutil.nochangesfound(repo.ui, repo, outgoing.excluded) return o, other def outgoing(ui, repo, dest, opts): def recurse(): ret = 1 if 
opts.get('subrepos'): ctx = repo[None] for subpath in sorted(ctx.substate): sub = ctx.sub(subpath) ret = min(ret, sub.outgoing(ui, dest, opts)) return ret limit = cmdutil.loglimit(opts) o, other = _outgoing(ui, repo, dest, opts) if not o: cmdutil.outgoinghooks(ui, repo, other, opts, o) return recurse() if opts.get('newest_first'): o.reverse() displayer = cmdutil.show_changeset(ui, repo, opts) count = 0 for n in o: if limit is not None and count >= limit: break parents = [p for p in repo.changelog.parents(n) if p != nullid] if opts.get('no_merges') and len(parents) == 2: continue count += 1 displayer.show(repo[n]) displayer.close() cmdutil.outgoinghooks(ui, repo, other, opts, o) recurse() return 0 # exit code is zero since we found outgoing changes def verify(repo): """verify the consistency of a repository""" ret = verifymod.verify(repo) # Broken subrepo references in hidden csets don't seem worth worrying about, # since they can't be pushed/pulled, and --hidden can be used if they are a # concern. 
# pathto() is needed for -R case revs = repo.revs("filelog(%s)", util.pathto(repo.root, repo.getcwd(), '.hgsubstate')) if revs: repo.ui.status(_('checking subrepo links\n')) for rev in revs: ctx = repo[rev] try: for subpath in ctx.substate: ret = ctx.sub(subpath).verify() or ret except Exception: repo.ui.warn(_('.hgsubstate is corrupt in revision %s\n') % node.short(ctx.node())) return ret def remoteui(src, opts): 'build a remote ui from ui or repo and opts' if util.safehasattr(src, 'baseui'): # looks like a repository dst = src.baseui.copy() # drop repo-specific config src = src.ui # copy target options from repo else: # assume it's a global ui object dst = src.copy() # keep all global options # copy ssh-specific options for o in 'ssh', 'remotecmd': v = opts.get(o) or src.config('ui', o) if v: dst.setconfig("ui", o, v, 'copied') # copy bundle-specific options r = src.config('bundle', 'mainreporoot') if r: dst.setconfig('bundle', 'mainreporoot', r, 'copied') # copy selected local settings to the remote ui for sect in ('auth', 'hostfingerprints', 'http_proxy'): for key, val in src.configitems(sect): dst.setconfig(sect, key, val, 'copied') v = src.config('web', 'cacerts') if v == '!': dst.setconfig('web', 'cacerts', v, 'copied') elif v: dst.setconfig('web', 'cacerts', util.expandpath(v), 'copied') return dst # Files of interest # Used to check if the repository has changed looking at mtime and size of # these files. foi = [('spath', '00changelog.i'), ('spath', 'phaseroots'), # ! phase can change content at the same size ('spath', 'obsstore'), ('path', 'bookmarks'), # ! bookmark can change content at the same size ] class cachedlocalrepo(object): """Holds a localrepository that can be cached and reused.""" def __init__(self, repo): """Create a new cached repo from an existing repo. We assume the passed in repo was recently created. If the repo has changed between when it was created and when it was turned into a cache, it may not refresh properly. 
""" assert isinstance(repo, localrepo.localrepository) self._repo = repo self._state, self.mtime = self._repostate() def fetch(self): """Refresh (if necessary) and return a repository. If the cached instance is out of date, it will be recreated automatically and returned. Returns a tuple of the repo and a boolean indicating whether a new repo instance was created. """ # We compare the mtimes and sizes of some well-known files to # determine if the repo changed. This is not precise, as mtimes # are susceptible to clock skew and imprecise filesystems and # file content can change while maintaining the same size. state, mtime = self._repostate() if state == self._state: return self._repo, False self._repo = repository(self._repo.baseui, self._repo.url()) self._state = state self.mtime = mtime return self._repo, True def _repostate(self): state = [] maxmtime = -1 for attr, fname in foi: prefix = getattr(self._repo, attr) p = os.path.join(prefix, fname) try: st = os.stat(p) except OSError: st = os.stat(prefix) state.append((st.st_mtime, st.st_size)) maxmtime = max(maxmtime, st.st_mtime) return tuple(state), maxmtime def copy(self): """Obtain a copy of this class instance. A new localrepository instance is obtained. The new instance should be completely independent of the original. """ repo = repository(self._repo.baseui, self._repo.origroot) c = cachedlocalrepo(repo) c._state = self._state c.mtime = self.mtime return c mercurial-3.7.3/mercurial/strutil.py0000644000175000017500000000167112676531525017177 0ustar mpmmpm00000000000000# strutil.py - string utilities for Mercurial # # Copyright 2006 Vadim Gelfer # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
from __future__ import absolute_import def findall(haystack, needle, start=0, end=None): if end is None: end = len(haystack) if end < 0: end += len(haystack) if start < 0: start += len(haystack) while start < end: c = haystack.find(needle, start, end) if c == -1: break yield c start = c + 1 def rfindall(haystack, needle, start=0, end=None): if end is None: end = len(haystack) if end < 0: end += len(haystack) if start < 0: start += len(haystack) while end >= 0: c = haystack.rfind(needle, start, end) if c == -1: break yield c end = c - 1 mercurial-3.7.3/mercurial/encoding.py0000644000175000017500000004233112676531525017255 0ustar mpmmpm00000000000000# encoding.py - character transcoding support for Mercurial # # Copyright 2005-2009 Matt Mackall and others # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import locale import os import unicodedata from . import ( error, ) # These unicode characters are ignored by HFS+ (Apple Technote 1150, # "Unicode Subtleties"), so we need to ignore them in some places for # sanity. _ignore = [unichr(int(x, 16)).encode("utf-8") for x in "200c 200d 200e 200f 202a 202b 202c 202d 202e " "206a 206b 206c 206d 206e 206f feff".split()] # verify the next function will work assert set([i[0] for i in _ignore]) == set(["\xe2", "\xef"]) def hfsignoreclean(s): """Remove codepoints ignored by HFS+ from s. >>> hfsignoreclean(u'.h\u200cg'.encode('utf-8')) '.hg' >>> hfsignoreclean(u'.h\ufeffg'.encode('utf-8')) '.hg' """ if "\xe2" in s or "\xef" in s: for c in _ignore: s = s.replace(c, '') return s def _getpreferredencoding(): ''' On darwin, getpreferredencoding ignores the locale environment and always returns mac-roman. http://bugs.python.org/issue6202 fixes this for Python 2.7 and up. This is the same corrected code for earlier Python versions. 
However, we can't use a version check for this method, as some distributions patch Python to fix this. Instead, we use it as a 'fixer' for the mac-roman encoding, as it is unlikely that this encoding is the actually expected. ''' try: locale.CODESET except AttributeError: # Fall back to parsing environment variables :-( return locale.getdefaultlocale()[1] oldloc = locale.setlocale(locale.LC_CTYPE) locale.setlocale(locale.LC_CTYPE, "") result = locale.nl_langinfo(locale.CODESET) locale.setlocale(locale.LC_CTYPE, oldloc) return result _encodingfixers = { '646': lambda: 'ascii', 'ANSI_X3.4-1968': lambda: 'ascii', 'mac-roman': _getpreferredencoding } try: encoding = os.environ.get("HGENCODING") if not encoding: encoding = locale.getpreferredencoding() or 'ascii' encoding = _encodingfixers.get(encoding, lambda: encoding)() except locale.Error: encoding = 'ascii' encodingmode = os.environ.get("HGENCODINGMODE", "strict") fallbackencoding = 'ISO-8859-1' class localstr(str): '''This class allows strings that are unmodified to be round-tripped to the local encoding and back''' def __new__(cls, u, l): s = str.__new__(cls, l) s._utf8 = u return s def __hash__(self): return hash(self._utf8) # avoid collisions in local string space def tolocal(s): """ Convert a string from internal UTF-8 to local encoding All internal strings should be UTF-8 but some repos before the implementation of locale support may contain latin1 or possibly other character sets. We attempt to decode everything strictly using UTF-8, then Latin-1, and failing that, we use UTF-8 and replace unknown characters. The localstr class is used to cache the known UTF-8 encoding of strings next to their local representation to allow lossless round-trip conversion back to UTF-8. >>> u = 'foo: \\xc3\\xa4' # utf-8 >>> l = tolocal(u) >>> l 'foo: ?' >>> fromlocal(l) 'foo: \\xc3\\xa4' >>> u2 = 'foo: \\xc3\\xa1' >>> d = { l: 1, tolocal(u2): 2 } >>> len(d) # no collision 2 >>> 'foo: ?' 
in d False >>> l1 = 'foo: \\xe4' # historical latin1 fallback >>> l = tolocal(l1) >>> l 'foo: ?' >>> fromlocal(l) # magically in utf-8 'foo: \\xc3\\xa4' """ try: try: # make sure string is actually stored in UTF-8 u = s.decode('UTF-8') if encoding == 'UTF-8': # fast path return s r = u.encode(encoding, "replace") if u == r.decode(encoding): # r is a safe, non-lossy encoding of s return r return localstr(s, r) except UnicodeDecodeError: # we should only get here if we're looking at an ancient changeset try: u = s.decode(fallbackencoding) r = u.encode(encoding, "replace") if u == r.decode(encoding): # r is a safe, non-lossy encoding of s return r return localstr(u.encode('UTF-8'), r) except UnicodeDecodeError: u = s.decode("utf-8", "replace") # last ditch return u.encode(encoding, "replace") # can't round-trip except LookupError as k: raise error.Abort(k, hint="please check your locale settings") def fromlocal(s): """ Convert a string from the local character encoding to UTF-8 We attempt to decode strings using the encoding mode set by HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown characters will cause an error message. Other modes include 'replace', which replaces unknown characters with a special Unicode character, and 'ignore', which drops the character. """ # can we do a lossless round-trip? if isinstance(s, localstr): return s._utf8 try: return s.decode(encoding, encodingmode).encode("utf-8") except UnicodeDecodeError as inst: sub = s[max(0, inst.start - 10):inst.start + 10] raise error.Abort("decoding near '%s': %s!" % (sub, inst)) except LookupError as k: raise error.Abort(k, hint="please check your locale settings") # How to treat ambiguous-width characters. Set to 'wide' to treat as wide. 
wide = (os.environ.get("HGENCODINGAMBIGUOUS", "narrow") == "wide" and "WFA" or "WF") def colwidth(s): "Find the column width of a string for display in the local encoding" return ucolwidth(s.decode(encoding, 'replace')) def ucolwidth(d): "Find the column width of a Unicode string for display" eaw = getattr(unicodedata, 'east_asian_width', None) if eaw is not None: return sum([eaw(c) in wide and 2 or 1 for c in d]) return len(d) def getcols(s, start, c): '''Use colwidth to find a c-column substring of s starting at byte index start''' for x in xrange(start + c, len(s)): t = s[start:x] if colwidth(t) == c: return t def trim(s, width, ellipsis='', leftside=False): """Trim string 's' to at most 'width' columns (including 'ellipsis'). If 'leftside' is True, left side of string 's' is trimmed. 'ellipsis' is always placed at trimmed side. >>> ellipsis = '+++' >>> from . import encoding >>> encoding.encoding = 'utf-8' >>> t= '1234567890' >>> print trim(t, 12, ellipsis=ellipsis) 1234567890 >>> print trim(t, 10, ellipsis=ellipsis) 1234567890 >>> print trim(t, 8, ellipsis=ellipsis) 12345+++ >>> print trim(t, 8, ellipsis=ellipsis, leftside=True) +++67890 >>> print trim(t, 8) 12345678 >>> print trim(t, 8, leftside=True) 34567890 >>> print trim(t, 3, ellipsis=ellipsis) +++ >>> print trim(t, 1, ellipsis=ellipsis) + >>> u = u'\u3042\u3044\u3046\u3048\u304a' # 2 x 5 = 10 columns >>> t = u.encode(encoding.encoding) >>> print trim(t, 12, ellipsis=ellipsis) \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a >>> print trim(t, 10, ellipsis=ellipsis) \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a >>> print trim(t, 8, ellipsis=ellipsis) \xe3\x81\x82\xe3\x81\x84+++ >>> print trim(t, 8, ellipsis=ellipsis, leftside=True) +++\xe3\x81\x88\xe3\x81\x8a >>> print trim(t, 5) \xe3\x81\x82\xe3\x81\x84 >>> print trim(t, 5, leftside=True) \xe3\x81\x88\xe3\x81\x8a >>> print trim(t, 4, ellipsis=ellipsis) +++ >>> print trim(t, 4, ellipsis=ellipsis, leftside=True) +++ >>> t = 
'\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa' # invalid byte sequence >>> print trim(t, 12, ellipsis=ellipsis) \x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa >>> print trim(t, 10, ellipsis=ellipsis) \x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa >>> print trim(t, 8, ellipsis=ellipsis) \x11\x22\x33\x44\x55+++ >>> print trim(t, 8, ellipsis=ellipsis, leftside=True) +++\x66\x77\x88\x99\xaa >>> print trim(t, 8) \x11\x22\x33\x44\x55\x66\x77\x88 >>> print trim(t, 8, leftside=True) \x33\x44\x55\x66\x77\x88\x99\xaa >>> print trim(t, 3, ellipsis=ellipsis) +++ >>> print trim(t, 1, ellipsis=ellipsis) + """ try: u = s.decode(encoding) except UnicodeDecodeError: if len(s) <= width: # trimming is not needed return s width -= len(ellipsis) if width <= 0: # no enough room even for ellipsis return ellipsis[:width + len(ellipsis)] if leftside: return ellipsis + s[-width:] return s[:width] + ellipsis if ucolwidth(u) <= width: # trimming is not needed return s width -= len(ellipsis) if width <= 0: # no enough room even for ellipsis return ellipsis[:width + len(ellipsis)] if leftside: uslice = lambda i: u[i:] concat = lambda s: ellipsis + s else: uslice = lambda i: u[:-i] concat = lambda s: s + ellipsis for i in xrange(1, len(u)): usub = uslice(i) if ucolwidth(usub) <= width: return concat(usub.encode(encoding)) return ellipsis # no enough room for multi-column characters def _asciilower(s): '''convert a string to lowercase if ASCII Raises UnicodeDecodeError if non-ASCII characters are found.''' s.decode('ascii') return s.lower() def asciilower(s): # delay importing avoids cyclic dependency around "parsers" in # pure Python build (util => i18n => encoding => parsers => util) from . 
import parsers impl = getattr(parsers, 'asciilower', _asciilower) global asciilower asciilower = impl return impl(s) def _asciiupper(s): '''convert a string to uppercase if ASCII Raises UnicodeDecodeError if non-ASCII characters are found.''' s.decode('ascii') return s.upper() def asciiupper(s): # delay importing avoids cyclic dependency around "parsers" in # pure Python build (util => i18n => encoding => parsers => util) from . import parsers impl = getattr(parsers, 'asciiupper', _asciiupper) global asciiupper asciiupper = impl return impl(s) def lower(s): "best-effort encoding-aware case-folding of local string s" try: return asciilower(s) except UnicodeDecodeError: pass try: if isinstance(s, localstr): u = s._utf8.decode("utf-8") else: u = s.decode(encoding, encodingmode) lu = u.lower() if u == lu: return s # preserve localstring return lu.encode(encoding) except UnicodeError: return s.lower() # we don't know how to fold this except in ASCII except LookupError as k: raise error.Abort(k, hint="please check your locale settings") def upper(s): "best-effort encoding-aware case-folding of local string s" try: return asciiupper(s) except UnicodeDecodeError: return upperfallback(s) def upperfallback(s): try: if isinstance(s, localstr): u = s._utf8.decode("utf-8") else: u = s.decode(encoding, encodingmode) uu = u.upper() if u == uu: return s # preserve localstring return uu.encode(encoding) except UnicodeError: return s.upper() # we don't know how to fold this except in ASCII except LookupError as k: raise error.Abort(k, hint="please check your locale settings") class normcasespecs(object): '''what a platform's normcase does to ASCII strings This is specified per platform, and should be consistent with what normcase on that platform actually does. 
lower: normcase lowercases ASCII strings upper: normcase uppercases ASCII strings other: the fallback function should always be called This should be kept in sync with normcase_spec in util.h.''' lower = -1 upper = 1 other = 0 _jsonmap = {} def jsonescape(s): '''returns a string suitable for JSON JSON is problematic for us because it doesn't support non-Unicode bytes. To deal with this, we take the following approach: - localstr objects are converted back to UTF-8 - valid UTF-8/ASCII strings are passed as-is - other strings are converted to UTF-8b surrogate encoding - apply JSON-specified string escaping (escapes are doubled in these tests) >>> jsonescape('this is a test') 'this is a test' >>> jsonescape('escape characters: \\0 \\x0b \\x7f') 'escape characters: \\\\u0000 \\\\u000b \\\\u007f' >>> jsonescape('escape characters: \\t \\n \\r \\" \\\\') 'escape characters: \\\\t \\\\n \\\\r \\\\" \\\\\\\\' >>> jsonescape('a weird byte: \\xdd') 'a weird byte: \\xed\\xb3\\x9d' >>> jsonescape('utf-8: caf\\xc3\\xa9') 'utf-8: caf\\xc3\\xa9' >>> jsonescape('') '' ''' if not _jsonmap: for x in xrange(32): _jsonmap[chr(x)] = "\\u%04x" % x for x in xrange(32, 256): c = chr(x) _jsonmap[c] = c _jsonmap['\x7f'] = '\\u007f' _jsonmap['\t'] = '\\t' _jsonmap['\n'] = '\\n' _jsonmap['\"'] = '\\"' _jsonmap['\\'] = '\\\\' _jsonmap['\b'] = '\\b' _jsonmap['\f'] = '\\f' _jsonmap['\r'] = '\\r' return ''.join(_jsonmap[c] for c in toutf8b(s)) _utf8len = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 3, 4] def getutf8char(s, pos): '''get the next full utf-8 character in the given string, starting at pos Raises a UnicodeError if the given location does not start a valid utf-8 character. 
''' # find how many bytes to attempt decoding from first nibble l = _utf8len[ord(s[pos]) >> 4] if not l: # ascii return s[pos] c = s[pos:pos + l] # validate with attempted decode c.decode("utf-8") return c def toutf8b(s): '''convert a local, possibly-binary string into UTF-8b This is intended as a generic method to preserve data when working with schemes like JSON and XML that have no provision for arbitrary byte strings. As Mercurial often doesn't know what encoding data is in, we use so-called UTF-8b. If a string is already valid UTF-8 (or ASCII), it passes unmodified. Otherwise, unsupported bytes are mapped to UTF-16 surrogate range, uDC00-uDCFF. Principles of operation: - ASCII and UTF-8 data successfully round-trips and is understood by Unicode-oriented clients - filenames and file contents in arbitrary other encodings can have be round-tripped or recovered by clueful clients - local strings that have a cached known UTF-8 encoding (aka localstr) get sent as UTF-8 so Unicode-oriented clients get the Unicode data they want - because we must preserve UTF-8 bytestring in places such as filenames, metadata can't be roundtripped without help (Note: "UTF-8b" often refers to decoding a mix of valid UTF-8 and arbitrary bytes into an internal Unicode format that can be re-encoded back into the original. Here we are exposing the internal surrogate encoding as a UTF-8 string.) ''' if "\xed" not in s: if isinstance(s, localstr): return s._utf8 try: s.decode('utf-8') return s except UnicodeDecodeError: pass r = "" pos = 0 l = len(s) while pos < l: try: c = getutf8char(s, pos) if "\xed\xb0\x80" <= c <= "\xed\xb3\xbf": # have to re-escape existing U+DCxx characters c = unichr(0xdc00 + ord(s[pos])).encode('utf-8') pos += 1 else: pos += len(c) except UnicodeDecodeError: c = unichr(0xdc00 + ord(s[pos])).encode('utf-8') pos += 1 r += c return r def fromutf8b(s): '''Given a UTF-8b string, return a local, possibly-binary string. return the original binary string. 
This is a round-trip process for strings like filenames, but metadata that's was passed through tolocal will remain in UTF-8. >>> roundtrip = lambda x: fromutf8b(toutf8b(x)) == x >>> m = "\\xc3\\xa9\\x99abcd" >>> toutf8b(m) '\\xc3\\xa9\\xed\\xb2\\x99abcd' >>> roundtrip(m) True >>> roundtrip("\\xc2\\xc2\\x80") True >>> roundtrip("\\xef\\xbf\\xbd") True >>> roundtrip("\\xef\\xef\\xbf\\xbd") True >>> roundtrip("\\xf1\\x80\\x80\\x80\\x80") True ''' # fast path - look for uDxxx prefixes in s if "\xed" not in s: return s # We could do this with the unicode type but some Python builds # use UTF-16 internally (issue5031) which causes non-BMP code # points to be escaped. Instead, we use our handy getutf8char # helper again to walk the string without "decoding" it. r = "" pos = 0 l = len(s) while pos < l: c = getutf8char(s, pos) pos += len(c) # unescape U+DCxx characters if "\xed\xb0\x80" <= c <= "\xed\xb3\xbf": c = chr(ord(c.decode("utf-8")) & 0xff) r += c return r mercurial-3.7.3/mercurial/discovery.py0000644000175000017500000004031012676531524017470 0ustar mpmmpm00000000000000# discovery.py - protocol changeset discovery functions # # Copyright 2010 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import from .i18n import _ from .node import ( nullid, short, ) from . import ( bookmarks, branchmap, error, obsolete, phases, setdiscovery, treediscovery, util, ) def findcommonincoming(repo, remote, heads=None, force=False): """Return a tuple (common, anyincoming, heads) used to identify the common subset of nodes between repo and remote. "common" is a list of (at least) the heads of the common subset. "anyincoming" is testable as a boolean indicating if any nodes are missing locally. If remote does not support getbundle, this actually is a list of roots of the nodes that would be incoming, to be supplied to changegroupsubset. 
No code except for pull should be relying on this fact any longer. "heads" is either the supplied heads, or else the remote's heads. If you pass heads and they are all known locally, the response lists just these heads in "common" and in "heads". Please use findcommonoutgoing to compute the set of outgoing nodes to give extensions a good hook into outgoing. """ if not remote.capable('getbundle'): return treediscovery.findcommonincoming(repo, remote, heads, force) if heads: allknown = True knownnode = repo.changelog.hasnode # no nodemap until it is filtered for h in heads: if not knownnode(h): allknown = False break if allknown: return (heads, False, heads) res = setdiscovery.findcommonheads(repo.ui, repo, remote, abortwhenunrelated=not force) common, anyinc, srvheads = res return (list(common), anyinc, heads or list(srvheads)) class outgoing(object): '''Represents the set of nodes present in a local repo but not in a (possibly) remote one. Members: missing is a list of all nodes present in local but not in remote. common is a list of all nodes shared between the two repos. excluded is the list of missing changeset that shouldn't be sent remotely. missingheads is the list of heads of missing. commonheads is the list of heads of common. 
The sets are computed on demand from the heads, unless provided upfront by discovery.''' def __init__(self, revlog, commonheads, missingheads): self.commonheads = commonheads self.missingheads = missingheads self._revlog = revlog self._common = None self._missing = None self.excluded = [] def _computecommonmissing(self): sets = self._revlog.findcommonmissing(self.commonheads, self.missingheads) self._common, self._missing = sets @util.propertycache def common(self): if self._common is None: self._computecommonmissing() return self._common @util.propertycache def missing(self): if self._missing is None: self._computecommonmissing() return self._missing def findcommonoutgoing(repo, other, onlyheads=None, force=False, commoninc=None, portable=False): '''Return an outgoing instance to identify the nodes present in repo but not in other. If onlyheads is given, only nodes ancestral to nodes in onlyheads (inclusive) are included. If you already know the local repo's heads, passing them in onlyheads is faster than letting them be recomputed here. If commoninc is given, it must be the result of a prior call to findcommonincoming(repo, other, force) to avoid recomputing it here. 
If portable is given, compute more conservative common and missingheads, to make bundles created from the instance more portable.''' # declare an empty outgoing object to be filled later og = outgoing(repo.changelog, None, None) # get common set if not provided if commoninc is None: commoninc = findcommonincoming(repo, other, force=force) og.commonheads, _any, _hds = commoninc # compute outgoing mayexclude = (repo._phasecache.phaseroots[phases.secret] or repo.obsstore) if not mayexclude: og.missingheads = onlyheads or repo.heads() elif onlyheads is None: # use visible heads as it should be cached og.missingheads = repo.filtered("served").heads() og.excluded = [ctx.node() for ctx in repo.set('secret() or extinct()')] else: # compute common, missing and exclude secret stuff sets = repo.changelog.findcommonmissing(og.commonheads, onlyheads) og._common, allmissing = sets og._missing = missing = [] og.excluded = excluded = [] for node in allmissing: ctx = repo[node] if ctx.phase() >= phases.secret or ctx.extinct(): excluded.append(node) else: missing.append(node) if len(missing) == len(allmissing): missingheads = onlyheads else: # update missing heads missingheads = phases.newheads(repo, onlyheads, excluded) og.missingheads = missingheads if portable: # recompute common and missingheads as if -r had been given for # each head of missing, and --base for each head of the proper # ancestors of missing og._computecommonmissing() cl = repo.changelog missingrevs = set(cl.rev(n) for n in og._missing) og._common = set(cl.ancestors(missingrevs)) - missingrevs commonheads = set(og.commonheads) og.missingheads = [h for h in og.missingheads if h not in commonheads] return og def _headssummary(repo, remote, outgoing): """compute a summary of branch and heads status before and after push return {'branch': ([remoteheads], [newheads], [unsyncedheads])} mapping - branch: the branch name - remoteheads: the list of remote heads known locally None if the branch is new - newheads: the new 
remote heads (known locally) with outgoing pushed - unsyncedheads: the list of remote heads unknown locally. """ cl = repo.changelog headssum = {} # A. Create set of branches involved in the push. branches = set(repo[n].branch() for n in outgoing.missing) remotemap = remote.branchmap() newbranches = branches - set(remotemap) branches.difference_update(newbranches) # A. register remote heads remotebranches = set() for branch, heads in remote.branchmap().iteritems(): remotebranches.add(branch) known = [] unsynced = [] knownnode = cl.hasnode # do not use nodemap until it is filtered for h in heads: if knownnode(h): known.append(h) else: unsynced.append(h) headssum[branch] = (known, list(known), unsynced) # B. add new branch data missingctx = list(repo[n] for n in outgoing.missing) touchedbranches = set() for ctx in missingctx: branch = ctx.branch() touchedbranches.add(branch) if branch not in headssum: headssum[branch] = (None, [], []) # C drop data about untouched branches: for branch in remotebranches - touchedbranches: del headssum[branch] # D. Update newmap with outgoing changes. # This will possibly add new heads and remove existing ones. newmap = branchmap.branchcache((branch, heads[1]) for branch, heads in headssum.iteritems() if heads[0] is not None) newmap.update(repo, (ctx.rev() for ctx in missingctx)) for branch, newheads in newmap.iteritems(): headssum[branch][1][:] = newheads return headssum def _oldheadssummary(repo, remoteheads, outgoing, inc=False): """Compute branchmapsummary for repo without branchmap support""" # 1-4b. old servers: Check for new topological heads. # Construct {old,new}map with branch = None (topological branch). 
# (code based on update) knownnode = repo.changelog.hasnode # no nodemap until it is filtered oldheads = set(h for h in remoteheads if knownnode(h)) # all nodes in outgoing.missing are children of either: # - an element of oldheads # - another element of outgoing.missing # - nullrev # This explains why the new head are very simple to compute. r = repo.set('heads(%ln + %ln)', oldheads, outgoing.missing) newheads = list(c.node() for c in r) # set some unsynced head to issue the "unsynced changes" warning if inc: unsynced = set([None]) else: unsynced = set() return {None: (oldheads, newheads, unsynced)} def _nowarnheads(pushop): # Compute newly pushed bookmarks. We don't warn about bookmarked heads. # internal config: bookmarks.pushing newbookmarks = pushop.ui.configlist('bookmarks', 'pushing') repo = pushop.repo.unfiltered() remote = pushop.remote localbookmarks = repo._bookmarks remotebookmarks = remote.listkeys('bookmarks') bookmarkedheads = set() for bm in localbookmarks: rnode = remotebookmarks.get(bm) if rnode and rnode in repo: lctx, rctx = repo[bm], repo[rnode] if bookmarks.validdest(repo, rctx, lctx): bookmarkedheads.add(lctx.node()) else: if bm in newbookmarks and bm not in remotebookmarks: bookmarkedheads.add(repo[bm].node()) return bookmarkedheads def checkheads(pushop): """Check that a push won't add any outgoing head raise Abort error and display ui message as needed. """ repo = pushop.repo.unfiltered() remote = pushop.remote outgoing = pushop.outgoing remoteheads = pushop.remoteheads newbranch = pushop.newbranch inc = bool(pushop.incoming) # Check for each named branch if we're creating new remote heads. # To be a remote head after push, node must be either: # - unknown locally # - a local outgoing head descended from update # - a remote head that's known locally and not # ancestral to an outgoing head if remoteheads == [nullid]: # remote is empty, nothing to check. 
return if remote.capable('branchmap'): headssum = _headssummary(repo, remote, outgoing) else: headssum = _oldheadssummary(repo, remoteheads, outgoing, inc) newbranches = [branch for branch, heads in headssum.iteritems() if heads[0] is None] # 1. Check for new branches on the remote. if newbranches and not newbranch: # new branch requires --new-branch branchnames = ', '.join(sorted(newbranches)) raise error.Abort(_("push creates new remote branches: %s!") % branchnames, hint=_("use 'hg push --new-branch' to create" " new remote branches")) # 2. Find heads that we need not warn about nowarnheads = _nowarnheads(pushop) # 3. Check for new heads. # If there are more heads after the push than before, a suitable # error message, depending on unsynced status, is displayed. errormsg = None # If there is no obsstore, allfuturecommon won't be used, so no # need to compute it. if repo.obsstore: allmissing = set(outgoing.missing) cctx = repo.set('%ld', outgoing.common) allfuturecommon = set(c.node() for c in cctx) allfuturecommon.update(allmissing) for branch, heads in sorted(headssum.iteritems()): remoteheads, newheads, unsyncedheads = heads candidate_newhs = set(newheads) # add unsynced data if remoteheads is None: oldhs = set() else: oldhs = set(remoteheads) oldhs.update(unsyncedheads) candidate_newhs.update(unsyncedheads) dhs = None # delta heads, the new heads on branch discardedheads = set() if not repo.obsstore: newhs = candidate_newhs else: # remove future heads which are actually obsoleted by another # pushed element: # # XXX as above, There are several cases this code does not handle # XXX properly # # (1) if is public, it won't be affected by obsolete marker # and a new is created # # (2) if the new heads have ancestors which are not obsolete and # not ancestors of any other heads we will have a new head too. # # These two cases will be easy to handle for known changeset but # much more tricky for unsynced changes. 
# # In addition, this code is confused by prune as it only looks for # successors of the heads (none if pruned) leading to issue4354 newhs = set() for nh in candidate_newhs: if nh in repo and repo[nh].phase() <= phases.public: newhs.add(nh) else: for suc in obsolete.allsuccessors(repo.obsstore, [nh]): if suc != nh and suc in allfuturecommon: discardedheads.add(nh) break else: newhs.add(nh) unsynced = sorted(h for h in unsyncedheads if h not in discardedheads) if unsynced: if None in unsynced: # old remote, no heads data heads = None elif len(unsynced) <= 4 or repo.ui.verbose: heads = ' '.join(short(h) for h in unsynced) else: heads = (' '.join(short(h) for h in unsynced[:4]) + ' ' + _("and %s others") % (len(unsynced) - 4)) if heads is None: repo.ui.status(_("remote has heads that are " "not known locally\n")) elif branch is None: repo.ui.status(_("remote has heads that are " "not known locally: %s\n") % heads) else: repo.ui.status(_("remote has heads on branch '%s' that are " "not known locally: %s\n") % (branch, heads)) if remoteheads is None: if len(newhs) > 1: dhs = list(newhs) if errormsg is None: errormsg = (_("push creates new branch '%s' " "with multiple heads") % (branch)) hint = _("merge or" " see \"hg help push\" for details about" " pushing new heads") elif len(newhs) > len(oldhs): # remove bookmarked or existing remote heads from the new heads list dhs = sorted(newhs - nowarnheads - oldhs) if dhs: if errormsg is None: if branch not in ('default', None): errormsg = _("push creates new remote head %s " "on branch '%s'!") % (short(dhs[0]), branch) elif repo[dhs[0]].bookmarks(): errormsg = _("push creates new remote head %s " "with bookmark '%s'!") % ( short(dhs[0]), repo[dhs[0]].bookmarks()[0]) else: errormsg = _("push creates new remote head %s!" 
) % short(dhs[0]) if unsyncedheads: hint = _("pull and merge or" " see \"hg help push\" for details about" " pushing new heads") else: hint = _("merge or" " see \"hg help push\" for details about" " pushing new heads") if branch is None: repo.ui.note(_("new remote heads:\n")) else: repo.ui.note(_("new remote heads on branch '%s':\n") % branch) for h in dhs: repo.ui.note((" %s\n") % short(h)) if errormsg: raise error.Abort(errormsg, hint=hint) mercurial-3.7.3/mercurial/mdiff.py0000644000175000017500000002752412676531525016563 0ustar mpmmpm00000000000000# mdiff.py - diff and patch routines for mercurial # # Copyright 2005, 2006 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import re import struct import zlib from .i18n import _ from . import ( base85, bdiff, error, mpatch, util, ) def splitnewlines(text): '''like str.splitlines, but only split on newlines.''' lines = [l + '\n' for l in text.split('\n')] if lines: if lines[-1] == '\n': lines.pop() else: lines[-1] = lines[-1][:-1] return lines class diffopts(object): '''context is the number of context lines text treats all files as text showfunc enables diff -p output git enables the git extended patch format nodates removes dates from diff headers nobinary ignores binary files noprefix disables the 'a/' and 'b/' prefixes (ignored in plain mode) ignorews ignores all whitespace changes in the diff ignorewsamount ignores changes in the amount of whitespace ignoreblanklines ignores changes whose lines are all blank upgrade generates git diffs to avoid data loss ''' defaults = { 'context': 3, 'text': False, 'showfunc': False, 'git': False, 'nodates': False, 'nobinary': False, 'noprefix': False, 'ignorews': False, 'ignorewsamount': False, 'ignoreblanklines': False, 'upgrade': False, } __slots__ = defaults.keys() def __init__(self, **opts): for k in self.__slots__: v = opts.get(k) if 
v is None: v = self.defaults[k] setattr(self, k, v) try: self.context = int(self.context) except ValueError: raise error.Abort(_('diff context lines count must be ' 'an integer, not %r') % self.context) def copy(self, **kwargs): opts = dict((k, getattr(self, k)) for k in self.defaults) opts.update(kwargs) return diffopts(**opts) defaultopts = diffopts() def wsclean(opts, text, blank=True): if opts.ignorews: text = bdiff.fixws(text, 1) elif opts.ignorewsamount: text = bdiff.fixws(text, 0) if blank and opts.ignoreblanklines: text = re.sub('\n+', '\n', text).strip('\n') return text def splitblock(base1, lines1, base2, lines2, opts): # The input lines matches except for interwoven blank lines. We # transform it into a sequence of matching blocks and blank blocks. lines1 = [(wsclean(opts, l) and 1 or 0) for l in lines1] lines2 = [(wsclean(opts, l) and 1 or 0) for l in lines2] s1, e1 = 0, len(lines1) s2, e2 = 0, len(lines2) while s1 < e1 or s2 < e2: i1, i2, btype = s1, s2, '=' if (i1 >= e1 or lines1[i1] == 0 or i2 >= e2 or lines2[i2] == 0): # Consume the block of blank lines btype = '~' while i1 < e1 and lines1[i1] == 0: i1 += 1 while i2 < e2 and lines2[i2] == 0: i2 += 1 else: # Consume the matching lines while i1 < e1 and lines1[i1] == 1 and lines2[i2] == 1: i1 += 1 i2 += 1 yield [base1 + s1, base1 + i1, base2 + s2, base2 + i2], btype s1 = i1 s2 = i2 def allblocks(text1, text2, opts=None, lines1=None, lines2=None, refine=False): """Return (block, type) tuples, where block is an mdiff.blocks line entry. type is '=' for blocks matching exactly one another (bdiff blocks), '!' for non-matching blocks and '~' for blocks matching only after having filtered blank lines. If refine is True, then '~' blocks are refined and are only made of blank lines. line1 and line2 are text1 and text2 split with splitnewlines() if they are already available. 
""" if opts is None: opts = defaultopts if opts.ignorews or opts.ignorewsamount: text1 = wsclean(opts, text1, False) text2 = wsclean(opts, text2, False) diff = bdiff.blocks(text1, text2) for i, s1 in enumerate(diff): # The first match is special. # we've either found a match starting at line 0 or a match later # in the file. If it starts later, old and new below will both be # empty and we'll continue to the next match. if i > 0: s = diff[i - 1] else: s = [0, 0, 0, 0] s = [s[1], s1[0], s[3], s1[2]] # bdiff sometimes gives huge matches past eof, this check eats them, # and deals with the special first match case described above if s[0] != s[1] or s[2] != s[3]: type = '!' if opts.ignoreblanklines: if lines1 is None: lines1 = splitnewlines(text1) if lines2 is None: lines2 = splitnewlines(text2) old = wsclean(opts, "".join(lines1[s[0]:s[1]])) new = wsclean(opts, "".join(lines2[s[2]:s[3]])) if old == new: type = '~' yield s, type yield s1, '=' def unidiff(a, ad, b, bd, fn1, fn2, opts=defaultopts): def datetag(date, fn=None): if not opts.git and not opts.nodates: return '\t%s\n' % date if fn and ' ' in fn: return '\t\n' return '\n' if not a and not b: return "" if opts.noprefix: aprefix = bprefix = '' else: aprefix = 'a/' bprefix = 'b/' epoch = util.datestr((0, 0)) fn1 = util.pconvert(fn1) fn2 = util.pconvert(fn2) if not opts.text and (util.binary(a) or util.binary(b)): if a and b and len(a) == len(b) and a == b: return "" l = ['Binary file %s has changed\n' % fn1] elif not a: b = splitnewlines(b) if a is None: l1 = '--- /dev/null%s' % datetag(epoch) else: l1 = "--- %s%s%s" % (aprefix, fn1, datetag(ad, fn1)) l2 = "+++ %s%s" % (bprefix + fn2, datetag(bd, fn2)) l3 = "@@ -0,0 +1,%d @@\n" % len(b) l = [l1, l2, l3] + ["+" + e for e in b] elif not b: a = splitnewlines(a) l1 = "--- %s%s%s" % (aprefix, fn1, datetag(ad, fn1)) if b is None: l2 = '+++ /dev/null%s' % datetag(epoch) else: l2 = "+++ %s%s%s" % (bprefix, fn2, datetag(bd, fn2)) l3 = "@@ -1,%d +0,0 @@\n" % len(a) l = [l1, 
l2, l3] + ["-" + e for e in a] else: al = splitnewlines(a) bl = splitnewlines(b) l = list(_unidiff(a, b, al, bl, opts=opts)) if not l: return "" l.insert(0, "--- %s%s%s" % (aprefix, fn1, datetag(ad, fn1))) l.insert(1, "+++ %s%s%s" % (bprefix, fn2, datetag(bd, fn2))) for ln in xrange(len(l)): if l[ln][-1] != '\n': l[ln] += "\n\ No newline at end of file\n" return "".join(l) # creates a headerless unified diff # t1 and t2 are the text to be diffed # l1 and l2 are the text broken up into lines def _unidiff(t1, t2, l1, l2, opts=defaultopts): def contextend(l, len): ret = l + opts.context if ret > len: ret = len return ret def contextstart(l): ret = l - opts.context if ret < 0: return 0 return ret lastfunc = [0, ''] def yieldhunk(hunk): (astart, a2, bstart, b2, delta) = hunk aend = contextend(a2, len(l1)) alen = aend - astart blen = b2 - bstart + aend - a2 func = "" if opts.showfunc: lastpos, func = lastfunc # walk backwards from the start of the context up to the start of # the previous hunk context until we find a line starting with an # alphanumeric char. for i in xrange(astart - 1, lastpos - 1, -1): if l1[i][0].isalnum(): func = ' ' + l1[i].rstrip()[:40] lastfunc[1] = func break # by recording this hunk's starting point as the next place to # start looking for function lines, we avoid reading any line in # the file more than once. lastfunc[0] = astart # zero-length hunk ranges report their start line as one less if alen: astart += 1 if blen: bstart += 1 yield "@@ -%d,%d +%d,%d @@%s\n" % (astart, alen, bstart, blen, func) for x in delta: yield x for x in xrange(a2, aend): yield ' ' + l1[x] # bdiff.blocks gives us the matching sequences in the files. The loop # below finds the spaces between those matching sequences and translates # them into diff output. # hunk = None ignoredlines = 0 for s, stype in allblocks(t1, t2, opts, l1, l2): a1, a2, b1, b2 = s if stype != '!': if stype == '~': # The diff context lines are based on t1 content. 
When # blank lines are ignored, the new lines offsets must # be adjusted as if equivalent blocks ('~') had the # same sizes on both sides. ignoredlines += (b2 - b1) - (a2 - a1) continue delta = [] old = l1[a1:a2] new = l2[b1:b2] b1 -= ignoredlines b2 -= ignoredlines astart = contextstart(a1) bstart = contextstart(b1) prev = None if hunk: # join with the previous hunk if it falls inside the context if astart < hunk[1] + opts.context + 1: prev = hunk astart = hunk[1] bstart = hunk[3] else: for x in yieldhunk(hunk): yield x if prev: # we've joined the previous hunk, record the new ending points. hunk[1] = a2 hunk[3] = b2 delta = hunk[4] else: # create a new hunk hunk = [astart, a2, bstart, b2, delta] delta[len(delta):] = [' ' + x for x in l1[astart:a1]] delta[len(delta):] = ['-' + x for x in old] delta[len(delta):] = ['+' + x for x in new] if hunk: for x in yieldhunk(hunk): yield x def b85diff(to, tn): '''print base85-encoded binary diff''' def fmtline(line): l = len(line) if l <= 26: l = chr(ord('A') + l - 1) else: l = chr(l - 26 + ord('a') - 1) return '%c%s\n' % (l, base85.b85encode(line, True)) def chunk(text, csize=52): l = len(text) i = 0 while i < l: yield text[i:i + csize] i += csize if to is None: to = '' if tn is None: tn = '' if to == tn: return '' # TODO: deltas ret = [] ret.append('GIT binary patch\n') ret.append('literal %s\n' % len(tn)) for l in chunk(zlib.compress(tn)): ret.append(fmtline(l)) ret.append('\n') return ''.join(ret) def patchtext(bin): pos = 0 t = [] while pos < len(bin): p1, p2, l = struct.unpack(">lll", bin[pos:pos + 12]) pos += 12 t.append(bin[pos:pos + l]) pos += l return "".join(t) def patch(a, bin): if len(a) == 0: # skip over trivial delta header return util.buffer(bin, 12) return mpatch.patches(a, [bin]) # similar to difflib.SequenceMatcher.get_matching_blocks def get_matching_blocks(a, b): return [(d[0], d[2], d[1] - d[0]) for d in bdiff.blocks(a, b)] def trivialdiffheader(length): return struct.pack(">lll", 0, 0, length) if length 
else '' def replacediffheader(oldlen, newlen): return struct.pack(">lll", 0, oldlen, newlen) patches = mpatch.patches patchedsize = mpatch.patchedsize textdiff = bdiff.bdiff mercurial-3.7.3/mercurial/branchmap.py0000644000175000017500000004476412676531525017436 0ustar mpmmpm00000000000000# branchmap.py - logic to computes, maintain and stores branchmap for local repo # # Copyright 2005-2007 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import array import struct import time from .node import ( bin, hex, nullid, nullrev, ) from . import ( encoding, error, scmutil, ) array = array.array calcsize = struct.calcsize pack = struct.pack unpack = struct.unpack def _filename(repo): """name of a branchcache file for a given repo or repoview""" filename = "cache/branch2" if repo.filtername: filename = '%s-%s' % (filename, repo.filtername) return filename def read(repo): try: f = repo.vfs(_filename(repo)) lines = f.read().split('\n') f.close() except (IOError, OSError): return None try: cachekey = lines.pop(0).split(" ", 2) last, lrev = cachekey[:2] last, lrev = bin(last), int(lrev) filteredhash = None if len(cachekey) > 2: filteredhash = bin(cachekey[2]) partial = branchcache(tipnode=last, tiprev=lrev, filteredhash=filteredhash) if not partial.validfor(repo): # invalidate the cache raise ValueError('tip differs') for l in lines: if not l: continue node, state, label = l.split(" ", 2) if state not in 'oc': raise ValueError('invalid branch state') label = encoding.tolocal(label.strip()) if not node in repo: raise ValueError('node %s does not exist' % node) node = bin(node) partial.setdefault(label, []).append(node) if state == 'c': partial._closednodes.add(node) except KeyboardInterrupt: raise except Exception as inst: if repo.ui.debugflag: msg = 'invalid branchheads cache' if repo.filtername is not None: msg += ' (%s)' % repo.filtername 
msg += ': %s\n' repo.ui.debug(msg % inst) partial = None return partial ### Nearest subset relation # Nearest subset of filter X is a filter Y so that: # * Y is included in X, # * X - Y is as small as possible. # This create and ordering used for branchmap purpose. # the ordering may be partial subsettable = {None: 'visible', 'visible': 'served', 'served': 'immutable', 'immutable': 'base'} def updatecache(repo): cl = repo.changelog filtername = repo.filtername partial = repo._branchcaches.get(filtername) revs = [] if partial is None or not partial.validfor(repo): partial = read(repo) if partial is None: subsetname = subsettable.get(filtername) if subsetname is None: partial = branchcache() else: subset = repo.filtered(subsetname) partial = subset.branchmap().copy() extrarevs = subset.changelog.filteredrevs - cl.filteredrevs revs.extend(r for r in extrarevs if r <= partial.tiprev) revs.extend(cl.revs(start=partial.tiprev + 1)) if revs: partial.update(repo, revs) partial.write(repo) assert partial.validfor(repo), filtername repo._branchcaches[repo.filtername] = partial def replacecache(repo, bm): """Replace the branchmap cache for a repo with a branch mapping. This is likely only called during clone with a branch map from a remote. """ rbheads = [] closed = [] for bheads in bm.itervalues(): rbheads.extend(bheads) for h in bheads: r = repo.changelog.rev(h) b, c = repo.changelog.branchinfo(r) if c: closed.append(h) if rbheads: rtiprev = max((int(repo.changelog.rev(node)) for node in rbheads)) cache = branchcache(bm, repo[rtiprev].node(), rtiprev, closednodes=closed) # Try to stick it as low as possible # filter above served are unlikely to be fetch from a clone for candidate in ('base', 'immutable', 'served'): rview = repo.filtered(candidate) if cache.validfor(rview): repo._branchcaches[candidate] = cache cache.write(rview) break class branchcache(dict): """A dict like object that hold branches heads cache. 
This cache is used to avoid costly computations to determine all the branch heads of a repo. The cache is serialized on disk in the following format: [optional filtered repo hex hash] ... The first line is used to check if the cache is still valid. If the branch cache is for a filtered repo view, an optional third hash is included that hashes the hashes of all filtered revisions. The open/closed state is represented by a single letter 'o' or 'c'. This field can be used to avoid changelog reads when determining if a branch head closes a branch or not. """ def __init__(self, entries=(), tipnode=nullid, tiprev=nullrev, filteredhash=None, closednodes=None): super(branchcache, self).__init__(entries) self.tipnode = tipnode self.tiprev = tiprev self.filteredhash = filteredhash # closednodes is a set of nodes that close their branch. If the branch # cache has been updated, it may contain nodes that are no longer # heads. if closednodes is None: self._closednodes = set() else: self._closednodes = closednodes def validfor(self, repo): """Is the cache content valid regarding a repo - False when cached tipnode is unknown or if we detect a strip. - True when cache is up to date or a subset of current repo.""" try: return ((self.tipnode == repo.changelog.node(self.tiprev)) and (self.filteredhash == \ scmutil.filteredhash(repo, self.tiprev))) except IndexError: return False def _branchtip(self, heads): '''Return tuple with last open head in heads and false, otherwise return last closed head and true.''' tip = heads[-1] closed = True for h in reversed(heads): if h not in self._closednodes: tip = h closed = False break return tip, closed def branchtip(self, branch): '''Return the tipmost open head on branch head, otherwise return the tipmost closed head on branch. 
Raise KeyError for unknown branch.''' return self._branchtip(self[branch])[0] def branchheads(self, branch, closed=False): heads = self[branch] if not closed: heads = [h for h in heads if h not in self._closednodes] return heads def iterbranches(self): for bn, heads in self.iteritems(): yield (bn, heads) + self._branchtip(heads) def copy(self): """return an deep copy of the branchcache object""" return branchcache(self, self.tipnode, self.tiprev, self.filteredhash, self._closednodes) def write(self, repo): try: f = repo.vfs(_filename(repo), "w", atomictemp=True) cachekey = [hex(self.tipnode), str(self.tiprev)] if self.filteredhash is not None: cachekey.append(hex(self.filteredhash)) f.write(" ".join(cachekey) + '\n') nodecount = 0 for label, nodes in sorted(self.iteritems()): for node in nodes: nodecount += 1 if node in self._closednodes: state = 'c' else: state = 'o' f.write("%s %s %s\n" % (hex(node), state, encoding.fromlocal(label))) f.close() repo.ui.log('branchcache', 'wrote %s branch cache with %d labels and %d nodes\n', repo.filtername, len(self), nodecount) except (IOError, OSError, error.Abort) as inst: repo.ui.debug("couldn't write branch cache: %s\n" % inst) # Abort may be raise by read only opener pass def update(self, repo, revgen): """Given a branchhead cache, self, that may have extra nodes or be missing heads, and a generator of nodes that are strictly a superset of heads missing, this function updates self to be correct. """ starttime = time.time() cl = repo.changelog # collect new branch entries newbranches = {} getbranchinfo = repo.revbranchcache().branchinfo for r in revgen: branch, closesbranch = getbranchinfo(r) newbranches.setdefault(branch, []).append(r) if closesbranch: self._closednodes.add(cl.node(r)) # fetch current topological heads to speed up filtering topoheads = set(cl.headrevs()) # if older branchheads are reachable from new ones, they aren't # really branchheads. 
Note checking parents is insufficient: # 1 (branch a) -> 2 (branch b) -> 3 (branch a) for branch, newheadrevs in newbranches.iteritems(): bheads = self.setdefault(branch, []) bheadset = set(cl.rev(node) for node in bheads) # This have been tested True on all internal usage of this function. # run it again in case of doubt # assert not (set(bheadrevs) & set(newheadrevs)) newheadrevs.sort() bheadset.update(newheadrevs) # This prunes out two kinds of heads - heads that are superseded by # a head in newheadrevs, and newheadrevs that are not heads because # an existing head is their descendant. uncertain = bheadset - topoheads if uncertain: floorrev = min(uncertain) ancestors = set(cl.ancestors(newheadrevs, floorrev)) bheadset -= ancestors bheadrevs = sorted(bheadset) self[branch] = [cl.node(rev) for rev in bheadrevs] tiprev = bheadrevs[-1] if tiprev > self.tiprev: self.tipnode = cl.node(tiprev) self.tiprev = tiprev if not self.validfor(repo): # cache key are not valid anymore self.tipnode = nullid self.tiprev = nullrev for heads in self.values(): tiprev = max(cl.rev(node) for node in heads) if tiprev > self.tiprev: self.tipnode = cl.node(tiprev) self.tiprev = tiprev self.filteredhash = scmutil.filteredhash(repo, self.tiprev) duration = time.time() - starttime repo.ui.log('branchcache', 'updated %s branch cache in %.4f seconds\n', repo.filtername, duration) # Revision branch info cache _rbcversion = '-v1' _rbcnames = 'cache/rbc-names' + _rbcversion _rbcrevs = 'cache/rbc-revs' + _rbcversion # [4 byte hash prefix][4 byte branch name number with sign bit indicating open] _rbcrecfmt = '>4sI' _rbcrecsize = calcsize(_rbcrecfmt) _rbcnodelen = 4 _rbcbranchidxmask = 0x7fffffff _rbccloseflag = 0x80000000 class revbranchcache(object): """Persistent cache, mapping from revision number to branch name and close. This is a low level cache, independent of filtering. Branch names are stored in rbc-names in internal encoding separated by 0. 
rbc-names is append-only, and each branch name is only stored once and will thus have a unique index. The branch info for each revision is stored in rbc-revs as constant size records. The whole file is read into memory, but it is only 'parsed' on demand. The file is usually append-only but will be truncated if repo modification is detected. The record for each revision contains the first 4 bytes of the corresponding node hash, and the record is only used if it still matches. Even a completely trashed rbc-revs fill thus still give the right result while converging towards full recovery ... assuming no incorrectly matching node hashes. The record also contains 4 bytes where 31 bits contains the index of the branch and the last bit indicate that it is a branch close commit. The usage pattern for rbc-revs is thus somewhat similar to 00changelog.i and will grow with it but be 1/8th of its size. """ def __init__(self, repo, readonly=True): assert repo.filtername is None self._repo = repo self._names = [] # branch names in local encoding with static index self._rbcrevs = array('c') # structs of type _rbcrecfmt self._rbcsnameslen = 0 try: bndata = repo.vfs.read(_rbcnames) self._rbcsnameslen = len(bndata) # for verification before writing self._names = [encoding.tolocal(bn) for bn in bndata.split('\0')] except (IOError, OSError) as inst: if readonly: # don't try to use cache - fall back to the slow path self.branchinfo = self._branchinfo if self._names: try: data = repo.vfs.read(_rbcrevs) self._rbcrevs.fromstring(data) except (IOError, OSError) as inst: repo.ui.debug("couldn't read revision branch cache: %s\n" % inst) # remember number of good records on disk self._rbcrevslen = min(len(self._rbcrevs) // _rbcrecsize, len(repo.changelog)) if self._rbcrevslen == 0: self._names = [] self._rbcnamescount = len(self._names) # number of good names on disk self._namesreverse = dict((b, r) for r, b in enumerate(self._names)) def branchinfo(self, rev): """Return branch name and close 
flag for rev, using and updating persistent cache.""" changelog = self._repo.changelog rbcrevidx = rev * _rbcrecsize # avoid negative index, changelog.read(nullrev) is fast without cache if rev == nullrev: return changelog.branchinfo(rev) # if requested rev is missing, add and populate all missing revs if len(self._rbcrevs) < rbcrevidx + _rbcrecsize: self._rbcrevs.extend('\0' * (len(changelog) * _rbcrecsize - len(self._rbcrevs))) # fast path: extract data from cache, use it if node is matching reponode = changelog.node(rev)[:_rbcnodelen] cachenode, branchidx = unpack( _rbcrecfmt, buffer(self._rbcrevs, rbcrevidx, _rbcrecsize)) close = bool(branchidx & _rbccloseflag) if close: branchidx &= _rbcbranchidxmask if cachenode == '\0\0\0\0': pass elif cachenode == reponode: return self._names[branchidx], close else: # rev/node map has changed, invalidate the cache from here up truncate = rbcrevidx + _rbcrecsize del self._rbcrevs[truncate:] self._rbcrevslen = min(self._rbcrevslen, truncate) # fall back to slow path and make sure it will be written to disk return self._branchinfo(rev) def _branchinfo(self, rev): """Retrieve branch info from changelog and update _rbcrevs""" changelog = self._repo.changelog b, close = changelog.branchinfo(rev) if b in self._namesreverse: branchidx = self._namesreverse[b] else: branchidx = len(self._names) self._names.append(b) self._namesreverse[b] = branchidx reponode = changelog.node(rev) if close: branchidx |= _rbccloseflag self._setcachedata(rev, reponode, branchidx) return b, close def _setcachedata(self, rev, node, branchidx): """Writes the node's branch data to the in-memory cache data.""" rbcrevidx = rev * _rbcrecsize rec = array('c') rec.fromstring(pack(_rbcrecfmt, node, branchidx)) self._rbcrevs[rbcrevidx:rbcrevidx + _rbcrecsize] = rec self._rbcrevslen = min(self._rbcrevslen, rev) tr = self._repo.currenttransaction() if tr: tr.addfinalize('write-revbranchcache', self.write) def write(self, tr=None): """Save branch cache if it is 
dirty.""" repo = self._repo if self._rbcnamescount < len(self._names): try: if self._rbcnamescount != 0: f = repo.vfs.open(_rbcnames, 'ab') if f.tell() == self._rbcsnameslen: f.write('\0') else: f.close() repo.ui.debug("%s changed - rewriting it\n" % _rbcnames) self._rbcnamescount = 0 self._rbcrevslen = 0 if self._rbcnamescount == 0: f = repo.vfs.open(_rbcnames, 'wb') f.write('\0'.join(encoding.fromlocal(b) for b in self._names[self._rbcnamescount:])) self._rbcsnameslen = f.tell() f.close() except (IOError, OSError, error.Abort) as inst: repo.ui.debug("couldn't write revision branch cache names: " "%s\n" % inst) return self._rbcnamescount = len(self._names) start = self._rbcrevslen * _rbcrecsize if start != len(self._rbcrevs): revs = min(len(repo.changelog), len(self._rbcrevs) // _rbcrecsize) try: f = repo.vfs.open(_rbcrevs, 'ab') if f.tell() != start: repo.ui.debug("truncating %s to %s\n" % (_rbcrevs, start)) f.seek(start) f.truncate() end = revs * _rbcrecsize f.write(self._rbcrevs[start:end]) f.close() except (IOError, OSError, error.Abort) as inst: repo.ui.debug("couldn't write revision branch cache: %s\n" % inst) return self._rbcrevslen = revs mercurial-3.7.3/mercurial/filemerge.py0000644000175000017500000005744312676531525017440 0ustar mpmmpm00000000000000# filemerge.py - file-level merge handling for Mercurial # # Copyright 2006, 2007, 2008 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from __future__ import absolute_import import filecmp import os import re import tempfile from .i18n import _ from .node import nullid, short from . import ( error, match, scmutil, simplemerge, tagmerge, templatekw, templater, util, ) def _toolstr(ui, tool, part, default=""): return ui.config("merge-tools", tool + "." + part, default) def _toolbool(ui, tool, part, default=False): return ui.configbool("merge-tools", tool + "." 
+ part, default) def _toollist(ui, tool, part, default=[]): return ui.configlist("merge-tools", tool + "." + part, default) internals = {} # Merge tools to document. internalsdoc = {} # internal tool merge types nomerge = None mergeonly = 'mergeonly' # just the full merge, no premerge fullmerge = 'fullmerge' # both premerge and merge class absentfilectx(object): """Represents a file that's ostensibly in a context but is actually not present in it. This is here because it's very specific to the filemerge code for now -- other code is likely going to break with the values this returns.""" def __init__(self, ctx, f): self._ctx = ctx self._f = f def path(self): return self._f def size(self): return None def data(self): return None def filenode(self): return nullid _customcmp = True def cmp(self, fctx): """compare with other file context returns True if different from fctx. """ return not (fctx.isabsent() and fctx.ctx() == self.ctx() and fctx.path() == self.path()) def flags(self): return '' def changectx(self): return self._ctx def isbinary(self): return False def isabsent(self): return True def internaltool(name, mergetype, onfailure=None, precheck=None): '''return a decorator for populating internal merge tool table''' def decorator(func): fullname = ':' + name func.__doc__ = "``%s``\n" % fullname + func.__doc__.strip() internals[fullname] = func internals['internal:' + name] = func internalsdoc[fullname] = func func.mergetype = mergetype func.onfailure = onfailure func.precheck = precheck return func return decorator def _findtool(ui, tool): if tool in internals: return tool return findexternaltool(ui, tool) def findexternaltool(ui, tool): for kn in ("regkey", "regkeyalt"): k = _toolstr(ui, tool, kn) if not k: continue p = util.lookupreg(k, _toolstr(ui, tool, "regname")) if p: p = util.findexe(p + _toolstr(ui, tool, "regappend")) if p: return p exe = _toolstr(ui, tool, "executable", tool) return util.findexe(util.expandpath(exe)) def _picktool(repo, ui, path, 
binary, symlink, changedelete): def supportscd(tool): return tool in internals and internals[tool].mergetype == nomerge def check(tool, pat, symlink, binary, changedelete): tmsg = tool if pat: tmsg += " specified for " + pat if not _findtool(ui, tool): if pat: # explicitly requested tool deserves a warning ui.warn(_("couldn't find merge tool %s\n") % tmsg) else: # configured but non-existing tools are more silent ui.note(_("couldn't find merge tool %s\n") % tmsg) elif symlink and not _toolbool(ui, tool, "symlink"): ui.warn(_("tool %s can't handle symlinks\n") % tmsg) elif binary and not _toolbool(ui, tool, "binary"): ui.warn(_("tool %s can't handle binary\n") % tmsg) elif changedelete and not supportscd(tool): # the nomerge tools are the only tools that support change/delete # conflicts pass elif not util.gui() and _toolbool(ui, tool, "gui"): ui.warn(_("tool %s requires a GUI\n") % tmsg) else: return True return False # internal config: ui.forcemerge # forcemerge comes from command line arguments, highest priority force = ui.config('ui', 'forcemerge') if force: toolpath = _findtool(ui, force) if changedelete and not supportscd(toolpath): return ":prompt", None else: if toolpath: return (force, util.shellquote(toolpath)) else: # mimic HGMERGE if given tool not found return (force, force) # HGMERGE takes next precedence hgmerge = os.environ.get("HGMERGE") if hgmerge: if changedelete and not supportscd(hgmerge): return ":prompt", None else: return (hgmerge, hgmerge) # then patterns for pat, tool in ui.configitems("merge-patterns"): mf = match.match(repo.root, '', [pat]) if mf(path) and check(tool, pat, symlink, False, changedelete): toolpath = _findtool(ui, tool) return (tool, util.shellquote(toolpath)) # then merge tools tools = {} disabled = set() for k, v in ui.configitems("merge-tools"): t = k.split('.')[0] if t not in tools: tools[t] = int(_toolstr(ui, t, "priority", "0")) if _toolbool(ui, t, "disabled", False): disabled.add(t) names = tools.keys() tools = 
sorted([(-p, t) for t, p in tools.items() if t not in disabled]) uimerge = ui.config("ui", "merge") if uimerge: # external tools defined in uimerge won't be able to handle # change/delete conflicts if uimerge not in names and not changedelete: return (uimerge, uimerge) tools.insert(0, (None, uimerge)) # highest priority tools.append((None, "hgmerge")) # the old default, if found for p, t in tools: if check(t, None, symlink, binary, changedelete): toolpath = _findtool(ui, t) return (t, util.shellquote(toolpath)) # internal merge or prompt as last resort if symlink or binary or changedelete: return ":prompt", None return ":merge", None def _eoltype(data): "Guess the EOL type of a file" if '\0' in data: # binary return None if '\r\n' in data: # Windows return '\r\n' if '\r' in data: # Old Mac return '\r' if '\n' in data: # UNIX return '\n' return None # unknown def _matcheol(file, origfile): "Convert EOL markers in a file to match origfile" tostyle = _eoltype(util.readfile(origfile)) if tostyle: data = util.readfile(file) style = _eoltype(data) if style: newdata = data.replace(style, tostyle) if newdata != data: util.writefile(file, newdata) @internaltool('prompt', nomerge) def _iprompt(repo, mynode, orig, fcd, fco, fca, toolconf): """Asks the user which of the local or the other version to keep as the merged version.""" ui = repo.ui fd = fcd.path() try: if fco.isabsent(): index = ui.promptchoice( _("local changed %s which remote deleted\n" "use (c)hanged version, (d)elete, or leave (u)nresolved?" "$$ &Changed $$ &Delete $$ &Unresolved") % fd, 2) choice = ['local', 'other', 'unresolved'][index] elif fcd.isabsent(): index = ui.promptchoice( _("remote changed %s which local deleted\n" "use (c)hanged version, leave (d)eleted, or " "leave (u)nresolved?" "$$ &Changed $$ &Deleted $$ &Unresolved") % fd, 2) choice = ['other', 'local', 'unresolved'][index] else: index = ui.promptchoice( _("no tool found to merge %s\n" "keep (l)ocal, take (o)ther, or leave (u)nresolved?" 
"$$ &Local $$ &Other $$ &Unresolved") % fd, 2) choice = ['local', 'other', 'unresolved'][index] if choice == 'other': return _iother(repo, mynode, orig, fcd, fco, fca, toolconf) elif choice == 'local': return _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf) elif choice == 'unresolved': return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf) except error.ResponseExpected: ui.write("\n") return _ifail(repo, mynode, orig, fcd, fco, fca, toolconf) @internaltool('local', nomerge) def _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf): """Uses the local version of files as the merged version.""" return 0, fcd.isabsent() @internaltool('other', nomerge) def _iother(repo, mynode, orig, fcd, fco, fca, toolconf): """Uses the other version of files as the merged version.""" if fco.isabsent(): # local changed, remote deleted -- 'deleted' picked repo.wvfs.unlinkpath(fcd.path()) deleted = True else: repo.wwrite(fcd.path(), fco.data(), fco.flags()) deleted = False return 0, deleted @internaltool('fail', nomerge) def _ifail(repo, mynode, orig, fcd, fco, fca, toolconf): """ Rather than attempting to merge files that were modified on both branches, it marks them as unresolved. The resolve command must be used to resolve these conflicts.""" # for change/delete conflicts write out the changed version, then fail if fcd.isabsent(): repo.wwrite(fcd.path(), fco.data(), fco.flags()) return 1, False def _premerge(repo, fcd, fco, fca, toolconf, files, labels=None): tool, toolpath, binary, symlink = toolconf if symlink or fcd.isabsent() or fco.isabsent(): return 1 a, b, c, back = files ui = repo.ui validkeep = ['keep', 'keep-merge3'] # do we attempt to simplemerge first? 
try: premerge = _toolbool(ui, tool, "premerge", not binary) except error.ConfigError: premerge = _toolstr(ui, tool, "premerge").lower() if premerge not in validkeep: _valid = ', '.join(["'" + v + "'" for v in validkeep]) raise error.ConfigError(_("%s.premerge not valid " "('%s' is neither boolean nor %s)") % (tool, premerge, _valid)) if premerge: if premerge == 'keep-merge3': if not labels: labels = _defaultconflictlabels if len(labels) < 3: labels.append('base') r = simplemerge.simplemerge(ui, a, b, c, quiet=True, label=labels) if not r: ui.debug(" premerge successful\n") return 0 if premerge not in validkeep: util.copyfile(back, a) # restore from backup and try again return 1 # continue merging def _mergecheck(repo, mynode, orig, fcd, fco, fca, toolconf): tool, toolpath, binary, symlink = toolconf if symlink: repo.ui.warn(_('warning: internal %s cannot merge symlinks ' 'for %s\n') % (tool, fcd.path())) return False if fcd.isabsent() or fco.isabsent(): repo.ui.warn(_('warning: internal %s cannot merge change/delete ' 'conflict for %s\n') % (tool, fcd.path())) return False return True def _merge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels, mode): """ Uses the internal non-interactive simple merge algorithm for merging files. It will fail if there are any conflicts and leave markers in the partially merged file. Markers will have two sections, one for each side of merge, unless mode equals 'union' which suppresses the markers.""" a, b, c, back = files ui = repo.ui r = simplemerge.simplemerge(ui, a, b, c, label=labels, mode=mode) return True, r, False @internaltool('union', fullmerge, _("warning: conflicts while merging %s! " "(edit, then use 'hg resolve --mark')\n"), precheck=_mergecheck) def _iunion(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None): """ Uses the internal non-interactive simple merge algorithm for merging files. It will use both left and right sides for conflict regions. 
No markers are inserted.""" return _merge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels, 'union') @internaltool('merge', fullmerge, _("warning: conflicts while merging %s! " "(edit, then use 'hg resolve --mark')\n"), precheck=_mergecheck) def _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None): """ Uses the internal non-interactive simple merge algorithm for merging files. It will fail if there are any conflicts and leave markers in the partially merged file. Markers will have two sections, one for each side of merge.""" return _merge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels, 'merge') @internaltool('merge3', fullmerge, _("warning: conflicts while merging %s! " "(edit, then use 'hg resolve --mark')\n"), precheck=_mergecheck) def _imerge3(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None): """ Uses the internal non-interactive simple merge algorithm for merging files. It will fail if there are any conflicts and leave markers in the partially merged file. 
Marker will have three sections, one from each side of the merge and one for the base content.""" if not labels: labels = _defaultconflictlabels if len(labels) < 3: labels.append('base') return _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels) def _imergeauto(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None, localorother=None): """ Generic driver for _imergelocal and _imergeother """ assert localorother is not None tool, toolpath, binary, symlink = toolconf a, b, c, back = files r = simplemerge.simplemerge(repo.ui, a, b, c, label=labels, localorother=localorother) return True, r @internaltool('merge-local', mergeonly, precheck=_mergecheck) def _imergelocal(*args, **kwargs): """ Like :merge, but resolve all conflicts non-interactively in favor of the local changes.""" success, status = _imergeauto(localorother='local', *args, **kwargs) return success, status, False @internaltool('merge-other', mergeonly, precheck=_mergecheck) def _imergeother(*args, **kwargs): """ Like :merge, but resolve all conflicts non-interactively in favor of the other changes.""" success, status = _imergeauto(localorother='other', *args, **kwargs) return success, status, False @internaltool('tagmerge', mergeonly, _("automatic tag merging of %s failed! " "(use 'hg resolve --tool :merge' or another merge " "tool of your choice)\n")) def _itagmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None): """ Uses the internal tag merge algorithm (experimental). """ success, status = tagmerge.merge(repo, fcd, fco, fca) return success, status, False @internaltool('dump', fullmerge) def _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None): """ Creates three versions of the files to merge, containing the contents of local, other and base. These files can then be used to perform a merge manually. 
If the file to be merged is named ``a.txt``, these files will accordingly be named ``a.txt.local``, ``a.txt.other`` and ``a.txt.base`` and they will be placed in the same directory as ``a.txt``.""" a, b, c, back = files fd = fcd.path() util.copyfile(a, a + ".local") repo.wwrite(fd + ".other", fco.data(), fco.flags()) repo.wwrite(fd + ".base", fca.data(), fca.flags()) return False, 1, False def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=None): tool, toolpath, binary, symlink = toolconf if fcd.isabsent() or fco.isabsent(): repo.ui.warn(_('warning: %s cannot merge change/delete conflict ' 'for %s\n') % (tool, fcd.path())) return False, 1, None a, b, c, back = files out = "" env = {'HG_FILE': fcd.path(), 'HG_MY_NODE': short(mynode), 'HG_OTHER_NODE': str(fco.changectx()), 'HG_BASE_NODE': str(fca.changectx()), 'HG_MY_ISLINK': 'l' in fcd.flags(), 'HG_OTHER_ISLINK': 'l' in fco.flags(), 'HG_BASE_ISLINK': 'l' in fca.flags(), } ui = repo.ui args = _toolstr(ui, tool, "args", '$local $base $other') if "$output" in args: out, a = a, back # read input from backup, write to original replace = {'local': a, 'base': b, 'other': c, 'output': out} args = util.interpolate(r'\$', replace, args, lambda s: util.shellquote(util.localpath(s))) cmd = toolpath + ' ' + args repo.ui.debug('launching merge tool: %s\n' % cmd) r = ui.system(cmd, cwd=repo.root, environ=env) repo.ui.debug('merge tool returned: %s\n' % r) return True, r, False def _formatconflictmarker(repo, ctx, template, label, pad): """Applies the given template to the ctx, prefixed by the label. Pad is the minimum width of the label prefix, so that multiple markers can have aligned templated parts. 
""" if ctx.node() is None: ctx = ctx.p1() props = templatekw.keywords.copy() props['templ'] = template props['ctx'] = ctx props['repo'] = repo templateresult = template('conflictmarker', **props) label = ('%s:' % label).ljust(pad + 1) mark = '%s %s' % (label, templater.stringify(templateresult)) if mark: mark = mark.splitlines()[0] # split for safety # 8 for the prefix of conflict marker lines (e.g. '<<<<<<< ') return util.ellipsis(mark, 80 - 8) _defaultconflictmarker = ('{node|short} ' + '{ifeq(tags, "tip", "", "{tags} ")}' + '{if(bookmarks, "{bookmarks} ")}' + '{ifeq(branch, "default", "", "{branch} ")}' + '- {author|user}: {desc|firstline}') _defaultconflictlabels = ['local', 'other'] def _formatlabels(repo, fcd, fco, fca, labels): """Formats the given labels using the conflict marker template. Returns a list of formatted labels. """ cd = fcd.changectx() co = fco.changectx() ca = fca.changectx() ui = repo.ui template = ui.config('ui', 'mergemarkertemplate', _defaultconflictmarker) tmpl = templater.templater(None, cache={'conflictmarker': template}) pad = max(len(l) for l in labels) newlabels = [_formatconflictmarker(repo, cd, tmpl, labels[0], pad), _formatconflictmarker(repo, co, tmpl, labels[1], pad)] if len(labels) > 2: newlabels.append(_formatconflictmarker(repo, ca, tmpl, labels[2], pad)) return newlabels def _filemerge(premerge, repo, mynode, orig, fcd, fco, fca, labels=None): """perform a 3-way merge in the working directory premerge = whether this is a premerge mynode = parent node before merge orig = original local filename before merge fco = other file context fca = ancestor file context fcd = local file context for current/destination file Returns whether the merge is complete, the return value of the merge, and a boolean indicating whether the file was deleted from disk.""" def temp(prefix, ctx): pre = "%s~%s." 
% (os.path.basename(ctx.path()), prefix) (fd, name) = tempfile.mkstemp(prefix=pre) data = repo.wwritedata(ctx.path(), ctx.data()) f = os.fdopen(fd, "wb") f.write(data) f.close() return name if not fco.cmp(fcd): # files identical? return True, None, False ui = repo.ui fd = fcd.path() binary = fcd.isbinary() or fco.isbinary() or fca.isbinary() symlink = 'l' in fcd.flags() + fco.flags() changedelete = fcd.isabsent() or fco.isabsent() tool, toolpath = _picktool(repo, ui, fd, binary, symlink, changedelete) if tool in internals and tool.startswith('internal:'): # normalize to new-style names (':merge' etc) tool = tool[len('internal'):] ui.debug("picked tool '%s' for %s (binary %s symlink %s changedelete %s)\n" % (tool, fd, binary, symlink, changedelete)) if tool in internals: func = internals[tool] mergetype = func.mergetype onfailure = func.onfailure precheck = func.precheck else: func = _xmerge mergetype = fullmerge onfailure = _("merging %s failed!\n") precheck = None toolconf = tool, toolpath, binary, symlink if mergetype == nomerge: r, deleted = func(repo, mynode, orig, fcd, fco, fca, toolconf) return True, r, deleted if premerge: if orig != fco.path(): ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd)) else: ui.status(_("merging %s\n") % fd) ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca)) if precheck and not precheck(repo, mynode, orig, fcd, fco, fca, toolconf): if onfailure: ui.warn(onfailure % fd) return True, 1, False a = repo.wjoin(fd) b = temp("base", fca) c = temp("other", fco) if not fcd.isabsent(): back = scmutil.origpath(ui, repo, a) if premerge: util.copyfile(a, back) else: back = None files = (a, b, c, back) r = 1 try: markerstyle = ui.config('ui', 'mergemarkers', 'basic') if not labels: labels = _defaultconflictlabels if markerstyle != 'basic': labels = _formatlabels(repo, fcd, fco, fca, labels) if premerge and mergetype == fullmerge: r = _premerge(repo, fcd, fco, fca, toolconf, files, labels=labels) # complete if premerge 
successful (r is 0) return not r, r, False needcheck, r, deleted = func(repo, mynode, orig, fcd, fco, fca, toolconf, files, labels=labels) if needcheck: r = _check(r, ui, tool, fcd, files) if r: if onfailure: ui.warn(onfailure % fd) return True, r, deleted finally: if not r and back is not None: util.unlink(back) util.unlink(b) util.unlink(c) def _check(r, ui, tool, fcd, files): fd = fcd.path() a, b, c, back = files if not r and (_toolbool(ui, tool, "checkconflicts") or 'conflicts' in _toollist(ui, tool, "check")): if re.search("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data(), re.MULTILINE): r = 1 checked = False if 'prompt' in _toollist(ui, tool, "check"): checked = True if ui.promptchoice(_("was merge of '%s' successful (yn)?" "$$ &Yes $$ &No") % fd, 1): r = 1 if not r and not checked and (_toolbool(ui, tool, "checkchanged") or 'changed' in _toollist(ui, tool, "check")): if back is not None and filecmp.cmp(a, back): if ui.promptchoice(_(" output file %s appears unchanged\n" "was merge successful (yn)?" "$$ &Yes $$ &No") % fd, 1): r = 1 if back is not None and _toolbool(ui, tool, "fixeol"): _matcheol(a, back) return r def premerge(repo, mynode, orig, fcd, fco, fca, labels=None): return _filemerge(True, repo, mynode, orig, fcd, fco, fca, labels=labels) def filemerge(repo, mynode, orig, fcd, fco, fca, labels=None): return _filemerge(False, repo, mynode, orig, fcd, fco, fca, labels=labels) # tell hggettext to extract docstrings from these functions: i18nfunctions = internals.values() mercurial-3.7.3/mercurial/lsprof.py0000644000175000017500000000774412676531525017005 0ustar mpmmpm00000000000000from __future__ import absolute_import, print_function import _lsprof import sys Profiler = _lsprof.Profiler # PyPy doesn't expose profiler_entry from the module. 
profiler_entry = getattr(_lsprof, 'profiler_entry', None) __all__ = ['profile', 'Stats'] def profile(f, *args, **kwds): """XXX docstring""" p = Profiler() p.enable(subcalls=True, builtins=True) try: f(*args, **kwds) finally: p.disable() return Stats(p.getstats()) class Stats(object): """XXX docstring""" def __init__(self, data): self.data = data def sort(self, crit="inlinetime"): """XXX docstring""" # profiler_entries isn't defined when running under PyPy. if profiler_entry: if crit not in profiler_entry.__dict__: raise ValueError("Can't sort by %s" % crit) elif self.data and not getattr(self.data[0], crit, None): raise ValueError("Can't sort by %s" % crit) self.data.sort(key=lambda x: getattr(x, crit), reverse=True) for e in self.data: if e.calls: e.calls.sort(key=lambda x: getattr(x, crit), reverse=True) def pprint(self, top=None, file=None, limit=None, climit=None): """XXX docstring""" if file is None: file = sys.stdout d = self.data if top is not None: d = d[:top] cols = "% 12s %12s %11.4f %11.4f %s\n" hcols = "% 12s %12s %12s %12s %s\n" file.write(hcols % ("CallCount", "Recursive", "Total(s)", "Inline(s)", "module:lineno(function)")) count = 0 for e in d: file.write(cols % (e.callcount, e.reccallcount, e.totaltime, e.inlinetime, label(e.code))) count += 1 if limit is not None and count == limit: return ccount = 0 if climit and e.calls: for se in e.calls: file.write(cols % (se.callcount, se.reccallcount, se.totaltime, se.inlinetime, " %s" % label(se.code))) count += 1 ccount += 1 if limit is not None and count == limit: return if climit is not None and ccount == climit: break def freeze(self): """Replace all references to code objects with string descriptions; this makes it possible to pickle the instance.""" # this code is probably rather ickier than it needs to be! 
for i in range(len(self.data)): e = self.data[i] if not isinstance(e.code, str): self.data[i] = type(e)((label(e.code),) + e[1:]) if e.calls: for j in range(len(e.calls)): se = e.calls[j] if not isinstance(se.code, str): e.calls[j] = type(se)((label(se.code),) + se[1:]) _fn2mod = {} def label(code): if isinstance(code, str): return code try: mname = _fn2mod[code.co_filename] except KeyError: for k, v in list(sys.modules.iteritems()): if v is None: continue if not isinstance(getattr(v, '__file__', None), str): continue if v.__file__.startswith(code.co_filename): mname = _fn2mod[code.co_filename] = k break else: mname = _fn2mod[code.co_filename] = '<%s>' % code.co_filename return '%s:%d(%s)' % (mname, code.co_firstlineno, code.co_name) if __name__ == '__main__': import os sys.argv = sys.argv[1:] if not sys.argv: print("usage: lsprof.py test: log

                    log

                    age author description
                    $ echo babar babar $ (get-with-headers.py localhost:$HGPORT 'log') 200 Script output follows test: log

                    log

                    age author description
                    $ (get-with-headers.py localhost:$HGPORT 'graph') 200 Script output follows test: revision graph

                    graph

                        $ (get-with-headers.py localhost:$HGPORT 'file') 200 Script output follows test: 000000000000 /

                        directory / @ -1:000000000000 tip

                        name size permissions
                        [up] drwxr-xr-x
                        $ cd .. mercurial-3.7.3/tests/test-walk.t0000644000175000017500000003115012676531525016371 0ustar mpmmpm00000000000000 $ hg init t $ cd t $ mkdir -p beans $ for b in kidney navy turtle borlotti black pinto; do > echo $b > beans/$b > done $ mkdir -p mammals/Procyonidae $ for m in cacomistle coatimundi raccoon; do > echo $m > mammals/Procyonidae/$m > done $ echo skunk > mammals/skunk $ echo fennel > fennel $ echo fenugreek > fenugreek $ echo fiddlehead > fiddlehead $ hg addremove adding beans/black adding beans/borlotti adding beans/kidney adding beans/navy adding beans/pinto adding beans/turtle adding fennel adding fenugreek adding fiddlehead adding mammals/Procyonidae/cacomistle adding mammals/Procyonidae/coatimundi adding mammals/Procyonidae/raccoon adding mammals/skunk $ hg commit -m "commit #0" $ hg debugwalk f beans/black beans/black f beans/borlotti beans/borlotti f beans/kidney beans/kidney f beans/navy beans/navy f beans/pinto beans/pinto f beans/turtle beans/turtle f fennel fennel f fenugreek fenugreek f fiddlehead fiddlehead f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon f mammals/skunk mammals/skunk $ hg debugwalk -I. 
f beans/black beans/black f beans/borlotti beans/borlotti f beans/kidney beans/kidney f beans/navy beans/navy f beans/pinto beans/pinto f beans/turtle beans/turtle f fennel fennel f fenugreek fenugreek f fiddlehead fiddlehead f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon f mammals/skunk mammals/skunk $ cd mammals $ hg debugwalk f beans/black ../beans/black f beans/borlotti ../beans/borlotti f beans/kidney ../beans/kidney f beans/navy ../beans/navy f beans/pinto ../beans/pinto f beans/turtle ../beans/turtle f fennel ../fennel f fenugreek ../fenugreek f fiddlehead ../fiddlehead f mammals/Procyonidae/cacomistle Procyonidae/cacomistle f mammals/Procyonidae/coatimundi Procyonidae/coatimundi f mammals/Procyonidae/raccoon Procyonidae/raccoon f mammals/skunk skunk $ hg debugwalk -X ../beans f fennel ../fennel f fenugreek ../fenugreek f fiddlehead ../fiddlehead f mammals/Procyonidae/cacomistle Procyonidae/cacomistle f mammals/Procyonidae/coatimundi Procyonidae/coatimundi f mammals/Procyonidae/raccoon Procyonidae/raccoon f mammals/skunk skunk $ hg debugwalk -I '*k' f mammals/skunk skunk $ hg debugwalk -I 'glob:*k' f mammals/skunk skunk $ hg debugwalk -I 'relglob:*k' f beans/black ../beans/black f fenugreek ../fenugreek f mammals/skunk skunk $ hg debugwalk -I 'relglob:*k' . 
f mammals/skunk skunk $ hg debugwalk -I 're:.*k$' f beans/black ../beans/black f fenugreek ../fenugreek f mammals/skunk skunk $ hg debugwalk -I 'relre:.*k$' f beans/black ../beans/black f fenugreek ../fenugreek f mammals/skunk skunk $ hg debugwalk -I 'path:beans' f beans/black ../beans/black f beans/borlotti ../beans/borlotti f beans/kidney ../beans/kidney f beans/navy ../beans/navy f beans/pinto ../beans/pinto f beans/turtle ../beans/turtle $ hg debugwalk -I 'relpath:detour/../../beans' f beans/black ../beans/black f beans/borlotti ../beans/borlotti f beans/kidney ../beans/kidney f beans/navy ../beans/navy f beans/pinto ../beans/pinto f beans/turtle ../beans/turtle $ hg debugwalk . f mammals/Procyonidae/cacomistle Procyonidae/cacomistle f mammals/Procyonidae/coatimundi Procyonidae/coatimundi f mammals/Procyonidae/raccoon Procyonidae/raccoon f mammals/skunk skunk $ hg debugwalk -I. f mammals/Procyonidae/cacomistle Procyonidae/cacomistle f mammals/Procyonidae/coatimundi Procyonidae/coatimundi f mammals/Procyonidae/raccoon Procyonidae/raccoon f mammals/skunk skunk $ hg debugwalk Procyonidae f mammals/Procyonidae/cacomistle Procyonidae/cacomistle f mammals/Procyonidae/coatimundi Procyonidae/coatimundi f mammals/Procyonidae/raccoon Procyonidae/raccoon $ cd Procyonidae $ hg debugwalk . f mammals/Procyonidae/cacomistle cacomistle f mammals/Procyonidae/coatimundi coatimundi f mammals/Procyonidae/raccoon raccoon $ hg debugwalk .. f mammals/Procyonidae/cacomistle cacomistle f mammals/Procyonidae/coatimundi coatimundi f mammals/Procyonidae/raccoon raccoon f mammals/skunk ../skunk $ cd .. $ hg debugwalk ../beans f beans/black ../beans/black f beans/borlotti ../beans/borlotti f beans/kidney ../beans/kidney f beans/navy ../beans/navy f beans/pinto ../beans/pinto f beans/turtle ../beans/turtle $ hg debugwalk . 
f mammals/Procyonidae/cacomistle Procyonidae/cacomistle f mammals/Procyonidae/coatimundi Procyonidae/coatimundi f mammals/Procyonidae/raccoon Procyonidae/raccoon f mammals/skunk skunk $ hg debugwalk .hg abort: path 'mammals/.hg' is inside nested repo 'mammals' (glob) [255] $ hg debugwalk ../.hg abort: path contains illegal component: .hg [255] $ cd .. $ hg debugwalk -Ibeans f beans/black beans/black f beans/borlotti beans/borlotti f beans/kidney beans/kidney f beans/navy beans/navy f beans/pinto beans/pinto f beans/turtle beans/turtle $ hg debugwalk -I '{*,{b,m}*/*}k' f beans/black beans/black f fenugreek fenugreek f mammals/skunk mammals/skunk $ hg debugwalk -Ibeans mammals $ hg debugwalk -Inon-existent $ hg debugwalk -Inon-existent -Ibeans/black f beans/black beans/black $ hg debugwalk -Ibeans beans/black f beans/black beans/black exact $ hg debugwalk -Ibeans/black beans f beans/black beans/black $ hg debugwalk -Xbeans/black beans f beans/borlotti beans/borlotti f beans/kidney beans/kidney f beans/navy beans/navy f beans/pinto beans/pinto f beans/turtle beans/turtle $ hg debugwalk -Xbeans/black -Ibeans f beans/borlotti beans/borlotti f beans/kidney beans/kidney f beans/navy beans/navy f beans/pinto beans/pinto f beans/turtle beans/turtle $ hg debugwalk -Xbeans/black beans/black f beans/black beans/black exact $ hg debugwalk -Xbeans/black -Ibeans/black $ hg debugwalk -Xbeans beans/black f beans/black beans/black exact $ hg debugwalk -Xbeans -Ibeans/black $ hg debugwalk 'glob:mammals/../beans/b*' f beans/black beans/black f beans/borlotti beans/borlotti $ hg debugwalk '-X*/Procyonidae' mammals f mammals/skunk mammals/skunk $ hg debugwalk path:mammals f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon f mammals/skunk mammals/skunk $ hg debugwalk .. abort: .. not under root '$TESTTMP/t' (glob) [255] $ hg debugwalk beans/../.. 
abort: beans/../.. not under root '$TESTTMP/t' (glob) [255] $ hg debugwalk .hg abort: path contains illegal component: .hg [255] $ hg debugwalk beans/../.hg abort: path contains illegal component: .hg [255] $ hg debugwalk beans/../.hg/data abort: path contains illegal component: .hg/data (glob) [255] $ hg debugwalk beans/.hg abort: path 'beans/.hg' is inside nested repo 'beans' (glob) [255] Test absolute paths: $ hg debugwalk `pwd`/beans f beans/black beans/black f beans/borlotti beans/borlotti f beans/kidney beans/kidney f beans/navy beans/navy f beans/pinto beans/pinto f beans/turtle beans/turtle $ hg debugwalk `pwd`/.. abort: $TESTTMP/t/.. not under root '$TESTTMP/t' (glob) [255] Test patterns: $ hg debugwalk glob:\* f fennel fennel f fenugreek fenugreek f fiddlehead fiddlehead #if eol-in-paths $ echo glob:glob > glob:glob $ hg addremove adding glob:glob warning: filename contains ':', which is reserved on Windows: 'glob:glob' $ hg debugwalk glob:\* f fennel fennel f fenugreek fenugreek f fiddlehead fiddlehead f glob:glob glob:glob $ hg debugwalk glob:glob glob: No such file or directory $ hg debugwalk glob:glob:glob f glob:glob glob:glob exact $ hg debugwalk path:glob:glob f glob:glob glob:glob exact $ rm glob:glob $ hg addremove removing glob:glob #endif $ hg debugwalk 'glob:**e' f beans/turtle beans/turtle f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle $ hg debugwalk 're:.*[kb]$' f beans/black beans/black f fenugreek fenugreek f mammals/skunk mammals/skunk $ hg debugwalk path:beans/black f beans/black beans/black exact $ hg debugwalk path:beans//black f beans/black beans/black exact $ hg debugwalk relglob:Procyonidae $ hg debugwalk 'relglob:Procyonidae/**' f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon $ hg debugwalk 'relglob:Procyonidae/**' fennel f fennel fennel exact f mammals/Procyonidae/cacomistle 
mammals/Procyonidae/cacomistle f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon $ hg debugwalk beans 'glob:beans/*' f beans/black beans/black f beans/borlotti beans/borlotti f beans/kidney beans/kidney f beans/navy beans/navy f beans/pinto beans/pinto f beans/turtle beans/turtle $ hg debugwalk 'glob:mamm**' f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon f mammals/skunk mammals/skunk $ hg debugwalk 'glob:mamm**' fennel f fennel fennel exact f mammals/Procyonidae/cacomistle mammals/Procyonidae/cacomistle f mammals/Procyonidae/coatimundi mammals/Procyonidae/coatimundi f mammals/Procyonidae/raccoon mammals/Procyonidae/raccoon f mammals/skunk mammals/skunk $ hg debugwalk 'glob:j*' $ hg debugwalk NOEXIST NOEXIST: * (glob) #if fifo $ mkfifo fifo $ hg debugwalk fifo fifo: unsupported file type (type is fifo) #endif $ rm fenugreek $ hg debugwalk fenugreek f fenugreek fenugreek exact $ hg rm fenugreek $ hg debugwalk fenugreek f fenugreek fenugreek exact $ touch new $ hg debugwalk new f new new exact $ mkdir ignored $ touch ignored/file $ echo '^ignored$' > .hgignore $ hg debugwalk ignored $ hg debugwalk ignored/file f ignored/file ignored/file exact Test listfile and listfile0 $ $PYTHON -c "file('listfile0', 'wb').write('fenugreek\0new\0')" $ hg debugwalk -I 'listfile0:listfile0' f fenugreek fenugreek f new new $ $PYTHON -c "file('listfile', 'wb').write('fenugreek\nnew\r\nmammals/skunk\n')" $ hg debugwalk -I 'listfile:listfile' f fenugreek fenugreek f mammals/skunk mammals/skunk f new new $ cd .. $ hg debugwalk -R t t/mammals/skunk f mammals/skunk t/mammals/skunk exact $ mkdir t2 $ cd t2 $ hg debugwalk -R ../t ../t/mammals/skunk f mammals/skunk ../t/mammals/skunk exact $ hg debugwalk --cwd ../t mammals/skunk f mammals/skunk mammals/skunk exact $ cd .. 
Test split patterns on overflow $ cd t $ echo fennel > overflow.list $ $PYTHON -c "for i in xrange(20000 / 100): print 'x' * 100" >> overflow.list $ echo fenugreek >> overflow.list $ hg debugwalk 'listfile:overflow.list' 2>&1 | grep -v '^xxx' f fennel fennel exact f fenugreek fenugreek exact $ cd .. mercurial-3.7.3/tests/test-mq-qrefresh-replace-log-message.t0000644000175000017500000001712212676531525023502 0ustar mpmmpm00000000000000Environment setup for MQ $ echo "[extensions]" >> $HGRCPATH $ echo "mq=" >> $HGRCPATH $ cat >> $HGRCPATH < [defaults] > # explicit date to commit with fixed hashid > qnew = -d "0 0" > qrefresh = -d "0 0" > qfold = -d "0 0" > EOF $ hg init $ hg qinit Should fail if no patches applied (this tests also that editor is not invoked if '--edit' is not specified) $ hg qrefresh no patches applied [1] $ hg qrefresh -e no patches applied [1] $ hg qnew -m "First commit message" first-patch $ echo aaaa > file $ hg add file $ HGEDITOR=cat hg qrefresh Should display 'First commit message' $ hg log -l1 --template "{desc}\n" First commit message Testing changing message with -m (this tests also that '--edit' can be used with '--message', and that '[committemplate] changeset' definition and commit log specific template keyword 'extramsg' work well) $ cat >> .hg/hgrc < [committemplate] > listupfiles = {file_adds % > "HG: added {file}\n" }{file_mods % > "HG: changed {file}\n" }{file_dels % > "HG: removed {file}\n" }{if(files, "", > "HG: no files changed\n")} > > changeset = HG: this is customized commit template > {desc}\n\n > HG: Enter commit message. Lines beginning with 'HG:' are removed. > HG: {extramsg} > HG: -- > HG: user: {author} > HG: branch '{branch}'\n{listupfiles} > EOF $ echo bbbb > file $ HGEDITOR=cat hg qrefresh -m "Second commit message" -e HG: this is customized commit template Second commit message HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to use default message. 
HG: -- HG: user: test HG: branch 'default' HG: added file $ cat >> .hg/hgrc < # disable customizing for subsequent tests > [committemplate] > changeset = > EOF Should display 'Second commit message' $ hg log -l1 --template "{desc}\n" Second commit message Testing changing message with -l $ echo "Third commit message" > logfile $ echo " This is the 3rd log message" >> logfile $ echo bbbb > file $ hg qrefresh -l logfile Should display 'Third commit message\\\n This is the 3rd log message' $ hg log -l1 --template "{desc}\n" Third commit message This is the 3rd log message Testing changing message with -l- $ hg qnew -m "First commit message" second-patch $ echo aaaa > file2 $ hg add file2 $ echo bbbb > file2 $ (echo "Fifth commit message"; echo " This is the 5th log message") | hg qrefresh -l- Should display 'Fifth commit message\\\n This is the 5th log message' $ hg log -l1 --template "{desc}\n" Fifth commit message This is the 5th log message Test saving last-message.txt: $ cat > $TESTTMP/editor.sh << EOF > echo "==== before editing" > cat \$1 > echo "====" > (echo; echo "test saving last-message.txt") >> \$1 > EOF $ cat > $TESTTMP/commitfailure.py < from mercurial import error > def reposetup(ui, repo): > class commitfailure(repo.__class__): > def commit(self, *args, **kwargs): > raise error.Abort('emulating unexpected abort') > repo.__class__ = commitfailure > EOF $ cat >> .hg/hgrc < [extensions] > # this failure occurs before editor invocation > commitfailure = $TESTTMP/commitfailure.py > EOF $ hg qapplied first-patch second-patch $ hg tip --template "{files}\n" file2 (test that editor is not invoked before transaction starting) $ rm -f .hg/last-message.txt $ HGEDITOR="sh $TESTTMP/editor.sh" hg qrefresh -e qrefresh interrupted while patch was popped! (revert --all, qpush to recover) abort: emulating unexpected abort [255] $ test -f .hg/last-message.txt [1] (reset applied patches and directory status) $ cat >> .hg/hgrc < [extensions] > commitfailure = ! 
> EOF $ hg qapplied first-patch $ hg status -A file2 ? file2 $ rm file2 $ hg qpush -q second-patch now at: second-patch (test that editor is invoked and commit message is saved into "last-message.txt") $ cat >> .hg/hgrc < [hooks] > # this failure occurs after editor invocation > pretxncommit.unexpectedabort = false > EOF $ rm -f .hg/last-message.txt $ hg status --rev "second-patch^1" -arm A file2 $ HGEDITOR="sh $TESTTMP/editor.sh" hg qrefresh -e ==== before editing Fifth commit message This is the 5th log message HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to use default message. HG: -- HG: user: test HG: branch 'default' HG: added file2 ==== note: commit message saved in .hg/last-message.txt transaction abort! rollback completed qrefresh interrupted while patch was popped! (revert --all, qpush to recover) abort: pretxncommit.unexpectedabort hook exited with status 1 [255] $ cat .hg/last-message.txt Fifth commit message This is the 5th log message test saving last-message.txt Test visibility of in-memory distate changes outside transaction to external process $ cat > $TESTTMP/checkvisibility.sh < echo "====" > hg parents --template "{rev}:{node|short}\n" > hg status -arm > echo "====" > EOF == test visibility to external editor $ hg update -C -q first-patch $ rm -f file2 $ hg qpush -q second-patch --config hooks.pretxncommit.unexpectedabort= now at: second-patch $ echo bbbb >> file2 $ sh "$TESTTMP/checkvisibility.sh" ==== 1:e30108269082 M file2 ==== $ HGEDITOR="sh \"$TESTTMP/checkvisibility.sh\"" hg qrefresh -e ==== 0:25e397dabed2 A file2 ==== note: commit message saved in .hg/last-message.txt transaction abort! rollback completed qrefresh interrupted while patch was popped! 
(revert --all, qpush to recover) abort: pretxncommit.unexpectedabort hook exited with status 1 [255] (rebuilding at failure of qrefresh bases on rev #0, and it causes dropping status of "file2") $ sh "$TESTTMP/checkvisibility.sh" ==== 0:25e397dabed2 ==== == test visibility to precommit external hook $ hg update -C -q $ rm -f file2 $ hg qpush -q second-patch --config hooks.pretxncommit.unexpectedabort= now at: second-patch $ echo bbbb >> file2 $ cat >> .hg/hgrc < [hooks] > precommit.checkvisibility = sh "$TESTTMP/checkvisibility.sh" > EOF $ sh "$TESTTMP/checkvisibility.sh" ==== 1:e30108269082 M file2 ==== $ hg qrefresh ==== 0:25e397dabed2 A file2 ==== transaction abort! rollback completed qrefresh interrupted while patch was popped! (revert --all, qpush to recover) abort: pretxncommit.unexpectedabort hook exited with status 1 [255] $ sh "$TESTTMP/checkvisibility.sh" ==== 0:25e397dabed2 ==== $ cat >> .hg/hgrc < [hooks] > precommit.checkvisibility = > EOF == test visibility to pretxncommit external hook $ hg update -C -q $ rm -f file2 $ hg qpush -q second-patch --config hooks.pretxncommit.unexpectedabort= now at: second-patch $ echo bbbb >> file2 $ cat >> .hg/hgrc < [hooks] > pretxncommit.checkvisibility = sh "$TESTTMP/checkvisibility.sh" > # make checkvisibility run before unexpectedabort > priority.pretxncommit.checkvisibility = 10 > EOF $ sh "$TESTTMP/checkvisibility.sh" ==== 1:e30108269082 M file2 ==== $ hg qrefresh ==== 0:25e397dabed2 A file2 ==== transaction abort! rollback completed qrefresh interrupted while patch was popped! 
(revert --all, qpush to recover) abort: pretxncommit.unexpectedabort hook exited with status 1 [255] $ sh "$TESTTMP/checkvisibility.sh" ==== 0:25e397dabed2 ==== $ cat >> .hg/hgrc < [hooks] > pretxncommit.checkvisibility = > EOF mercurial-3.7.3/tests/test-cat.t0000644000175000017500000000207112676531525016202 0ustar mpmmpm00000000000000 $ hg init $ echo 0 > a $ echo 0 > b $ hg ci -A -m m adding a adding b $ hg rm a $ hg cat a 0 $ hg cat --decode a # more tests in test-encode 0 $ echo 1 > b $ hg ci -m m $ echo 2 > b $ hg cat -r 0 a 0 $ hg cat -r 0 b 0 $ hg cat -r 1 a a: no such file in rev 7040230c159c [1] $ hg cat -r 1 b 1 Test multiple files $ echo 3 > c $ hg ci -Am addmore c $ hg cat b c 1 3 $ hg cat . 1 3 $ hg cat . c 1 3 Test fileset $ hg cat 'set:not(b) or a' 3 $ hg cat 'set:c or b' 1 3 $ mkdir tmp $ hg cat --output tmp/HH_%H c $ hg cat --output tmp/RR_%R c $ hg cat --output tmp/h_%h c $ hg cat --output tmp/r_%r c $ hg cat --output tmp/%s_s c $ hg cat --output tmp/%d%%_d c $ hg cat --output tmp/%p_p c $ hg log -r . 
--template "{rev}: {node|short}\n" 2: 45116003780e $ find tmp -type f | sort tmp/.%_d tmp/HH_45116003780e3678b333fb2c99fa7d559c8457e9 tmp/RR_2 tmp/c_p tmp/c_s tmp/h_45116003780e tmp/r_2 Test working directory $ echo b-wdir > b $ hg cat -r 'wdir()' b b-wdir mercurial-3.7.3/tests/test-merge-revert2.t0000644000175000017500000000355512676531525020131 0ustar mpmmpm00000000000000 $ hg init $ echo "added file1" > file1 $ echo "another line of text" >> file1 $ echo "added file2" > file2 $ hg add file1 file2 $ hg commit -m "added file1 and file2" $ echo "changed file1" >> file1 $ hg commit -m "changed file1" $ hg -q log 1:dfab7f3c2efb 0:c3fa057dd86f $ hg id dfab7f3c2efb tip $ hg update -C 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg id c3fa057dd86f $ echo "changed file1" >> file1 $ hg id c3fa057dd86f+ $ hg revert --no-backup --all reverting file1 $ hg diff $ hg status $ hg id c3fa057dd86f $ hg update 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg diff $ hg status $ hg id dfab7f3c2efb tip $ hg update -C 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo "changed file1 different" >> file1 $ hg update merging file1 warning: conflicts while merging file1! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges [1] $ hg diff --nodates diff -r dfab7f3c2efb file1 --- a/file1 +++ b/file1 @@ -1,3 +1,7 @@ added file1 another line of text +<<<<<<< working copy: c3fa057dd86f - test: added file1 and file2 +changed file1 different +======= changed file1 +>>>>>>> destination: dfab7f3c2efb - test: changed file1 $ hg status M file1 ? file1.orig $ hg id dfab7f3c2efb+ tip $ hg revert --no-backup --all reverting file1 $ hg diff $ hg status ? file1.orig $ hg id dfab7f3c2efb tip $ hg revert -r tip --no-backup --all $ hg diff $ hg status ? 
file1.orig $ hg id dfab7f3c2efb tip $ hg update -C 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg diff $ hg status ? file1.orig $ hg id dfab7f3c2efb tip mercurial-3.7.3/tests/test-dispatch.py0000644000175000017500000000142212676531525017416 0ustar mpmmpm00000000000000import os from mercurial import dispatch def testdispatch(cmd): """Simple wrapper around dispatch.dispatch() Prints command and result value, but does not handle quoting. """ print "running: %s" % (cmd,) req = dispatch.request(cmd.split()) result = dispatch.dispatch(req) print "result: %r" % (result,) testdispatch("init test1") os.chdir('test1') # create file 'foo', add and commit f = open('foo', 'wb') f.write('foo\n') f.close() testdispatch("add foo") testdispatch("commit -m commit1 -d 2000-01-01 foo") # append to file 'foo' and commit f = open('foo', 'ab') f.write('bar\n') f.close() testdispatch("commit -m commit2 -d 2000-01-02 foo") # check 88803a69b24 (fancyopts modified command table) testdispatch("log -r 0") testdispatch("log -r tip") mercurial-3.7.3/tests/test-http-bundle1.t0000644000175000017500000002720412676531525017747 0ustar mpmmpm00000000000000#require serve This test is a duplicate of 'test-http.t', feel free to factor out parts that are not bundle1/bundle2 specific. 
$ cat << EOF >> $HGRCPATH > [experimental] > # This test is dedicated to interaction through old bundle > bundle2-exp = False > EOF $ hg init test $ cd test $ echo foo>foo $ mkdir foo.d foo.d/bAr.hg.d foo.d/baR.d.hg $ echo foo>foo.d/foo $ echo bar>foo.d/bAr.hg.d/BaR $ echo bar>foo.d/baR.d.hg/bAR $ hg commit -A -m 1 adding foo adding foo.d/bAr.hg.d/BaR adding foo.d/baR.d.hg/bAR adding foo.d/foo $ hg serve -p $HGPORT -d --pid-file=../hg1.pid -E ../error.log $ hg --config server.uncompressed=False serve -p $HGPORT1 -d --pid-file=../hg2.pid Test server address cannot be reused #if windows $ hg serve -p $HGPORT1 2>&1 abort: cannot start server at ':$HGPORT1': * (glob) [255] #else $ hg serve -p $HGPORT1 2>&1 abort: cannot start server at ':$HGPORT1': Address already in use [255] #endif $ cd .. $ cat hg1.pid hg2.pid >> $DAEMON_PIDS clone via stream $ hg clone --uncompressed http://localhost:$HGPORT/ copy 2>&1 streaming all changes 6 files to transfer, 606 bytes of data transferred * bytes in * seconds (*/sec) (glob) searching for changes no changes found updating to branch default 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg verify -R copy checking changesets checking manifests crosschecking files in changesets and manifests checking files 4 files, 1 changesets, 4 total revisions try to clone via stream, should use pull instead $ hg clone --uncompressed http://localhost:$HGPORT1/ copy2 requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 4 changes to 4 files updating to branch default 4 files updated, 0 files merged, 0 files removed, 0 files unresolved clone via pull $ hg clone http://localhost:$HGPORT1/ copy-pull requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 4 changes to 4 files updating to branch default 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg verify -R copy-pull checking changesets checking manifests 
crosschecking files in changesets and manifests checking files 4 files, 1 changesets, 4 total revisions $ cd test $ echo bar > bar $ hg commit -A -d '1 0' -m 2 adding bar $ cd .. clone over http with --update $ hg clone http://localhost:$HGPORT1/ updated --update 0 requesting all changes adding changesets adding manifests adding file changes added 2 changesets with 5 changes to 5 files updating to branch default 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -r . -R updated changeset: 0:8b6053c928fe user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1 $ rm -rf updated incoming via HTTP $ hg clone http://localhost:$HGPORT1/ --rev 0 partial adding changesets adding manifests adding file changes added 1 changesets with 4 changes to 4 files updating to branch default 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd partial $ touch LOCAL $ hg ci -qAm LOCAL $ hg incoming http://localhost:$HGPORT1/ --template '{desc}\n' comparing with http://localhost:$HGPORT1/ searching for changes 2 $ cd .. pull $ cd copy-pull $ echo '[hooks]' >> .hg/hgrc $ echo "changegroup = printenv.py changegroup" >> .hg/hgrc $ hg pull pulling from http://localhost:$HGPORT1/ searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files changegroup hook: HG_NODE=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_NODE_LAST=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=http://localhost:$HGPORT1/ (glob) (run 'hg update' to get a working copy) $ cd .. 
clone from invalid URL $ hg clone http://localhost:$HGPORT/bad abort: HTTP Error 404: Not Found [255] test http authentication + use the same server to test server side streaming preference $ cd test $ cat << EOT > userpass.py > import base64 > from mercurial.hgweb import common > def perform_authentication(hgweb, req, op): > auth = req.env.get('HTTP_AUTHORIZATION') > if not auth: > raise common.ErrorResponse(common.HTTP_UNAUTHORIZED, 'who', > [('WWW-Authenticate', 'Basic Realm="mercurial"')]) > if base64.b64decode(auth.split()[1]).split(':', 1) != ['user', 'pass']: > raise common.ErrorResponse(common.HTTP_FORBIDDEN, 'no') > def extsetup(): > common.permhooks.insert(0, perform_authentication) > EOT $ hg --config extensions.x=userpass.py serve -p $HGPORT2 -d --pid-file=pid \ > --config server.preferuncompressed=True \ > --config web.push_ssl=False --config web.allow_push=* -A ../access.log $ cat pid >> $DAEMON_PIDS $ cat << EOF > get_pass.py > import getpass > def newgetpass(arg): > return "pass" > getpass.getpass = newgetpass > EOF $ hg id http://localhost:$HGPORT2/ abort: http authorization required for http://localhost:$HGPORT2/ [255] $ hg id http://localhost:$HGPORT2/ abort: http authorization required for http://localhost:$HGPORT2/ [255] $ hg id --config ui.interactive=true --config extensions.getpass=get_pass.py http://user@localhost:$HGPORT2/ http authorization required for http://localhost:$HGPORT2/ realm: mercurial user: user password: 5fed3813f7f5 $ hg id http://user:pass@localhost:$HGPORT2/ 5fed3813f7f5 $ echo '[auth]' >> .hg/hgrc $ echo 'l.schemes=http' >> .hg/hgrc $ echo 'l.prefix=lo' >> .hg/hgrc $ echo 'l.username=user' >> .hg/hgrc $ echo 'l.password=pass' >> .hg/hgrc $ hg id http://localhost:$HGPORT2/ 5fed3813f7f5 $ hg id http://localhost:$HGPORT2/ 5fed3813f7f5 $ hg id http://user@localhost:$HGPORT2/ 5fed3813f7f5 $ hg clone http://user:pass@localhost:$HGPORT2/ dest 2>&1 streaming all changes 7 files to transfer, 916 bytes of data transferred * bytes 
in * seconds (*/sec) (glob) searching for changes no changes found updating to branch default 5 files updated, 0 files merged, 0 files removed, 0 files unresolved --pull should override server's preferuncompressed $ hg clone --pull http://user:pass@localhost:$HGPORT2/ dest-pull 2>&1 requesting all changes adding changesets adding manifests adding file changes added 2 changesets with 5 changes to 5 files updating to branch default 5 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg id http://user2@localhost:$HGPORT2/ abort: http authorization required for http://localhost:$HGPORT2/ [255] $ hg id http://user:pass2@localhost:$HGPORT2/ abort: HTTP Error 403: no [255] $ hg -R dest tag -r tip top $ hg -R dest push http://user:pass@localhost:$HGPORT2/ pushing to http://user:***@localhost:$HGPORT2/ searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files $ hg rollback -q $ cut -c38- ../access.log "GET /?cmd=capabilities HTTP/1.1" 200 - "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces "GET /?cmd=capabilities HTTP/1.1" 200 - "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces "GET /?cmd=capabilities HTTP/1.1" 200 - "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks "GET /?cmd=capabilities HTTP/1.1" 200 - "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks "GET /?cmd=capabilities HTTP/1.1" 200 - "GET /?cmd=lookup HTTP/1.1" 200 - 
x-hgarg-1:key=tip "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks "GET /?cmd=capabilities HTTP/1.1" 200 - "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks "GET /?cmd=capabilities HTTP/1.1" 200 - "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks "GET /?cmd=capabilities HTTP/1.1" 200 - "GET /?cmd=branchmap HTTP/1.1" 200 - "GET /?cmd=stream_out HTTP/1.1" 401 - "GET /?cmd=stream_out HTTP/1.1" 200 - "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D5fed3813f7f5e1824344fdc9cf8f63bb662c292d "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases "GET /?cmd=capabilities HTTP/1.1" 200 - "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=bookmarks "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:common=0000000000000000000000000000000000000000&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases "GET /?cmd=capabilities HTTP/1.1" 200 - "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces "GET /?cmd=capabilities HTTP/1.1" 200 - "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces "GET /?cmd=listkeys HTTP/1.1" 403 - 
x-hgarg-1:namespace=namespaces "GET /?cmd=capabilities HTTP/1.1" 200 - "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D7f4e523d01f2cc3765ac8934da3d14db775ff872 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=phases "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks "GET /?cmd=branchmap HTTP/1.1" 200 - "GET /?cmd=branchmap HTTP/1.1" 200 - "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks "POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=686173686564+5eb5abfefeea63c80dd7553bcc3783f37e0c5524 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases $ cd .. clone of serve with repo in root and unserved subrepo (issue2970) $ hg --cwd test init sub $ echo empty > test/sub/empty $ hg --cwd test/sub add empty $ hg --cwd test/sub commit -qm 'add empty' $ hg --cwd test/sub tag -r 0 something $ echo sub = sub > test/.hgsub $ hg --cwd test add .hgsub $ hg --cwd test commit -qm 'add subrepo' $ hg clone http://localhost:$HGPORT noslash-clone requesting all changes adding changesets adding manifests adding file changes added 3 changesets with 7 changes to 7 files updating to branch default abort: HTTP Error 404: Not Found [255] $ hg clone http://localhost:$HGPORT/ slash-clone requesting all changes adding changesets adding manifests adding file changes added 3 changesets with 7 changes to 7 files updating to branch default abort: HTTP Error 404: Not Found [255] check error log $ cat error.log mercurial-3.7.3/tests/test-bundle.t0000644000175000017500000004322612676531525016713 0ustar mpmmpm00000000000000 $ cat << EOF >> $HGRCPATH > [format] > usegeneraldelta=yes > EOF Setting up test $ hg init test $ cd test $ echo 0 > afile $ hg add afile $ hg commit -m "0.0" $ echo 1 >> afile $ hg commit -m "0.1" $ echo 2 >> afile $ hg commit -m "0.2" $ echo 3 >> afile $ hg commit -m "0.3" $ hg update -C 0 1 files updated, 0 files merged, 0 files removed, 0 files 
unresolved $ echo 1 >> afile $ hg commit -m "1.1" created new head $ echo 2 >> afile $ hg commit -m "1.2" $ echo "a line" > fred $ echo 3 >> afile $ hg add fred $ hg commit -m "1.3" $ hg mv afile adifferentfile $ hg commit -m "1.3m" $ hg update -C 3 1 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg mv afile anotherfile $ hg commit -m "0.3m" $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 4 files, 9 changesets, 7 total revisions $ cd .. $ hg init empty Bundle and phase $ hg -R test phase --force --secret 0 $ hg -R test bundle phase.hg empty searching for changes no changes found (ignored 9 secret changesets) [1] $ hg -R test phase --draft -r 'head()' Bundle --all $ hg -R test bundle --all all.hg 9 changesets found Bundle test to full.hg $ hg -R test bundle full.hg empty searching for changes 9 changesets found Unbundle full.hg in test $ hg -R test unbundle full.hg adding changesets adding manifests adding file changes added 0 changesets with 0 changes to 4 files (run 'hg update' to get a working copy) Verify empty $ hg -R empty heads [1] $ hg -R empty verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 0 files, 0 changesets, 0 total revisions Pull full.hg into test (using --cwd) $ hg --cwd test pull ../full.hg pulling from ../full.hg searching for changes no changes found Verify that there are no leaked temporary files after pull (issue2797) $ ls test/.hg | grep .hg10un [1] Pull full.hg into empty (using --cwd) $ hg --cwd empty pull ../full.hg pulling from ../full.hg requesting all changes adding changesets adding manifests adding file changes added 9 changesets with 7 changes to 4 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) Rollback empty $ hg -R empty rollback repository tip rolled back to revision -1 (undo pull) Pull full.hg into empty again (using --cwd) $ hg --cwd empty pull ../full.hg pulling 
from ../full.hg requesting all changes adding changesets adding manifests adding file changes added 9 changesets with 7 changes to 4 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) Pull full.hg into test (using -R) $ hg -R test pull full.hg pulling from full.hg searching for changes no changes found Pull full.hg into empty (using -R) $ hg -R empty pull full.hg pulling from full.hg searching for changes no changes found Rollback empty $ hg -R empty rollback repository tip rolled back to revision -1 (undo pull) Pull full.hg into empty again (using -R) $ hg -R empty pull full.hg pulling from full.hg requesting all changes adding changesets adding manifests adding file changes added 9 changesets with 7 changes to 4 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) Log -R full.hg in fresh empty $ rm -r empty $ hg init empty $ cd empty $ hg -R bundle://../full.hg log changeset: 8:aa35859c02ea tag: tip parent: 3:eebf5a27f8ca user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0.3m changeset: 7:a6a34bfa0076 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1.3m changeset: 6:7373c1169842 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1.3 changeset: 5:1bb50a9436a7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1.2 changeset: 4:095197eb4973 parent: 0:f9ee2f85a263 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1.1 changeset: 3:eebf5a27f8ca user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0.3 changeset: 2:e38ba6f5b7e0 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0.2 changeset: 1:34c2bf6b0626 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0.1 changeset: 0:f9ee2f85a263 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0.0 Make sure bundlerepo doesn't leak tempfiles (issue2491) $ ls .hg 00changelog.i cache requires store Pull ../full.hg into empty (with hook) $ echo "[hooks]" >> .hg/hgrc $ echo "changegroup = printenv.py changegroup" >> .hg/hgrc doesn't work (yet ?) 
hg -R bundle://../full.hg verify $ hg pull bundle://../full.hg pulling from bundle:../full.hg requesting all changes adding changesets adding manifests adding file changes added 9 changesets with 7 changes to 4 files (+1 heads) changegroup hook: HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735 HG_NODE_LAST=aa35859c02ea8bd48da5da68cd2740ac71afcbaf HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=bundle:../full.hg (glob) (run 'hg heads' to see heads, 'hg merge' to merge) Rollback empty $ hg rollback repository tip rolled back to revision -1 (undo pull) $ cd .. Log -R bundle:empty+full.hg $ hg -R bundle:empty+full.hg log --template="{rev} "; echo "" 8 7 6 5 4 3 2 1 0 Pull full.hg into empty again (using -R; with hook) $ hg -R empty pull full.hg pulling from full.hg requesting all changes adding changesets adding manifests adding file changes added 9 changesets with 7 changes to 4 files (+1 heads) changegroup hook: HG_NODE=f9ee2f85a263049e9ae6d37a0e67e96194ffb735 HG_NODE_LAST=aa35859c02ea8bd48da5da68cd2740ac71afcbaf HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=bundle:empty+full.hg (glob) (run 'hg heads' to see heads, 'hg merge' to merge) Cannot produce streaming clone bundles with "hg bundle" $ hg -R test bundle -t packed1 packed.hg abort: packed bundles cannot be produced by "hg bundle" (use "hg debugcreatestreamclonebundle") [255] packed1 is produced properly $ hg -R test debugcreatestreamclonebundle packed.hg writing 2663 bytes for 6 files bundle requirements: generaldelta, revlogv1 $ f -B 64 --size --sha1 --hexdump packed.hg packed.hg: size=2826, sha1=e139f97692a142b19cdcff64a69697d5307ce6d4 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 06 00 00 |HGS1UN..........| 0010: 00 00 00 00 0a 67 00 16 67 65 6e 65 72 61 6c 64 |.....g..generald| 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 76 31 00 64 61 |elta,revlogv1.da| 0030: 74 61 2f 61 64 69 66 66 65 72 65 6e 74 66 69 6c |ta/adifferentfil| $ hg debugbundle --spec packed.hg none-packed1;requirements%3Dgeneraldelta%2Crevlogv1 generaldelta 
requirement is listed in stream clone bundles $ hg --config format.generaldelta=true init testgd $ cd testgd $ touch foo $ hg -q commit -A -m initial $ cd .. $ hg -R testgd debugcreatestreamclonebundle packedgd.hg writing 301 bytes for 3 files bundle requirements: generaldelta, revlogv1 $ f -B 64 --size --sha1 --hexdump packedgd.hg packedgd.hg: size=396, sha1=981f9e589799335304a5a9a44caa3623a48d2a9f 0000: 48 47 53 31 55 4e 00 00 00 00 00 00 00 03 00 00 |HGS1UN..........| 0010: 00 00 00 00 01 2d 00 16 67 65 6e 65 72 61 6c 64 |.....-..generald| 0020: 65 6c 74 61 2c 72 65 76 6c 6f 67 76 31 00 64 61 |elta,revlogv1.da| 0030: 74 61 2f 66 6f 6f 2e 69 00 36 34 0a 00 03 00 01 |ta/foo.i.64.....| $ hg debugbundle --spec packedgd.hg none-packed1;requirements%3Dgeneraldelta%2Crevlogv1 Unpacking packed1 bundles with "hg unbundle" isn't allowed $ hg init packed $ hg -R packed unbundle packed.hg abort: packed bundles cannot be applied with "hg unbundle" (use "hg debugapplystreamclonebundle") [255] packed1 can be consumed from debug command $ hg -R packed debugapplystreamclonebundle packed.hg 6 files to transfer, 2.60 KB of data transferred 2.60 KB in *.* seconds (* */sec) (glob) Does not work on non-empty repo $ hg -R packed debugapplystreamclonebundle packed.hg abort: cannot apply stream clone bundle on non-empty repo [255] Create partial clones $ rm -r empty $ hg init empty $ hg clone -r 3 test partial adding changesets adding manifests adding file changes added 4 changesets with 4 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg clone partial partial2 updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd partial Log -R full.hg in partial $ hg -R bundle://../full.hg log -T phases changeset: 8:aa35859c02ea tag: tip phase: draft parent: 3:eebf5a27f8ca user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0.3m changeset: 7:a6a34bfa0076 phase: draft user: test 
date: Thu Jan 01 00:00:00 1970 +0000 summary: 1.3m changeset: 6:7373c1169842 phase: draft user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1.3 changeset: 5:1bb50a9436a7 phase: draft user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1.2 changeset: 4:095197eb4973 phase: draft parent: 0:f9ee2f85a263 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1.1 changeset: 3:eebf5a27f8ca phase: public user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0.3 changeset: 2:e38ba6f5b7e0 phase: public user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0.2 changeset: 1:34c2bf6b0626 phase: public user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0.1 changeset: 0:f9ee2f85a263 phase: public user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0.0 Incoming full.hg in partial $ hg incoming bundle://../full.hg comparing with bundle:../full.hg searching for changes changeset: 4:095197eb4973 parent: 0:f9ee2f85a263 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1.1 changeset: 5:1bb50a9436a7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1.2 changeset: 6:7373c1169842 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1.3 changeset: 7:a6a34bfa0076 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1.3m changeset: 8:aa35859c02ea tag: tip parent: 3:eebf5a27f8ca user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0.3m Outgoing -R full.hg vs partial2 in partial $ hg -R bundle://../full.hg outgoing ../partial2 comparing with ../partial2 searching for changes changeset: 4:095197eb4973 parent: 0:f9ee2f85a263 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1.1 changeset: 5:1bb50a9436a7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1.2 changeset: 6:7373c1169842 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1.3 changeset: 7:a6a34bfa0076 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1.3m changeset: 8:aa35859c02ea tag: tip parent: 3:eebf5a27f8ca user: test date: Thu Jan 01 00:00:00 1970 
+0000 summary: 0.3m Outgoing -R does-not-exist.hg vs partial2 in partial $ hg -R bundle://../does-not-exist.hg outgoing ../partial2 abort: *../does-not-exist.hg* (glob) [255] $ cd .. hide outer repo $ hg init Direct clone from bundle (all-history) $ hg clone full.hg full-clone requesting all changes adding changesets adding manifests adding file changes added 9 changesets with 7 changes to 4 files (+1 heads) updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R full-clone heads changeset: 8:aa35859c02ea tag: tip parent: 3:eebf5a27f8ca user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0.3m changeset: 7:a6a34bfa0076 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1.3m $ rm -r full-clone When cloning from a non-copiable repository into '', do not recurse infinitely (issue2528) $ hg clone full.hg '' abort: empty destination path is not valid [255] test for https://bz.mercurial-scm.org/216 Unbundle incremental bundles into fresh empty in one go $ rm -r empty $ hg init empty $ hg -R test bundle --base null -r 0 ../0.hg 1 changesets found $ hg -R test bundle --base 0 -r 1 ../1.hg 1 changesets found $ hg -R empty unbundle -u ../0.hg ../1.hg adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files 1 files updated, 0 files merged, 0 files removed, 0 files unresolved View full contents of the bundle $ hg -R test bundle --base null -r 3 ../partial.hg 4 changesets found $ cd test $ hg -R ../../partial.hg log -r "bundle()" changeset: 0:f9ee2f85a263 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0.0 changeset: 1:34c2bf6b0626 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0.1 changeset: 2:e38ba6f5b7e0 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0.2 changeset: 3:eebf5a27f8ca user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0.3 $ cd .. 
test for 540d1059c802 test for 540d1059c802 $ hg init orig $ cd orig $ echo foo > foo $ hg add foo $ hg ci -m 'add foo' $ hg clone . ../copy updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg tag foo $ cd ../copy $ echo >> foo $ hg ci -m 'change foo' $ hg bundle ../bundle.hg ../orig searching for changes 1 changesets found $ cd ../orig $ hg incoming ../bundle.hg comparing with ../bundle.hg searching for changes changeset: 2:ed1b79f46b9a tag: tip parent: 0:bbd179dfa0a7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: change foo $ cd .. test bundle with # in the filename (issue2154): $ cp bundle.hg 'test#bundle.hg' $ cd orig $ hg incoming '../test#bundle.hg' comparing with ../test abort: unknown revision 'bundle.hg'! [255] note that percent encoding is not handled: $ hg incoming ../test%23bundle.hg abort: repository ../test%23bundle.hg not found! [255] $ cd .. test to bundle revisions on the newly created branch (issue3828): $ hg -q clone -U test test-clone $ cd test $ hg -q branch foo $ hg commit -m "create foo branch" $ hg -q outgoing ../test-clone 9:b4f5acb1ee27 $ hg -q bundle --branch foo foo.hg ../test-clone $ hg -R foo.hg -q log -r "bundle()" 9:b4f5acb1ee27 $ cd .. test for https://bz.mercurial-scm.org/1144 test that verify bundle does not traceback partial history bundle, fails w/ unknown parent $ hg -R bundle.hg verify abort: 00changelog.i@bbd179dfa0a7: unknown parent! 
[255] full history bundle, refuses to verify non-local repo $ hg -R all.hg verify abort: cannot verify bundle or remote repos [255] but, regular verify must continue to work $ hg -R orig verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 2 changesets, 2 total revisions diff against bundle $ hg init b $ cd b $ hg -R ../all.hg diff -r tip diff -r aa35859c02ea anotherfile --- a/anotherfile Thu Jan 01 00:00:00 1970 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,4 +0,0 @@ -0 -1 -2 -3 $ cd .. bundle single branch $ hg init branchy $ cd branchy $ echo a >a $ echo x >x $ hg ci -Ama adding a adding x $ echo c >c $ echo xx >x $ hg ci -Amc adding c $ echo c1 >c1 $ hg ci -Amc1 adding c1 $ hg up 0 1 files updated, 0 files merged, 2 files removed, 0 files unresolved $ echo b >b $ hg ci -Amb adding b created new head $ echo b1 >b1 $ echo xx >x $ hg ci -Amb1 adding b1 $ hg clone -q -r2 . part == bundling via incoming $ hg in -R part --bundle incoming.hg --template "{node}\n" . comparing with . searching for changes 1a38c1b849e8b70c756d2d80b0b9a3ac0b7ea11a 057f4db07f61970e1c11e83be79e9d08adc4dc31 == bundling $ hg bundle bundle.hg part --debug --config progress.debug=true query 1; heads searching for changes all remote heads known locally 2 changesets found list of changesets: 1a38c1b849e8b70c756d2d80b0b9a3ac0b7ea11a 057f4db07f61970e1c11e83be79e9d08adc4dc31 bundle2-output-bundle: "HG20", (1 params) 1 parts total bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundling: 1/2 changesets (50.00%) bundling: 2/2 changesets (100.00%) bundling: 1/2 manifests (50.00%) bundling: 2/2 manifests (100.00%) bundling: b 1/3 files (33.33%) bundling: b1 2/3 files (66.67%) bundling: x 3/3 files (100.00%) == Test for issue3441 $ hg clone -q -r0 . 
part2 $ hg -q -R part2 pull bundle.hg $ hg -R part2 verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 4 files, 3 changesets, 5 total revisions == Test bundling no commits $ hg bundle -r 'public()' no-output.hg abort: no commits to bundle [255] mercurial-3.7.3/tests/test-ssh.t0000644000175000017500000004013612676531525016234 0ustar mpmmpm00000000000000 This test tries to exercise the ssh functionality with a dummy script $ cat <> $HGRCPATH > [format] > usegeneraldelta=yes > EOF creating 'remote' repo $ hg init remote $ cd remote $ echo this > foo $ echo this > fooO $ hg ci -A -m "init" foo fooO insert a closed branch (issue4428) $ hg up null 0 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg branch closed marked working directory as branch closed (branches are permanent and global, did you want a bookmark?) $ hg ci -mc0 $ hg ci --close-branch -mc1 $ hg up -q default configure for serving $ cat < .hg/hgrc > [server] > uncompressed = True > > [hooks] > changegroup = printenv.py changegroup-in-remote 0 ../dummylog > EOF $ cd .. repo not found error $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local remote: abort: repository nonexistent not found! abort: no suitable response from remote hg! [255] non-existent absolute path $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/`pwd`/nonexistent local remote: abort: repository $TESTTMP/nonexistent not found! abort: no suitable response from remote hg! 
[255] clone remote via stream $ hg clone -e "python \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/remote local-stream streaming all changes 4 files to transfer, 615 bytes of data transferred 615 bytes in * seconds (*) (glob) searching for changes no changes found updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd local-stream $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 3 changesets, 2 total revisions $ hg branches default 0:1160648e36ce $ cd .. clone bookmarks via stream $ hg -R local-stream book mybook $ hg clone -e "python \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/local-stream stream2 streaming all changes 4 files to transfer, 615 bytes of data transferred 615 bytes in * seconds (*) (glob) searching for changes no changes found updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd stream2 $ hg book mybook 0:1160648e36ce $ cd .. 
$ rm -rf local-stream stream2 clone remote via pull $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local requesting all changes adding changesets adding manifests adding file changes added 3 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved verify $ cd local $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 3 changesets, 2 total revisions $ echo '[hooks]' >> .hg/hgrc $ echo "changegroup = printenv.py changegroup-in-local 0 ../dummylog" >> .hg/hgrc empty default pull $ hg paths default = ssh://user@dummy/remote $ hg pull -e "python \"$TESTDIR/dummyssh\"" pulling from ssh://user@dummy/remote searching for changes no changes found pull from wrong ssh URL $ hg pull -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/doesnotexist pulling from ssh://user@dummy/doesnotexist remote: abort: repository doesnotexist not found! abort: no suitable response from remote hg! 
[255] local change $ echo bleah > foo $ hg ci -m "add" updating rc $ echo "default-push = ssh://user@dummy/remote" >> .hg/hgrc $ echo "[ui]" >> .hg/hgrc $ echo "ssh = python \"$TESTDIR/dummyssh\"" >> .hg/hgrc find outgoing $ hg out ssh://user@dummy/remote comparing with ssh://user@dummy/remote searching for changes changeset: 3:a28a9d1a809c tag: tip parent: 0:1160648e36ce user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add find incoming on the remote side $ hg incoming -R ../remote -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/local comparing with ssh://user@dummy/local searching for changes changeset: 3:a28a9d1a809c tag: tip parent: 0:1160648e36ce user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add find incoming on the remote side (using absolute path) $ hg incoming -R ../remote -e "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/`pwd`" comparing with ssh://user@dummy/$TESTTMP/local searching for changes changeset: 3:a28a9d1a809c tag: tip parent: 0:1160648e36ce user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add push $ hg push pushing to ssh://user@dummy/remote searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files $ cd ../remote check remote tip $ hg tip changeset: 3:a28a9d1a809c tag: tip parent: 0:1160648e36ce user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 4 changesets, 3 total revisions $ hg cat -r tip foo bleah $ echo z > z $ hg ci -A -m z z created new head test pushkeys and bookmarks $ cd ../local $ hg debugpushkey --config ui.ssh="python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote namespaces bookmarks namespaces phases $ hg book foo -r 0 $ hg out -B comparing with ssh://user@dummy/remote searching for changed bookmarks foo 1160648e36ce $ hg push -B foo pushing to ssh://user@dummy/remote 
searching for changes no changes found exporting bookmark foo [1] $ hg debugpushkey --config ui.ssh="python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote bookmarks foo 1160648e36cec0054048a7edc4110c6f84fde594 $ hg book -f foo $ hg push --traceback pushing to ssh://user@dummy/remote searching for changes no changes found updating bookmark foo [1] $ hg book -d foo $ hg in -B comparing with ssh://user@dummy/remote searching for changed bookmarks foo a28a9d1a809c $ hg book -f -r 0 foo $ hg pull -B foo pulling from ssh://user@dummy/remote no changes found updating bookmark foo $ hg book -d foo $ hg push -B foo pushing to ssh://user@dummy/remote searching for changes no changes found deleting remote bookmark foo [1] a bad, evil hook that prints to stdout $ cat < $TESTTMP/badhook > import sys > sys.stdout.write("KABOOM\n") > EOF $ echo '[hooks]' >> ../remote/.hg/hgrc $ echo "changegroup.stdout = python $TESTTMP/badhook" >> ../remote/.hg/hgrc $ echo r > r $ hg ci -A -m z r push should succeed even though it has an unexpected response $ hg push pushing to ssh://user@dummy/remote searching for changes remote has heads on branch 'default' that are not known locally: 6c0482d977a3 remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: KABOOM $ hg -R ../remote heads changeset: 5:1383141674ec tag: tip parent: 3:a28a9d1a809c user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: z changeset: 4:6c0482d977a3 parent: 0:1160648e36ce user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: z clone bookmarks $ hg -R ../remote bookmark test $ hg -R ../remote bookmarks * test 4:6c0482d977a3 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local-bookmarks requesting all changes adding changesets adding manifests adding file changes added 6 changesets with 5 changes to 4 files (+1 heads) updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ 
hg -R local-bookmarks bookmarks test 4:6c0482d977a3 passwords in ssh urls are not supported (we use a glob here because different Python versions give different results here) $ hg push ssh://user:erroneouspwd@dummy/remote pushing to ssh://user:*@dummy/remote (glob) abort: password in URL not supported! [255] $ cd .. hide outer repo $ hg init Test remote paths with spaces (issue2983): $ hg init --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo" $ touch "$TESTTMP/a repo/test" $ hg -R 'a repo' commit -A -m "test" adding test $ hg -R 'a repo' tag tag $ hg id --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo" 73649e48688a $ hg id --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo#noNoNO" abort: unknown revision 'noNoNO'! [255] Test (non-)escaping of remote paths with spaces when cloning (issue3145): $ hg clone --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo" destination directory: a repo abort: destination 'a repo' is not empty [255] Test hg-ssh using a helper script that will restore PYTHONPATH (which might have been cleared by a hg.exe wrapper) and invoke hg-ssh with the right parameters: $ cat > ssh.sh << EOF > userhost="\$1" > SSH_ORIGINAL_COMMAND="\$2" > export SSH_ORIGINAL_COMMAND > PYTHONPATH="$PYTHONPATH" > export PYTHONPATH > python "$TESTDIR/../contrib/hg-ssh" "$TESTTMP/a repo" > EOF $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a repo" 73649e48688a $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a'repo" remote: Illegal repository "$TESTTMP/a'repo" (glob) abort: no suitable response from remote hg! [255] $ hg id --ssh "sh ssh.sh" --remotecmd hacking "ssh://user@dummy/a'repo" remote: Illegal command "hacking -R 'a'\''repo' serve --stdio" abort: no suitable response from remote hg! 
[255] $ SSH_ORIGINAL_COMMAND="'hg' -R 'a'repo' serve --stdio" python "$TESTDIR/../contrib/hg-ssh" Illegal command "'hg' -R 'a'repo' serve --stdio": No closing quotation [255] Test hg-ssh in read-only mode: $ cat > ssh.sh << EOF > userhost="\$1" > SSH_ORIGINAL_COMMAND="\$2" > export SSH_ORIGINAL_COMMAND > PYTHONPATH="$PYTHONPATH" > export PYTHONPATH > python "$TESTDIR/../contrib/hg-ssh" --read-only "$TESTTMP/remote" > EOF $ hg clone --ssh "sh ssh.sh" "ssh://user@dummy/$TESTTMP/remote" read-only-local requesting all changes adding changesets adding manifests adding file changes added 6 changesets with 5 changes to 4 files (+1 heads) updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd read-only-local $ echo "baz" > bar $ hg ci -A -m "unpushable commit" bar $ hg push --ssh "sh ../ssh.sh" pushing to ssh://user@dummy/*/remote (glob) searching for changes remote: Permission denied remote: pretxnopen.hg-ssh hook failed abort: push failed on remote [255] $ cd .. 
stderr from remote commands should be printed before stdout from local code (issue4336) $ hg clone remote stderr-ordering updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd stderr-ordering $ cat >> localwrite.py << EOF > from mercurial import exchange, extensions > > def wrappedpush(orig, repo, *args, **kwargs): > res = orig(repo, *args, **kwargs) > repo.ui.write('local stdout\n') > return res > > def extsetup(ui): > extensions.wrapfunction(exchange, 'push', wrappedpush) > EOF $ cat >> .hg/hgrc << EOF > [paths] > default-push = ssh://user@dummy/remote > [ui] > ssh = python "$TESTDIR/dummyssh" > [extensions] > localwrite = localwrite.py > EOF $ echo localwrite > foo $ hg commit -m 'testing localwrite' $ hg push pushing to ssh://user@dummy/remote searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: KABOOM local stdout debug output $ hg pull --debug ssh://user@dummy/remote pulling from ssh://user@dummy/remote running python ".*/dummyssh" user@dummy ('|")hg -R remote serve --stdio('|") (re) sending hello command sending between command remote: 371 remote: capabilities: lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 remote: 1 query 1; heads sending batch command searching for changes all remote heads known locally no changes found sending getbundle command bundle2-input-bundle: with-transaction bundle2-input-part: "listkeys" (params: 1 mandatory) supported bundle2-input-part: "listkeys" (params: 1 mandatory) supported bundle2-input-part: total payload size 45 bundle2-input-bundle: 1 parts total checking for 
updated bookmarks preparing listkeys for "phases" sending listkeys command received listkey for "phases": 15 bytes $ cd .. $ cat dummylog Got arguments 1:user@dummy 2:hg -R nonexistent serve --stdio Got arguments 1:user@dummy 2:hg -R $TESTTMP/nonexistent serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R local-stream serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R doesnotexist serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R local serve --stdio Got arguments 1:user@dummy 2:hg -R $TESTTMP/local serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio changegroup-in-remote hook: HG_BUNDLE2=1 HG_NODE=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_NODE_LAST=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_SOURCE=serve HG_TXNID=TXN:* HG_URL=remote:ssh:127.0.0.1 (glob) Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio changegroup-in-remote hook: HG_BUNDLE2=1 HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 HG_NODE_LAST=1383141674ec756a6056f6a9097618482fe0f4a6 HG_SOURCE=serve HG_TXNID=TXN:* HG_URL=remote:ssh:127.0.0.1 (glob) Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg init 'a repo' Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio Got 
arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio changegroup-in-remote hook: HG_BUNDLE2=1 HG_NODE=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_NODE_LAST=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_SOURCE=serve HG_TXNID=TXN:* HG_URL=remote:ssh:127.0.0.1 (glob) Got arguments 1:user@dummy 2:hg -R remote serve --stdio remote hook failure is attributed to remote $ cat > $TESTTMP/failhook << EOF > def hook(ui, repo, **kwargs): > ui.write('hook failure!\n') > ui.flush() > return 1 > EOF $ echo "pretxnchangegroup.fail = python:$TESTTMP/failhook:hook" >> remote/.hg/hgrc $ hg -q --config ui.ssh="python $TESTDIR/dummyssh" clone ssh://user@dummy/remote hookout $ cd hookout $ touch hookfailure $ hg -q commit -A -m 'remote hook failure' $ hg --config ui.ssh="python $TESTDIR/dummyssh" push pushing to ssh://user@dummy/remote searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: hook failure! remote: transaction abort! remote: rollback completed remote: pretxnchangegroup.fail hook failed abort: push failed on remote [255] mercurial-3.7.3/tests/test-revlog-ancestry.py.out0000644000175000017500000000045212676531525021553 0ustar mpmmpm00000000000000Ancestors of 5 4 2 0 Ancestors of 6 and 5 3 4 2 1 0 Ancestors of 5 and 4 4 2 0 Ancestors of 7, stop at 6 6 Ancestors of 7, including revs 7 6 5 3 4 2 1 0 Ancestors of 7, 5 and 3, including revs 7 5 3 6 4 2 1 0 Descendants of 5 7 8 Descendants of 5 and 3 6 7 8 Descendants of 5 and 4 5 7 8 mercurial-3.7.3/tests/test-merge6.t0000644000175000017500000000346712676531525016632 0ustar mpmmpm00000000000000 $ cat < merge > import sys, os > print "merging for", os.path.basename(sys.argv[1]) > EOF $ HGMERGE="python ../merge"; export HGMERGE $ hg init A1 $ cd A1 $ echo This is file foo1 > foo $ echo This is file bar1 > bar $ hg add foo bar $ hg commit -m "commit text" $ cd .. 
$ hg clone A1 B1 updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd A1 $ rm bar $ hg remove bar $ hg commit -m "commit test" $ cd ../B1 $ echo This is file foo22 > foo $ hg commit -m "commit test" $ cd .. $ hg clone A1 A2 updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg clone B1 B2 updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd A1 $ hg pull ../B1 pulling from ../B1 searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg merge 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg commit -m "commit test" bar should remain deleted. $ hg manifest --debug f9b0e817f6a48de3564c6b2957687c5e7297c5a0 644 foo $ cd ../B2 $ hg pull ../A2 pulling from ../A2 searching for changes adding changesets adding manifests adding file changes added 1 changesets with 0 changes to 0 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg merge 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg commit -m "commit test" bar should remain deleted. $ hg manifest --debug f9b0e817f6a48de3564c6b2957687c5e7297c5a0 644 foo $ cd .. 
mercurial-3.7.3/tests/test-pull-r.t0000644000175000017500000000654112676531525016654 0ustar mpmmpm00000000000000 $ hg init repo $ cd repo $ echo foo > foo $ hg ci -qAm 'add foo' $ echo >> foo $ hg ci -m 'change foo' $ hg up -qC 0 $ echo bar > bar $ hg ci -qAm 'add bar' $ hg log changeset: 2:effea6de0384 tag: tip parent: 0:bbd179dfa0a7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add bar changeset: 1:ed1b79f46b9a user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: change foo changeset: 0:bbd179dfa0a7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add foo $ cd .. don't show "(+1 heads)" message when pulling closed head $ hg clone -q repo repo2 $ hg clone -q repo2 repo3 $ cd repo2 $ hg up -q 0 $ echo hello >> foo $ hg ci -mx1 created new head $ hg ci -mx2 --close-branch $ cd ../repo3 $ hg heads -q --closed 2:effea6de0384 1:ed1b79f46b9a $ hg pull pulling from $TESTTMP/repo2 (glob) searching for changes adding changesets adding manifests adding file changes added 2 changesets with 1 changes to 1 files (run 'hg update' to get a working copy) $ hg heads -q --closed 4:00cfe9073916 2:effea6de0384 1:ed1b79f46b9a $ cd .. $ hg init copy $ cd copy Pull a missing revision: $ hg pull -qr missing ../repo abort: unknown revision 'missing'! 
[255] Pull multiple revisions with update: $ hg pull -qu -r 0 -r 1 ../repo $ hg -q parents 0:bbd179dfa0a7 $ hg rollback repository tip rolled back to revision -1 (undo pull) working directory now based on revision -1 $ hg pull -qr 0 ../repo $ hg log changeset: 0:bbd179dfa0a7 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add foo $ hg pull -qr 1 ../repo $ hg log changeset: 1:ed1b79f46b9a tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: change foo changeset: 0:bbd179dfa0a7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add foo This used to abort: received changelog group is empty: $ hg pull -qr 1 ../repo Test race condition with -r and -U (issue4707) We pull '-U -r ' and the name change right after/during the changegroup emission. We use http because http is better is our racy-est option. $ echo babar > ../repo/jungle $ cat < ../repo/.hg/hgrc > [hooks] > outgoing.makecommit = hg ci -Am 'racy commit'; echo committed in pull-race > EOF $ hg -R ../repo serve -p $HGPORT2 -d --pid-file=../repo.pid $ cat ../repo.pid >> $DAEMON_PIDS $ hg pull --rev default --update http://localhost:$HGPORT2/ pulling from http://localhost:$HGPORT2/ searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -G @ changeset: 2:effea6de0384 | tag: tip | parent: 0:bbd179dfa0a7 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: add bar | | o changeset: 1:ed1b79f46b9a |/ user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: change foo | o changeset: 0:bbd179dfa0a7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add foo $ cd .. mercurial-3.7.3/tests/test-convert-bzr-directories.t0000644000175000017500000001034612676531525022224 0ustar mpmmpm00000000000000#require bzr $ . 
"$TESTDIR/bzr-definitions" empty directory $ mkdir test-empty $ cd test-empty $ bzr init -q source $ cd source $ echo content > a $ bzr add -q a $ bzr commit -q -m 'Initial add' $ mkdir empty $ bzr add -q empty $ bzr commit -q -m 'Empty directory added' $ echo content > empty/something $ bzr add -q empty/something $ bzr commit -q -m 'Added file into directory' $ cd .. $ hg convert source source-hg initializing destination source-hg repository scanning source... sorting... converting... 2 Initial add 1 Empty directory added 0 Added file into directory $ manifest source-hg 1 % manifest of 1 644 a $ manifest source-hg tip % manifest of tip 644 a 644 empty/something $ cd .. directory renames $ mkdir test-dir-rename $ cd test-dir-rename $ bzr init -q source $ cd source $ mkdir tpyo $ echo content > tpyo/something $ bzr add -q tpyo $ bzr commit -q -m 'Added directory' $ bzr mv tpyo typo tpyo => typo $ bzr commit -q -m 'Oops, typo' $ cd .. $ hg convert source source-hg initializing destination source-hg repository scanning source... sorting... converting... 1 Added directory 0 Oops, typo $ manifest source-hg 0 % manifest of 0 644 tpyo/something $ manifest source-hg tip % manifest of tip 644 typo/something $ cd .. nested directory renames $ mkdir test-nested-dir-rename $ cd test-nested-dir-rename $ bzr init -q source $ cd source $ mkdir -p firstlevel/secondlevel/thirdlevel $ echo content > firstlevel/secondlevel/file $ echo this_needs_to_be_there_too > firstlevel/secondlevel/thirdlevel/stuff $ bzr add -q firstlevel $ bzr commit -q -m 'Added nested directories' $ bzr mv firstlevel/secondlevel secondlevel firstlevel/secondlevel => secondlevel $ bzr commit -q -m 'Moved secondlevel one level up' $ cd .. $ hg convert source source-hg initializing destination source-hg repository scanning source... sorting... converting... 
1 Added nested directories 0 Moved secondlevel one level up $ manifest source-hg tip % manifest of tip 644 secondlevel/file 644 secondlevel/thirdlevel/stuff $ cd .. directory remove $ mkdir test-dir-remove $ cd test-dir-remove $ bzr init -q source $ cd source $ mkdir src $ echo content > src/sourcecode $ bzr add -q src $ bzr commit -q -m 'Added directory' $ bzr rm -q src $ bzr commit -q -m 'Removed directory' $ cd .. $ hg convert source source-hg initializing destination source-hg repository scanning source... sorting... converting... 1 Added directory 0 Removed directory $ manifest source-hg 0 % manifest of 0 644 src/sourcecode $ manifest source-hg tip % manifest of tip $ cd .. directory replace $ mkdir test-dir-replace $ cd test-dir-replace $ bzr init -q source $ cd source $ mkdir first second $ echo content > first/file $ echo morecontent > first/dummy $ echo othercontent > second/something $ bzr add -q first second $ bzr commit -q -m 'Initial layout' $ bzr mv first/file second/file first/file => second/file $ bzr mv first third first => third $ bzr commit -q -m 'Some conflicting moves' $ cd .. $ hg convert source source-hg initializing destination source-hg repository scanning source... sorting... converting... 1 Initial layout 0 Some conflicting moves $ manifest source-hg tip % manifest of tip 644 second/file 644 second/something 644 third/dummy $ cd .. divergent nested renames (issue3089) $ mkdir test-divergent-renames $ cd test-divergent-renames $ bzr init -q source $ cd source $ mkdir -p a/c $ echo a > a/fa $ echo c > a/c/fc $ bzr add -q a $ bzr commit -q -m 'Initial layout' $ bzr mv a b a => b $ mkdir a $ bzr add a add(ed|ing) a (re) $ bzr mv b/c a/c b/c => a/c $ bzr status added: a/ renamed: a/? => b/? (re) a/c/? => a/c/? (re) $ bzr commit -q -m 'Divergent renames' $ cd .. $ hg convert source source-hg initializing destination source-hg repository scanning source... sorting... converting... 
1 Initial layout 0 Divergent renames $ hg -R source-hg st -C --change 1 A b/fa a/fa R a/fa $ hg -R source-hg manifest -r 1 a/c/fc b/fa $ cd .. mercurial-3.7.3/tests/test-parseindex2.py.out0000644000175000017500000000000512676531525020645 0ustar mpmmpm00000000000000done mercurial-3.7.3/tests/test-histedit-fold.t0000644000175000017500000003117012676531525020174 0ustar mpmmpm00000000000000Test histedit extension: Fold commands ====================================== This test file is dedicated to testing the fold command in non conflicting case. Initialization --------------- $ . "$TESTDIR/histedit-helpers.sh" $ cat >> $HGRCPATH < [alias] > logt = log --template '{rev}:{node|short} {desc|firstline}\n' > [extensions] > histedit= > EOF Simple folding -------------------- $ initrepo () > { > hg init r > cd r > for x in a b c d e f ; do > echo $x > $x > hg add $x > hg ci -m $x > done > } $ initrepo log before edit $ hg logt --graph @ 5:652413bf663e f | o 4:e860deea161a e | o 3:055a42cdd887 d | o 2:177f92b77385 c | o 1:d2ae7f538514 b | o 0:cb9a9f314b8b a $ hg histedit 177f92b77385 --commands - 2>&1 < pick e860deea161a e > pick 652413bf663e f > fold 177f92b77385 c > pick 055a42cdd887 d > EOF 0 files updated, 0 files merged, 4 files removed, 0 files unresolved 0 files updated, 0 files merged, 2 files removed, 0 files unresolved 2 files updated, 0 files merged, 0 files removed, 0 files unresolved log after edit $ hg logt --graph @ 4:9c277da72c9b d | o 3:6de59d13424a f | o 2:ee283cb5f2d5 e | o 1:d2ae7f538514 b | o 0:cb9a9f314b8b a post-fold manifest $ hg manifest a b c d e f check histedit_source $ hg log --debug --rev 3 changeset: 3:6de59d13424a8a13acd3e975514aed29dd0d9b2d phase: draft parent: 2:ee283cb5f2d5955443f23a27b697a04339e9a39a parent: -1:0000000000000000000000000000000000000000 manifest: 3:81eede616954057198ead0b2c73b41d1f392829a user: test date: Thu Jan 01 00:00:00 1970 +0000 files+: c f extra: branch=default extra: 
histedit_source=a4f7421b80f79fcc59fff01bcbf4a53d127dd6d3,177f92b773850b59254aa5e923436f921b55483b description: f *** c rollup will fold without preserving the folded commit's message $ OLDHGEDITOR=$HGEDITOR $ HGEDITOR=false $ hg histedit d2ae7f538514 --commands - 2>&1 < pick d2ae7f538514 b > roll ee283cb5f2d5 e > pick 6de59d13424a f > pick 9c277da72c9b d > EOF 0 files updated, 0 files merged, 4 files removed, 0 files unresolved 0 files updated, 0 files merged, 2 files removed, 0 files unresolved 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ HGEDITOR=$OLDHGEDITOR log after edit $ hg logt --graph @ 3:c4a9eb7989fc d | o 2:8e03a72b6f83 f | o 1:391ee782c689 b | o 0:cb9a9f314b8b a description is taken from rollup target commit $ hg log --debug --rev 1 changeset: 1:391ee782c68930be438ccf4c6a403daedbfbffa5 phase: draft parent: 0:cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b parent: -1:0000000000000000000000000000000000000000 manifest: 1:b5e112a3a8354e269b1524729f0918662d847c38 user: test date: Thu Jan 01 00:00:00 1970 +0000 files+: b e extra: branch=default extra: histedit_source=d2ae7f538514cd87c17547b0de4cea71fe1af9fb,ee283cb5f2d5955443f23a27b697a04339e9a39a description: b check saving last-message.txt $ cat > $TESTTMP/abortfolding.py < from mercurial import util > def abortfolding(ui, repo, hooktype, **kwargs): > ctx = repo[kwargs.get('node')] > if set(ctx.files()) == set(['c', 'd', 'f']): > return True # abort folding commit only > ui.warn('allow non-folding commit\\n') > EOF $ cat > .hg/hgrc < [hooks] > pretxncommit.abortfolding = python:$TESTTMP/abortfolding.py:abortfolding > EOF $ cat > $TESTTMP/editor.sh << EOF > echo "==== before editing" > cat \$1 > echo "====" > echo "check saving last-message.txt" >> \$1 > EOF $ rm -f .hg/last-message.txt $ hg status --rev '8e03a72b6f83^1::c4a9eb7989fc' A c A d A f $ HGEDITOR="sh $TESTTMP/editor.sh" hg histedit 8e03a72b6f83 --commands - 2>&1 < pick 8e03a72b6f83 f > fold c4a9eb7989fc d > EOF 0 files updated, 
0 files merged, 1 files removed, 0 files unresolved adding d allow non-folding commit 0 files updated, 0 files merged, 3 files removed, 0 files unresolved ==== before editing f *** c *** d HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. HG: -- HG: user: test HG: branch 'default' HG: added c HG: added d HG: added f ==== transaction abort! rollback completed abort: pretxncommit.abortfolding hook failed [255] $ cat .hg/last-message.txt f *** c *** d check saving last-message.txt $ cd .. $ rm -r r folding preserves initial author -------------------------------- $ initrepo $ hg ci --user "someone else" --amend --quiet tip before edit $ hg log --rev . changeset: 5:a00ad806cb55 tag: tip user: someone else date: Thu Jan 01 00:00:00 1970 +0000 summary: f $ hg --config progress.debug=1 --debug \ > histedit e860deea161a --commands - 2>&1 < egrep 'editing|unresolved' > pick e860deea161a e > fold a00ad806cb55 f > EOF editing: pick e860deea161a 4 e 1/2 changes (50.00%) editing: fold a00ad806cb55 5 f 2/2 changes (100.00%) 0 files updated, 0 files merged, 1 files removed, 0 files unresolved 0 files updated, 0 files merged, 2 files removed, 0 files unresolved 2 files updated, 0 files merged, 0 files removed, 0 files unresolved tip after edit $ hg log --rev . changeset: 4:698d4e8040a1 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: e $ cd .. $ rm -r r folding and creating no new change doesn't break: ------------------------------------------------- folded content is dropped during a merge. The folded commit should properly disappear. 
$ mkdir fold-to-empty-test $ cd fold-to-empty-test $ hg init $ printf "1\n2\n3\n" > file $ hg add file $ hg commit -m '1+2+3' $ echo 4 >> file $ hg commit -m '+4' $ echo 5 >> file $ hg commit -m '+5' $ echo 6 >> file $ hg commit -m '+6' $ hg logt --graph @ 3:251d831eeec5 +6 | o 2:888f9082bf99 +5 | o 1:617f94f13c0f +4 | o 0:0189ba417d34 1+2+3 $ hg histedit 1 --commands - << EOF > pick 617f94f13c0f 1 +4 > drop 888f9082bf99 2 +5 > fold 251d831eeec5 3 +6 > EOF 1 files updated, 0 files merged, 0 files removed, 0 files unresolved merging file warning: conflicts while merging file! (edit, then use 'hg resolve --mark') Fix up the change (fold 251d831eeec5) (hg histedit --continue to resume) [1] There were conflicts, we keep P1 content. This should effectively drop the changes from +6. $ hg status M file ? file.orig $ hg resolve -l U file $ hg revert -r 'p1()' file $ hg resolve --mark file (no more unresolved files) continue: hg histedit --continue $ hg histedit --continue 251d831eeec5: empty changeset saved backup bundle to $TESTTMP/*-backup.hg (glob) $ hg logt --graph @ 1:617f94f13c0f +4 | o 0:0189ba417d34 1+2+3 $ cd .. Test fold through dropped ------------------------- Test corner case where folded revision is separated from its parent by a dropped revision. $ hg init fold-with-dropped $ cd fold-with-dropped $ printf "1\n2\n3\n" > file $ hg commit -Am '1+2+3' adding file $ echo 4 >> file $ hg commit -m '+4' $ echo 5 >> file $ hg commit -m '+5' $ echo 6 >> file $ hg commit -m '+6' $ hg logt -G @ 3:251d831eeec5 +6 | o 2:888f9082bf99 +5 | o 1:617f94f13c0f +4 | o 0:0189ba417d34 1+2+3 $ hg histedit 1 --commands - << EOF > pick 617f94f13c0f 1 +4 > drop 888f9082bf99 2 +5 > fold 251d831eeec5 3 +6 > EOF 1 files updated, 0 files merged, 0 files removed, 0 files unresolved merging file warning: conflicts while merging file! 
(edit, then use 'hg resolve --mark') Fix up the change (fold 251d831eeec5) (hg histedit --continue to resume) [1] $ cat > file << EOF > 1 > 2 > 3 > 4 > 5 > EOF $ hg resolve --mark file (no more unresolved files) continue: hg histedit --continue $ hg commit -m '+5.2' created new head $ echo 6 >> file $ HGEDITOR=cat hg histedit --continue 1 files updated, 0 files merged, 0 files removed, 0 files unresolved +4 *** +5.2 *** +6 HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. HG: -- HG: user: test HG: branch 'default' HG: changed file 1 files updated, 0 files merged, 0 files removed, 0 files unresolved saved backup bundle to $TESTTMP/fold-with-dropped/.hg/strip-backup/55c8d8dc79ce-4066cd98-backup.hg (glob) saved backup bundle to $TESTTMP/fold-with-dropped/.hg/strip-backup/617f94f13c0f-a35700fc-backup.hg (glob) $ hg logt -G @ 1:10c647b2cdd5 +4 | o 0:0189ba417d34 1+2+3 $ hg export tip # HG changeset patch # User test # Date 0 0 # Thu Jan 01 00:00:00 1970 +0000 # Node ID 10c647b2cdd54db0603ecb99b2ff5ce66d5a5323 # Parent 0189ba417d34df9dda55f88b637dcae9917b5964 +4 *** +5.2 *** +6 diff -r 0189ba417d34 -r 10c647b2cdd5 file --- a/file Thu Jan 01 00:00:00 1970 +0000 +++ b/file Thu Jan 01 00:00:00 1970 +0000 @@ -1,3 +1,6 @@ 1 2 3 +4 +5 +6 $ cd .. 
Folding with initial rename (issue3729) --------------------------------------- $ hg init fold-rename $ cd fold-rename $ echo a > a.txt $ hg add a.txt $ hg commit -m a $ hg rename a.txt b.txt $ hg commit -m rename $ echo b >> b.txt $ hg commit -m b $ hg logt --follow b.txt 2:e0371e0426bc b 1:1c4f440a8085 rename 0:6c795aa153cb a $ hg histedit 1c4f440a8085 --commands - 2>&1 << EOF | fixbundle > pick 1c4f440a8085 rename > fold e0371e0426bc b > EOF 1 files updated, 0 files merged, 0 files removed, 0 files unresolved reverting b.txt 1 files updated, 0 files merged, 1 files removed, 0 files unresolved 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg logt --follow b.txt 1:cf858d235c76 rename 0:6c795aa153cb a $ cd .. Folding with swapping --------------------- This is an excuse to test hook with histedit temporary commit (issue4422) $ hg init issue4422 $ cd issue4422 $ echo a > a.txt $ hg add a.txt $ hg commit -m a $ echo b > b.txt $ hg add b.txt $ hg commit -m b $ echo c > c.txt $ hg add c.txt $ hg commit -m c $ hg logt 2:a1a953ffb4b0 c 1:199b6bb90248 b 0:6c795aa153cb a Setup the proper environment variable symbol for the platform, to be subbed into the hook command. #if windows $ NODE="%HG_NODE%" #else $ NODE="\$HG_NODE" #endif $ hg histedit 6c795aa153cb --config hooks.commit="echo commit $NODE" --commands - 2>&1 << EOF | fixbundle > pick 199b6bb90248 b > fold a1a953ffb4b0 c > pick 6c795aa153cb a > EOF 0 files updated, 0 files merged, 3 files removed, 0 files unresolved 0 files updated, 0 files merged, 2 files removed, 0 files unresolved 2 files updated, 0 files merged, 0 files removed, 0 files unresolved commit 9599899f62c05f4377548c32bf1c9f1a39634b0c $ hg logt 1:9599899f62c0 a 0:79b99e9c8e49 b $ echo "foo" > amended.txt $ hg add amended.txt $ hg ci -q --config extensions.largefiles= --amend -I amended.txt Test that folding multiple changes in a row doesn't show multiple editors. 
$ echo foo >> foo $ hg add foo $ hg ci -m foo1 $ echo foo >> foo $ hg ci -m foo2 $ echo foo >> foo $ hg ci -m foo3 $ hg logt 4:21679ff7675c foo3 3:b7389cc4d66e foo2 2:0e01aeef5fa8 foo1 1:578c7455730c a 0:79b99e9c8e49 b $ cat > "$TESTTMP/editor.sh" < echo ran editor >> "$TESTTMP/editorlog.txt" > cat \$1 >> "$TESTTMP/editorlog.txt" > echo END >> "$TESTTMP/editorlog.txt" > echo merged foos > \$1 > EOF $ HGEDITOR="sh \"$TESTTMP/editor.sh\"" hg histedit 1 --commands - 2>&1 < pick 578c7455730c 1 a > pick 0e01aeef5fa8 2 foo1 > fold b7389cc4d66e 3 foo2 > fold 21679ff7675c 4 foo3 > EOF 1 files updated, 0 files merged, 0 files removed, 0 files unresolved reverting foo 0 files updated, 0 files merged, 1 files removed, 0 files unresolved 1 files updated, 0 files merged, 0 files removed, 0 files unresolved merging foo 0 files updated, 0 files merged, 1 files removed, 0 files unresolved 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg logt 2:e8bedbda72c1 merged foos 1:578c7455730c a 0:79b99e9c8e49 b Editor should have run only once $ cat $TESTTMP/editorlog.txt ran editor foo1 *** foo2 *** foo3 HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. HG: -- HG: user: test HG: branch 'default' HG: added foo END $ cd .. 
mercurial-3.7.3/tests/test-symlink-os-yes-fs-no.py0000644000175000017500000000224712676531525021550 0ustar mpmmpm00000000000000import os, sys, time from mercurial import hg, ui, commands, util TESTDIR = os.environ["TESTDIR"] BUNDLEPATH = os.path.join(TESTDIR, 'bundles', 'test-no-symlinks.hg') # only makes sense to test on os which supports symlinks if not getattr(os, "symlink", False): sys.exit(80) # SKIPPED_STATUS defined in run-tests.py u = ui.ui() # hide outer repo hg.peer(u, {}, '.', create=True) # clone with symlink support hg.clone(u, {}, BUNDLEPATH, 'test0') repo = hg.repository(u, 'test0') # wait a bit, or the status call wont update the dirstate time.sleep(1) commands.status(u, repo) # now disable symlink support -- this is what os.symlink would do on a # non-symlink file system def symlink_failure(src, dst): raise OSError(1, "Operation not permitted") os.symlink = symlink_failure # dereference links as if a Samba server has exported this to a # Windows client for f in 'test0/a.lnk', 'test0/d/b.lnk': os.unlink(f) fp = open(f, 'wb') fp.write(util.readfile(f[:-4])) fp.close() # reload repository u = ui.ui() repo = hg.repository(u, 'test0') commands.status(u, repo) # try cloning a repo which contains symlinks u = ui.ui() hg.clone(u, {}, BUNDLEPATH, 'test1') mercurial-3.7.3/tests/test-manifest-merging.t0000644000175000017500000000136012676531525020667 0ustar mpmmpm00000000000000 $ hg init base $ cd base $ echo 'alpha' > alpha $ hg ci -A -m 'add alpha' adding alpha $ cd .. $ hg clone base work updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd work $ echo 'beta' > beta $ hg ci -A -m 'add beta' adding beta $ cd .. $ cd base $ echo 'gamma' > gamma $ hg ci -A -m 'add gamma' adding gamma $ cd .. 
$ cd work $ hg pull -q $ hg merge 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) Update --clean to revision 1 to simulate a failed merge: $ rm alpha beta gamma $ hg update --clean 1 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd .. mercurial-3.7.3/tests/test-symlink-os-yes-fs-no.py.out0000644000175000017500000000066612676531525022361 0ustar mpmmpm00000000000000requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 4 changes to 4 files updating to branch default 4 files updated, 0 files merged, 0 files removed, 0 files unresolved requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 4 changes to 4 files updating to branch default 4 files updated, 0 files merged, 0 files removed, 0 files unresolved mercurial-3.7.3/tests/test-commit-interactive-curses.t0000644000175000017500000001035312676531525022542 0ustar mpmmpm00000000000000Set up a repo $ cat <> $HGRCPATH > [ui] > interactive = true > [experimental] > crecord = true > crecordtest = testModeCommands > EOF $ hg init a $ cd a Committing some changes but stopping on the way $ echo "a" > a $ hg add a $ cat <testModeCommands > TOGGLE > X > EOF $ hg commit -i -m "a" -d "0 0" no changes to record $ hg tip changeset: -1:000000000000 tag: tip user: date: Thu Jan 01 00:00:00 1970 +0000 Committing some changes $ cat <testModeCommands > X > EOF $ hg commit -i -m "a" -d "0 0" $ hg tip changeset: 0:cb9a9f314b8b tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a Check that commit -i works with no changes $ hg commit -i no changes to record Committing only one file $ echo "a" >> a >>> open('b', 'wb').write("1\n2\n3\n4\n5\n6\n7\n8\n9\n10\n") $ hg add b $ cat <testModeCommands > TOGGLE > KEY_DOWN > X > EOF $ hg commit -i -m "one file" -d "0 0" $ hg tip changeset: 1:fb2705a663ea tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 
summary: one file $ hg cat -r tip a a $ cat a a a Committing only one hunk while aborting edition of hunk - Untoggle all the hunks, go down to the second file - unfold it - go down to second hunk (1 for the first hunk, 1 for the first hunkline, 1 for the second hunk, 1 for the second hunklike) - toggle the second hunk - toggle on and off the amend mode (to check that it toggles off) - edit the hunk and quit the editor immediately with non-zero status - commit $ printf "printf 'editor ran\n'; exit 1" > editor.sh $ echo "x" > c $ cat b >> c $ echo "y" >> c $ mv c b $ cat <testModeCommands > A > KEY_DOWN > f > KEY_DOWN > KEY_DOWN > KEY_DOWN > KEY_DOWN > TOGGLE > a > a > e > X > EOF $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit -i -m "one hunk" -d "0 0" editor ran $ rm editor.sh $ hg tip changeset: 2:7d10dfe755a8 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: one hunk $ hg cat -r tip b 1 2 3 4 5 6 7 8 9 10 y $ cat b x 1 2 3 4 5 6 7 8 9 10 y $ hg commit -m "other hunks" $ hg tip changeset: 3:a6735021574d tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: other hunks $ hg cat -r tip b x 1 2 3 4 5 6 7 8 9 10 y Newly added files can be selected with the curses interface $ hg update -C . 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo "hello" > x $ hg add x $ cat <testModeCommands > TOGGLE > TOGGLE > X > EOF $ hg st A x ? testModeCommands $ hg commit -i -m "newly added file" -d "0 0" $ hg st ? testModeCommands Amend option works $ echo "hello world" > x $ hg diff -c . diff -r a6735021574d -r 2b0e9be4d336 x --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/x Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +hello $ cat <testModeCommands > a > X > EOF $ hg commit -i -m "newly added file" -d "0 0" saved backup bundle to $TESTTMP/a/.hg/strip-backup/2b0e9be4d336-28bbe4e2-amend-backup.hg (glob) $ hg diff -c . 
diff -r a6735021574d -r c1d239d165ae x --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/x Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +hello world Editing a hunk puts you back on that hunk when done editing (issue5041) To do that, we change two lines in a file, pretend to edit the second line, exit, toggle the line selected at the end of the edit and commit. The first line should be recorded if we were put on the second line at the end of the edit. $ hg update -C . 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo "foo" > x $ echo "hello world" >> x $ echo "bar" >> x $ cat <testModeCommands > f > KEY_DOWN > KEY_DOWN > KEY_DOWN > KEY_DOWN > e > TOGGLE > X > EOF $ printf "printf 'editor ran\n'; exit 0" > editor.sh $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit -i -m "edit hunk" -d "0 0" editor ran $ hg cat -r . x foo hello world mercurial-3.7.3/tests/test-bisect3.t0000644000175000017500000001404212676531525016770 0ustar mpmmpm00000000000000# Here we create a simple DAG which has just enough of the required # topology to test all the bisection status labels: # # 13--14 # / # 0--1--2--3---------9--10--11--12 # \ / # 4--5--6--7--8 $ hg init $ echo '0' >a $ hg add a $ hg ci -u test -d '0 0' -m '0' $ echo '1' >a $ hg ci -u test -d '1 0' -m '1' branch 2-3 $ echo '2' >b $ hg add b $ hg ci -u test -d '2 0' -m '2' $ echo '3' >b $ hg ci -u test -d '3 0' -m '3' branch 4-8 $ hg up -r 1 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo '4' >c $ hg add c $ hg ci -u test -d '4 0' -m '4' created new head $ echo '5' >c $ hg ci -u test -d '5 0' -m '5' $ echo '6' >c $ hg ci -u test -d '6 0' -m '6' $ echo '7' >c $ hg ci -u test -d '7 0' -m '7' $ echo '8' >c $ hg ci -u test -d '8 0' -m '8' merge $ hg merge -r 3 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -u test -d '9 0' -m '9=8+3' $ echo '10' >a $ hg ci -u test -d '10 0' -m '10' $ echo '11' >a $ hg ci -u 
test -d '11 0' -m '11' $ echo '12' >a $ hg ci -u test -d '12 0' -m '12' unrelated branch $ hg up -r 3 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo '13' >d $ hg add d $ hg ci -u test -d '13 0' -m '13' created new head $ echo '14' >d $ hg ci -u test -d '14 0' -m '14' mark changesets $ hg bisect --reset $ hg bisect --good 4 $ hg bisect --good 6 $ hg bisect --bad 12 Testing changeset 9:2197c557e14c (6 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect --bad 10 Testing changeset 8:e74a86251f58 (4 changesets remaining, ~2 tests) 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect --skip 7 Testing changeset 8:e74a86251f58 (4 changesets remaining, ~2 tests) 0 files updated, 0 files merged, 0 files removed, 0 files unresolved test template $ hg log --template '{rev}:{node|short} {bisect}\n' 14:cbf2f3105bbf 13:e07efca37c43 12:98c6b56349c0 bad 11:03f491376e63 bad (implicit) 10:c012b15e2409 bad 9:2197c557e14c untested 8:e74a86251f58 untested 7:a5f87041c899 skipped 6:7d997bedcd8d good 5:2dd1875f1028 good (implicit) 4:2a1daef14cd4 good 3:8417d459b90c ignored 2:e1355ee1f23e ignored 1:ce7c85e06a9f good (implicit) 0:b4e73ffab476 good (implicit) $ hg log --template '{bisect|shortbisect} {rev}:{node|short}\n' 14:cbf2f3105bbf 13:e07efca37c43 B 12:98c6b56349c0 B 11:03f491376e63 B 10:c012b15e2409 U 9:2197c557e14c U 8:e74a86251f58 S 7:a5f87041c899 G 6:7d997bedcd8d G 5:2dd1875f1028 G 4:2a1daef14cd4 I 3:8417d459b90c I 2:e1355ee1f23e G 1:ce7c85e06a9f G 0:b4e73ffab476 test style $ hg log --style bisect changeset: 14:cbf2f3105bbf bisect: tag: tip user: test date: Thu Jan 01 00:00:14 1970 +0000 summary: 14 changeset: 13:e07efca37c43 bisect: parent: 3:8417d459b90c user: test date: Thu Jan 01 00:00:13 1970 +0000 summary: 13 changeset: 12:98c6b56349c0 bisect: bad user: test date: Thu Jan 01 00:00:12 1970 +0000 summary: 12 changeset: 11:03f491376e63 bisect: bad (implicit) user: 
test date: Thu Jan 01 00:00:11 1970 +0000 summary: 11 changeset: 10:c012b15e2409 bisect: bad user: test date: Thu Jan 01 00:00:10 1970 +0000 summary: 10 changeset: 9:2197c557e14c bisect: untested parent: 8:e74a86251f58 parent: 3:8417d459b90c user: test date: Thu Jan 01 00:00:09 1970 +0000 summary: 9=8+3 changeset: 8:e74a86251f58 bisect: untested user: test date: Thu Jan 01 00:00:08 1970 +0000 summary: 8 changeset: 7:a5f87041c899 bisect: skipped user: test date: Thu Jan 01 00:00:07 1970 +0000 summary: 7 changeset: 6:7d997bedcd8d bisect: good user: test date: Thu Jan 01 00:00:06 1970 +0000 summary: 6 changeset: 5:2dd1875f1028 bisect: good (implicit) user: test date: Thu Jan 01 00:00:05 1970 +0000 summary: 5 changeset: 4:2a1daef14cd4 bisect: good parent: 1:ce7c85e06a9f user: test date: Thu Jan 01 00:00:04 1970 +0000 summary: 4 changeset: 3:8417d459b90c bisect: ignored user: test date: Thu Jan 01 00:00:03 1970 +0000 summary: 3 changeset: 2:e1355ee1f23e bisect: ignored user: test date: Thu Jan 01 00:00:02 1970 +0000 summary: 2 changeset: 1:ce7c85e06a9f bisect: good (implicit) user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: 1 changeset: 0:b4e73ffab476 bisect: good (implicit) user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0 $ hg log --quiet --style bisect 14:cbf2f3105bbf 13:e07efca37c43 B 12:98c6b56349c0 B 11:03f491376e63 B 10:c012b15e2409 U 9:2197c557e14c U 8:e74a86251f58 S 7:a5f87041c899 G 6:7d997bedcd8d G 5:2dd1875f1028 G 4:2a1daef14cd4 I 3:8417d459b90c I 2:e1355ee1f23e G 1:ce7c85e06a9f G 0:b4e73ffab476 $ hg --config extensions.color= --color=debug log --quiet --style bisect [log.bisect| ] 14:cbf2f3105bbf [log.bisect| ] 13:e07efca37c43 [log.bisect bisect.bad|B] 12:98c6b56349c0 [log.bisect bisect.bad|B] 11:03f491376e63 [log.bisect bisect.bad|B] 10:c012b15e2409 [log.bisect bisect.untested|U] 9:2197c557e14c [log.bisect bisect.untested|U] 8:e74a86251f58 [log.bisect bisect.skipped|S] 7:a5f87041c899 [log.bisect bisect.good|G] 6:7d997bedcd8d [log.bisect 
bisect.good|G] 5:2dd1875f1028 [log.bisect bisect.good|G] 4:2a1daef14cd4 [log.bisect bisect.ignored|I] 3:8417d459b90c [log.bisect bisect.ignored|I] 2:e1355ee1f23e [log.bisect bisect.good|G] 1:ce7c85e06a9f [log.bisect bisect.good|G] 0:b4e73ffab476 mercurial-3.7.3/tests/test-import-git.t0000644000175000017500000004652012676531525017535 0ustar mpmmpm00000000000000 $ hg init repo $ cd repo New file: $ hg import -d "1000000 0" -mnew - < diff --git a/new b/new > new file mode 100644 > index 0000000..7898192 > --- /dev/null > +++ b/new > @@ -0,0 +1 @@ > +a > EOF applying patch from stdin $ hg tip -q 0:ae3ee40d2079 New empty file: $ hg import -d "1000000 0" -mempty - < diff --git a/empty b/empty > new file mode 100644 > EOF applying patch from stdin $ hg tip -q 1:ab199dc869b5 $ hg locate empty empty chmod +x: $ hg import -d "1000000 0" -msetx - < diff --git a/new b/new > old mode 100644 > new mode 100755 > EOF applying patch from stdin #if execbit $ hg tip -q 2:3a34410f282e $ test -x new $ hg rollback -q #else $ hg tip -q 1:ab199dc869b5 #endif Copy and removing x bit: $ hg import -f -d "1000000 0" -mcopy - < diff --git a/new b/copy > old mode 100755 > new mode 100644 > similarity index 100% > copy from new > copy to copy > diff --git a/new b/copyx > similarity index 100% > copy from new > copy to copyx > EOF applying patch from stdin $ test -f copy #if execbit $ test ! 
-x copy $ test -x copyx $ hg tip -q 2:21dfaae65c71 #else $ hg tip -q 2:0efdaa8e3bf3 #endif $ hg up -qCr1 $ hg rollback -q Copy (like above but independent of execbit): $ hg import -d "1000000 0" -mcopy - < diff --git a/new b/copy > similarity index 100% > copy from new > copy to copy > diff --git a/new b/copyx > similarity index 100% > copy from new > copy to copyx > EOF applying patch from stdin $ hg tip -q 2:0efdaa8e3bf3 $ test -f copy $ cat copy a $ hg cat copy a Rename: $ hg import -d "1000000 0" -mrename - < diff --git a/copy b/rename > similarity index 100% > rename from copy > rename to rename > EOF applying patch from stdin $ hg tip -q 3:b1f57753fad2 $ hg locate copyx empty new rename Delete: $ hg import -d "1000000 0" -mdelete - < diff --git a/copyx b/copyx > deleted file mode 100755 > index 7898192..0000000 > --- a/copyx > +++ /dev/null > @@ -1 +0,0 @@ > -a > EOF applying patch from stdin $ hg tip -q 4:1bd1da94b9b2 $ hg locate empty new rename $ test -f copyx [1] Regular diff: $ hg import -d "1000000 0" -mregular - < diff --git a/rename b/rename > index 7898192..72e1fe3 100644 > --- a/rename > +++ b/rename > @@ -1 +1,5 @@ > a > +a > +a > +a > +a > EOF applying patch from stdin $ hg tip -q 5:46fe99cb3035 Copy and modify: $ hg import -d "1000000 0" -mcopymod - < diff --git a/rename b/copy2 > similarity index 80% > copy from rename > copy to copy2 > index 72e1fe3..b53c148 100644 > --- a/rename > +++ b/copy2 > @@ -1,5 +1,5 @@ > a > a > -a > +b > a > a > EOF applying patch from stdin $ hg tip -q 6:ffeb3197c12d $ hg cat copy2 a a b a a Rename and modify: $ hg import -d "1000000 0" -mrenamemod - < diff --git a/copy2 b/rename2 > similarity index 80% > rename from copy2 > rename to rename2 > index b53c148..8f81e29 100644 > --- a/copy2 > +++ b/rename2 > @@ -1,5 +1,5 @@ > a > a > b > -a > +c > a > EOF applying patch from stdin $ hg tip -q 7:401aede9e6bb $ hg locate copy2 [1] $ hg cat rename2 a a b c a One file renamed multiple times: $ hg import -d "1000000 0" 
-mmultirenames - < diff --git a/rename2 b/rename3 > rename from rename2 > rename to rename3 > diff --git a/rename2 b/rename3-2 > rename from rename2 > rename to rename3-2 > EOF applying patch from stdin $ hg tip -q 8:2ef727e684e8 $ hg log -vr. --template '{rev} {files} / {file_copies}\n' 8 rename2 rename3 rename3-2 / rename3 (rename2)rename3-2 (rename2) $ hg locate rename2 rename3 rename3-2 rename3 rename3-2 $ hg cat rename3 a a b c a $ hg cat rename3-2 a a b c a $ echo foo > foo $ hg add foo $ hg ci -m 'add foo' Binary files and regular patch hunks: $ hg import -d "1000000 0" -m binaryregular - < diff --git a/binary b/binary > new file mode 100644 > index 0000000000000000000000000000000000000000..593f4708db84ac8fd0f5cc47c634f38c013fe9e4 > GIT binary patch > literal 4 > Lc\${NkU|;|M00aO5 > > diff --git a/foo b/foo2 > rename from foo > rename to foo2 > EOF applying patch from stdin $ hg tip -q 10:27377172366e $ cat foo2 foo $ hg manifest --debug | grep binary 045c85ba38952325e126c70962cc0f9d9077bc67 644 binary Multiple binary files: $ hg import -d "1000000 0" -m multibinary - < diff --git a/mbinary1 b/mbinary1 > new file mode 100644 > index 0000000000000000000000000000000000000000..593f4708db84ac8fd0f5cc47c634f38c013fe9e4 > GIT binary patch > literal 4 > Lc\${NkU|;|M00aO5 > > diff --git a/mbinary2 b/mbinary2 > new file mode 100644 > index 0000000000000000000000000000000000000000..112363ac1917b417ffbd7f376ca786a1e5fa7490 > GIT binary patch > literal 5 > Mc\${NkU|\`?^000jF3jhEB > > EOF applying patch from stdin $ hg tip -q 11:18b73a84b4ab $ hg manifest --debug | grep mbinary 045c85ba38952325e126c70962cc0f9d9077bc67 644 mbinary1 a874b471193996e7cb034bb301cac7bdaf3e3f46 644 mbinary2 Binary file and delta hunk (we build the patch using this sed hack to avoid an unquoted ^, which check-code says breaks sh on Solaris): $ sed 's/ caret /^/g;s/ dollarparen /$(/g' > quote-hack.patch <<'EOF' > diff --git a/delta b/delta > new file mode 100644 > index 
0000000000000000000000000000000000000000..8c9b7831b231c2600843e303e66b521353a200b3 > GIT binary patch > literal 3749 > zcmV;W4qEYvP) zcVTj606}DLVr3vnZDD6+Qe|Oed2z{QJOBU=M@d9MRCwC#oC!>o#}>x{(W-y~UN*tK > z%A%sxiUy2Ys)0Vm#ueArYKoYqX;GuiqZpgirM6nCVoYk?YNAz3G~z;BZ~@~&OQEe4 > zmGvS5isFJI;Pd_7J+EKxyHZeu` caret t4r2>F;h-+VK3{_{WoGv8dSpFDYDrA%3UX03pt > zOaVoi0*W#P6lDr1$`nwPDWE7*rhuYM0Y#YtiZTThWeObRRaI42 > zS3iFIxJ8Q=EnBv1Z7?pBw_bLjJb3V+tgP(Tty_2R-mR#p04x78n2n7MSOFyt4i1iv > zjxH`PPEJmgD7U?IK&h;(EGQ@_DJc<@01=4fiNXHcKZ8LhZQ8T}E3U4tUS3}OrcgQW > zWdX{K8#l7Ev&#$ysR)G#0*rC+Dj$|_qJ`@D*stNP_AFUe&x!Q > zJ9q9B7Z=ym)MyZ?Tg1ROunUYr81nV?B@!tYS~5_|%gfW#(_s<4UN1!Q?Dv8d>g#m6 > z%*@R2@bI2JdnzxQ!EDU`$eQY!tgI~Zn$prz;gaXNod5*5p(1Bz=P$qfvZ$y?dC@X~ > zlAD+NAKhB{=;6bMwzjqn>9mavvKOGd`s%A+fBiL>Q;xJWpa72C+}u{JTHUX>{~}Qj > zUb%hyHgN~c?cBLjInvUALMD9g-aXt54ZL8AOCvXL-V6!~ijR*kEG$&Mv?!pE61OlI > z8nzMSPE8F7bH|Py*RNl1VUCggq@_6gkEeiz7{rmTeuNTW6+KVS#0FG%IHf-3L > zGiS21vn>WCCr+GLx caret !uNetzB6u3o(w6&1C2?_LW8ij$+$sZ*zZ`|US3H@8N~%&V%Z > zAeA0HdhFS=$6|nzn3%YH`SN<>DQRO;Qc caret )dfdvA caret 5u`Xf;Zzu zzkh#LA)v7gpoE5ou3o*GoUUF%b#iht&kl9d0)><$FE1}ACr68;uCA`6DrGmz_U+rp > zL>Rx;X_yhk$fP_yJrTCQ|NgsW0A<985g&c@k-NKly<>mgU8n||ZPPV<`SN8#%$+-T > zfP$T!ou8jypFVwnzqhxyUvIxXd-wF~*U!ht=hCH1wzjqn9x#)IrhDa;S0JbK caret z_$W > zd(8rX@;7|t*;GJ5h$SZ{v(}+UBEs$4w~?{@9%`_Z zunCgwT@1|CU9+;X caret 4z&|M~@yw23Ay50NFWn=FqF%yLZEUty;AT2??1oV@B)Nt))J7 > zh>{5j2@f7T=-an%L_`E)h;mZ4D_5>?7tjQtVPRo2XU-&;mX(!l-MSTJP4XWY82JAC > z@57+y&!1=P{Mn{W8)-HzEsgAtd63}Cazc>O6vGb>51%@9DzbyI3?4j~$ijmT95_IS > zS#r!LCDW%*4-O7CGnkr$xXR1RQ&UrA z*s){8pw68;i+kiG%CpBKYSJLLFyq&*U8}qDp+kpe&6ZK?&s7y?b}i > zuwcOgO%x-27A;y785zknl_{sU;E6v$8{pWmVS{KaJPpu`i;HP$#flY@u~Ua~K3%tN > z-LhrNh{9SoHgDd%WXTc$$~Dq{?AWou3!H&?V8K{ caret {P9Ot5vecD?%1&-E-ntBFj87( > zy5`QE%QRX7qcHC%1{Ua}M~}L6=`wQUNEQ=I;qc+ZMMXtK2T+0os;jEco;}OV9z1w3 > zARqv caret bm-85xnRCng3OT|MyVSmR3ND7 caret ?KaQGG! 
caret (aTbo1N;Nz;X3Q9FJbwK6`0?Yp > zj*X2ac;Pw3!I2|JShDaF>-gJmzm1NLj){rk&o|$E caret WAsfrK=x&@B!`w7Hik81sPz4 > zuJTaiCppM>-+c!wPzcUw)5@?J4U-u|pJ~xbWUe-C+60k caret 7>9!)56DbjmA~`OJJ40v > zu3hCA7eJXZWeN|1iJLu87$;+fS8+Kq6O`aT)*_x@sY#t7LxwoEcVw*)cWhhQW@l%! > z{#Z=y+qcK@%z{p*D=8_Fcg278AnH3fI5;~yGu?9TscxXaaP*4$f zpxmunH#%=+ICMvZA~wyNH%~eMl!-g caret R!cYJ#WmLq5N8viz#J%%LPtkO?V)tZ81cp> > z{ALK?fNPePmd;289&M8Q3>YwgZX5GcGY&n>K1 zpJmIJO`A3iy+Y3X`k>cY-@}Iw2Onq`=!ba3eATgs3yg3Wej=+P-Z8WF#w=RXvS@J3 > zEyhVTj-gO?kfDu1g9afo9lx6 zO6c6FbNt@;;*w$z;N|H>h{czme)_4V6UC4hv**kX2@L caret Bgds dollarparen &P7M4dhfmWe)!=B > zR3X=Y{P9N}p@-##@1ZNW1YbVaiP~D@8m&i*Hpp&@ > z`9!Sj+O;byD~s8qZ>6QB8uv7Bpn&&?xe;;e z6DH*4=AB7C1D9Amu?ia-wtxSAlmTEO96XHx)-+rKP;ip$pukuSJGW3P1aUmc2yo%) > z&d1X+1qzaag-%x+eKHx{?Afz3GBQSw9u0lw zHr6)1ynEF zrok&TPU40rL0ZYDwenNrrmPZ`gjo@DEF`7 caret cKP||pUr;+r)hyn9O37=xA`3%Bj-ih > z+1usk<%5G-y+R?tA`qY=)6&vNjL{P?QzHg%P%>`ZxP=QB%DHY6L26?36V_p caret {}n$q > z3@9W=KmGI*Ng_Q#AzA%-z|Z caret |#oW(hkfgpuS$RKRhlrarX%efMMCs}GLChec5+y{6 > z1Qnxim_C-fmQuaAK_NUHUBV&;1c0V)wjihVnlt caret asFCe0&a@tqp > zEEy;$L}D$X6)wfQNl8gu6Z>oB3_RrP=gTyK2@@w#LbQfLNHj>Q&z(C5wUFhK+}0aV > zSohlc=7K+spN z!}t+Yljq7-p$X;4_YL?6d;mdY3R##o1e%rlPxrsMh8|;sKTr~ caret QD#sw3&vS$FwlTk > zp1#Gw!Qo-$LtvpXt#ApV0g) caret F=qFB`VB!W297x=$mr<$>rco3v$QKih_xN!k6;M=@ > zCr?gDNQj7tm@;JwD;Ty&NlBSCYZk(b3dZeN8D4h2{r20dSFc7;(>E&r`s=TVtzpB4 > zk+ caret N&zCAiRns(?p6iBlk9v&h{1ve(FNtc)td51M>)TkXhc6{>5C)`fS$&)A1*CP1% > zld+peue4aYbg3C0!+4mu+}vE caret j_feX+ZijvffBI7Ofh#RZ*U3<3J5(+nfRCzexqQ5 > zgM&##Y4Dd{e%ZKjqrbm@|Ni}l4jo!AqtFynj3Xsd$o caret ?yV4$|UQ(j&UWCH>M=o_&N > zmclXc3i|Q#<;#EoG>~V}4unTHbUK}u=y4;rA3S&vzC3 caret aJP!&D4RvvGfoyo(>C>la > zijP<=v>X{3Ne&2BXo}DV8l0V-jdv`$am0ubG{Wuh%CTd|l9Q7m;G&|U@#Dvbhlj(d > zg6W{3ATxYt#T?)3;SmIgOP4M|Dki~I_TX7SxP0x}wI~DQI7Lhm2BI7gph(aPIFAd; > zQ&UsF`Q{rOz+z=87c5v%@5u~d6dWV5OlX`oH3cAH&UlvsZUEo(Q(P|lKs17rXvaiU > zQcj}IEufi1+Bnh6&(EhF{7O3vLHp`jjlp0J zwrmcd5MnP}xByB_)P@{J>DR9x6;`cUwPM8z){yooNiXPOc9_{W-gtwxE5TUg0vJk6 
> zO#JGruV&1cL6VGK2?+_YQr4`+EY8;Sm$9U$uuGRN=uj3k7?O9b+R~J7t_y*K64ZnI > zM+{aEpcRbC29ZyG!Cfdp > zutFf`Q`vljgo!(wHf=)F#m2_MIuj;L(2ja2YsQRX+rswV{d z;tq*`y}dm#NDJHKlV}uTIm!_vAq5E7!X-p{P=Z=Sh668>PuVS1*6e}OwOiMc;u3OQ > z@Bs)w3=lzfKoufH$SFuPG@uZ4NOnM#+=8LnQ2Q4zUd+nM+OT26;lqbN{P07dhH{jH > zManE8 caret dLms-Q2;1kB<*Q1a3f8kZr;xX=!Qro@`~@xN*Qj>gx;i;0Z24!~i2uLb`}v > zA?R$|wvC+m caret Ups=*(4lDh*=UN8{5h(A?p#D caret 2N$8u4Z55!q?ZAh(iEEng9_Zi>IgO > z#~**JC8hE4@n{hO&8btT5F*?nC_%LhA3i)PDhh-pB_&1wGrDIl caret *=8x3n&;akBf caret - > zJd&86kq$%%907v caret tgWoQdwI`|oNK%VvU~S#CHFD%&|Ni~t > zKJ(|#H`$<5W+6ZkBb213rXonKZLB+X> caret L}J@W6osP3piLD_5?R!`S}*{xLBzFiL4@ > zX+}l{`A%?f@T5tT%ztu60p;)be`fWC`tP@WpO=?cpf8Xuf1OSj6d3f@Ki(ovDYq%0 > z{4ZSe`kOay5@=lAT!}vFzxyemC{sXDrhuYM0Y#ZI1r%ipD9W11{w=@&xgJ}t2x;ep > P00000NkvXXu0mjfZ5|Er > > literal 0 > HcmV?d00001 > > EOF $ hg import -d "1000000 0" -m delta quote-hack.patch applying quote-hack.patch $ rm quote-hack.patch $ hg manifest --debug | grep delta 9600f98bb60ce732634d126aaa4ac1ec959c573e 644 delta $ hg import -d "1000000 0" -m delta - <<'EOF' > diff --git a/delta b/delta > index 8c9b7831b231c2600843e303e66b521353a200b3..0021dd95bc0dba53c39ce81377126d43731d68df 100644 > GIT binary patch > delta 49 > zcmZ1~yHs|=21Z8J$r~9bFdA-lVv=EEw4WT$qRf2QSa5SIOAHI6(&k4T8H|kLo4vWB > FSO9ZT4bA`n > > delta 49 > zcmV-10M7rV9i<(xumJ(}ld%Di0Xefm0vrMXpOaq%BLm9I%d>?9Tm%6Vv*HM70RcC& > HOA1;9yU-AD > > EOF applying patch from stdin $ hg manifest --debug | grep delta 56094bbea136dcf8dbd4088f6af469bde1a98b75 644 delta Filenames with spaces: $ sed 's,EOL$,,g' < diff --git a/foo bar b/foo bar > new file mode 100644 > index 0000000..257cc56 > --- /dev/null > +++ b/foo bar EOL > @@ -0,0 +1 @@ > +foo > EOF applying patch from stdin $ hg tip -q 14:4b79479c9a6d $ cat "foo bar" foo Copy then modify the original file: $ hg import -d "1000000 0" -m copy-mod-orig - < diff --git a/foo2 b/foo2 > index 257cc56..fe08ec6 100644 > --- a/foo2 > +++ b/foo2 > @@ -1 +1,2 @@ > foo > +new line > 
diff --git a/foo2 b/foo3 > similarity index 100% > copy from foo2 > copy to foo3 > EOF applying patch from stdin $ hg tip -q 15:9cbe44af4ae9 $ cat foo3 foo Move text file and patch as binary $ echo a > text2 $ hg ci -Am0 adding text2 $ hg import -d "1000000 0" -m rename-as-binary - <<"EOF" > diff --git a/text2 b/binary2 > rename from text2 > rename to binary2 > index 78981922613b2afb6025042ff6bd878ac1994e85..10efcb362e9f3b3420fcfbfc0e37f3dc16e29757 > GIT binary patch > literal 5 > Mc$`b*O5$Pw00T?_*Z=?k > > EOF applying patch from stdin $ cat binary2 a b \x00 (no-eol) (esc) $ hg st --copies --change . A binary2 text2 R text2 Invalid base85 content $ hg rollback repository tip rolled back to revision 16 (undo import) working directory now based on revision 16 $ hg revert -aq $ hg import -d "1000000 0" -m invalid-binary - <<"EOF" > diff --git a/text2 b/binary2 > rename from text2 > rename to binary2 > index 78981922613b2afb6025042ff6bd878ac1994e85..10efcb362e9f3b3420fcfbfc0e37f3dc16e29757 > GIT binary patch > literal 5 > Mc$`b*O.$Pw00T?_*Z=?k > > EOF applying patch from stdin abort: could not decode "binary2" binary patch: bad base85 character at position 6 [255] $ hg revert -aq $ hg import -d "1000000 0" -m rename-as-binary - <<"EOF" > diff --git a/text2 b/binary2 > rename from text2 > rename to binary2 > index 78981922613b2afb6025042ff6bd878ac1994e85..10efcb362e9f3b3420fcfbfc0e37f3dc16e29757 > GIT binary patch > literal 6 > Mc$`b*O5$Pw00T?_*Z=?k > > EOF applying patch from stdin abort: "binary2" length is 5 bytes, should be 6 [255] $ hg revert -aq $ hg import -d "1000000 0" -m rename-as-binary - <<"EOF" > diff --git a/text2 b/binary2 > rename from text2 > rename to binary2 > index 78981922613b2afb6025042ff6bd878ac1994e85..10efcb362e9f3b3420fcfbfc0e37f3dc16e29757 > GIT binary patch > Mc$`b*O5$Pw00T?_*Z=?k > > EOF applying patch from stdin abort: could not extract "binary2" binary data [255] Simulate a copy/paste turning LF into CRLF (issue2870) $ hg revert -aq $ cat 
> binary.diff <<"EOF" > diff --git a/text2 b/binary2 > rename from text2 > rename to binary2 > index 78981922613b2afb6025042ff6bd878ac1994e85..10efcb362e9f3b3420fcfbfc0e37f3dc16e29757 > GIT binary patch > literal 5 > Mc$`b*O5$Pw00T?_*Z=?k > > EOF >>> fp = file('binary.diff', 'rb') >>> data = fp.read() >>> fp.close() >>> file('binary.diff', 'wb').write(data.replace('\n', '\r\n')) $ rm binary2 $ hg import --no-commit binary.diff applying binary.diff $ cd .. Consecutive import with renames (issue2459) $ hg init issue2459 $ cd issue2459 $ hg import --no-commit --force - < diff --git a/a b/a > new file mode 100644 > EOF applying patch from stdin $ hg import --no-commit --force - < diff --git a/a b/b > rename from a > rename to b > EOF applying patch from stdin a has not been committed yet, so no copy data will be stored for b. $ hg debugstate a 0 -1 unset b $ hg ci -m done $ cd .. Renames and strip $ hg init renameandstrip $ cd renameandstrip $ echo a > a $ hg ci -Am adda adding a $ hg import --no-commit -p2 - < diff --git a/foo/a b/foo/b > rename from foo/a > rename to foo/b > EOF applying patch from stdin $ hg st --copies A b a R a Prefix with strip, renames, creates etc $ hg revert -aC undeleting a forgetting b $ rm b $ mkdir -p dir/dir2 $ echo b > dir/dir2/b $ echo c > dir/dir2/c $ echo d > dir/d $ hg ci -Am addbcd adding dir/d adding dir/dir2/b adding dir/dir2/c prefix '.' is the same as no prefix $ hg import --no-commit --prefix . 
- < diff --git a/dir/a b/dir/a > --- /dev/null > +++ b/dir/a > @@ -0,0 +1 @@ > +aaaa > diff --git a/dir/d b/dir/d > --- a/dir/d > +++ b/dir/d > @@ -1,1 +1,2 @@ > d > +dddd > EOF applying patch from stdin $ cat dir/a aaaa $ cat dir/d d dddd $ hg revert -aC forgetting dir/a (glob) reverting dir/d (glob) $ rm dir/a prefix with default strip $ hg import --no-commit --prefix dir/ - < diff --git a/a b/a > --- /dev/null > +++ b/a > @@ -0,0 +1 @@ > +aaa > diff --git a/d b/d > --- a/d > +++ b/d > @@ -1,1 +1,2 @@ > d > +dd > EOF applying patch from stdin $ cat dir/a aaa $ cat dir/d d dd $ hg revert -aC forgetting dir/a (glob) reverting dir/d (glob) $ rm dir/a (test that prefixes are relative to the cwd) $ mkdir tmpdir $ cd tmpdir $ hg import --no-commit -p2 --prefix ../dir/ - < diff --git a/foo/a b/foo/a > new file mode 100644 > --- /dev/null > +++ b/foo/a > @@ -0,0 +1 @@ > +a > diff --git a/foo/dir2/b b/foo/dir2/b2 > rename from foo/dir2/b > rename to foo/dir2/b2 > diff --git a/foo/dir2/c b/foo/dir2/c > --- a/foo/dir2/c > +++ b/foo/dir2/c > @@ -0,0 +1 @@ > +cc > diff --git a/foo/d b/foo/d > deleted file mode 100644 > --- a/foo/d > +++ /dev/null > @@ -1,1 +0,0 @@ > -d > EOF applying patch from stdin $ hg st --copies M dir/dir2/c A dir/a A dir/dir2/b2 dir/dir2/b R dir/d R dir/dir2/b $ cd .. Renames, similarity and git diff $ hg revert -aC forgetting dir/a (glob) undeleting dir/d (glob) undeleting dir/dir2/b (glob) forgetting dir/dir2/b2 (glob) reverting dir/dir2/c (glob) $ rm dir/a dir/dir2/b2 $ hg import --similarity 90 --no-commit - < diff --git a/a b/b > rename from a > rename to b > EOF applying patch from stdin $ hg st --copies A b a R a $ cd .. 
Pure copy with existing destination $ hg init copytoexisting $ cd copytoexisting $ echo a > a $ echo b > b $ hg ci -Am add adding a adding b $ hg import --no-commit - < diff --git a/a b/b > copy from a > copy to b > EOF applying patch from stdin abort: cannot create b: destination already exists [255] $ cat b b Copy and changes with existing destination $ hg import --no-commit - < diff --git a/a b/b > copy from a > copy to b > --- a/a > +++ b/b > @@ -1,1 +1,2 @@ > a > +b > EOF applying patch from stdin cannot create b: destination already exists 1 out of 1 hunks FAILED -- saving rejects to file b.rej abort: patch failed to apply [255] $ cat b b #if symlink $ ln -s b linkb $ hg add linkb $ hg ci -m addlinkb $ hg import --no-commit - < diff --git a/linkb b/linkb > deleted file mode 120000 > --- a/linkb > +++ /dev/null > @@ -1,1 +0,0 @@ > -badhunk > \ No newline at end of file > EOF applying patch from stdin patching file linkb Hunk #1 FAILED at 0 1 out of 1 hunks FAILED -- saving rejects to file linkb.rej abort: patch failed to apply [255] $ hg st ? b.rej ? linkb.rej #endif Test corner case involving copies and multiple hunks (issue3384) $ hg revert -qa $ hg import --no-commit - < diff --git a/a b/c > copy from a > copy to c > --- a/a > +++ b/c > @@ -1,1 +1,2 @@ > a > +a > @@ -2,1 +2,2 @@ > a > +a > diff --git a/a b/a > --- a/a > +++ b/a > @@ -1,1 +1,2 @@ > a > +b > EOF applying patch from stdin $ cd .. 
mercurial-3.7.3/tests/test-grep.t0000644000175000017500000000757512676531525016406 0ustar mpmmpm00000000000000 $ hg init t $ cd t $ echo import > port $ hg add port $ hg commit -m 0 -u spam -d '0 0' $ echo export >> port $ hg commit -m 1 -u eggs -d '1 0' $ echo export > port $ echo vaportight >> port $ echo 'import/export' >> port $ hg commit -m 2 -u spam -d '2 0' $ echo 'import/export' >> port $ hg commit -m 3 -u eggs -d '3 0' $ head -n 3 port > port1 $ mv port1 port $ hg commit -m 4 -u spam -d '4 0' pattern error $ hg grep '**test**' grep: invalid match pattern: nothing to repeat [1] simple $ hg grep '.*' port:4:export port:4:vaportight port:4:import/export $ hg grep port port port:4:export port:4:vaportight port:4:import/export simple with color $ hg --config extensions.color= grep --config color.mode=ansi \ > --color=always port port \x1b[0;35mport\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;32m4\x1b[0m\x1b[0;36m:\x1b[0mex\x1b[0;31;1mport\x1b[0m (esc) \x1b[0;35mport\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;32m4\x1b[0m\x1b[0;36m:\x1b[0mva\x1b[0;31;1mport\x1b[0might (esc) \x1b[0;35mport\x1b[0m\x1b[0;36m:\x1b[0m\x1b[0;32m4\x1b[0m\x1b[0;36m:\x1b[0mim\x1b[0;31;1mport\x1b[0m/ex\x1b[0;31;1mport\x1b[0m (esc) all $ hg grep --traceback --all -nu port port port:4:4:-:spam:import/export port:3:4:+:eggs:import/export port:2:1:-:spam:import port:2:2:-:spam:export port:2:1:+:spam:export port:2:2:+:spam:vaportight port:2:3:+:spam:import/export port:1:2:+:eggs:export port:0:1:+:spam:import other $ hg grep -l port port port:4 $ hg grep import port port:4:import/export $ hg cp port port2 $ hg commit -m 4 -u spam -d '5 0' follow $ hg grep --traceback -f 'import\n\Z' port2 port:0:import $ echo deport >> port2 $ hg commit -m 5 -u eggs -d '6 0' $ hg grep -f --all -nu port port2 port2:6:4:+:eggs:deport port:4:4:-:spam:import/export port:3:4:+:eggs:import/export port:2:1:-:spam:import port:2:2:-:spam:export port:2:1:+:spam:export port:2:2:+:spam:vaportight port:2:3:+:spam:import/export port:1:2:+:eggs:export 
port:0:1:+:spam:import $ hg up -q null $ hg grep -f port [1] $ cd .. $ hg init t2 $ cd t2 $ hg grep foobar foo [1] $ hg grep foobar [1] $ echo blue >> color $ echo black >> color $ hg add color $ hg ci -m 0 $ echo orange >> color $ hg ci -m 1 $ echo black > color $ hg ci -m 2 $ echo orange >> color $ echo blue >> color $ hg ci -m 3 $ hg grep orange color:3:orange $ hg grep --all orange color:3:+:orange color:2:-:orange color:1:+:orange match in last "line" without newline $ $PYTHON -c 'fp = open("noeol", "wb"); fp.write("no infinite loop"); fp.close();' $ hg ci -Amnoeol adding noeol $ hg grep loop noeol:4:no infinite loop $ cd .. Issue685: traceback in grep -r after rename Got a traceback when using grep on a single revision with renamed files. $ hg init issue685 $ cd issue685 $ echo octarine > color $ hg ci -Amcolor adding color $ hg rename color colour $ hg ci -Am rename $ hg grep octarine colour:1:octarine color:0:octarine Used to crash here $ hg grep -r 1 octarine colour:1:octarine $ cd .. Issue337: test that grep follows parent-child relationships instead of just using revision numbers. $ hg init issue337 $ cd issue337 $ echo white > color $ hg commit -A -m "0 white" adding color $ echo red > color $ hg commit -A -m "1 red" $ hg update 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo black > color $ hg commit -A -m "2 black" created new head $ hg update --clean 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo blue > color $ hg commit -A -m "3 blue" $ hg grep --all red color:3:-:red color:1:+:red $ cd .. $ hg init a $ cd a $ cp "$TESTDIR/binfile.bin" . $ hg add binfile.bin $ hg ci -m 'add binfile.bin' $ hg grep "MaCam" --all binfile.bin:0:+: Binary file matches $ cd .. 
mercurial-3.7.3/tests/test-mq-qdiff.t0000644000175000017500000000471312676531525017144 0ustar mpmmpm00000000000000 $ cat <> $HGRCPATH > [extensions] > mq = > [mq] > git = keep > EOF $ hg init a $ cd a $ echo 'base' > base $ hg ci -Ambase adding base $ hg qnew -mmqbase mqbase $ echo 'patched' > base $ hg qrefresh qdiff: $ hg qdiff diff -r d20a80d4def3 base --- a/base Thu Jan 01 00:00:00 1970 +0000 +++ b/base* (glob) @@ -1,1 +1,1 @@ -base +patched qdiff dirname: $ hg qdiff --nodates . diff -r d20a80d4def3 base --- a/base +++ b/base @@ -1,1 +1,1 @@ -base +patched qdiff filename: $ hg qdiff --nodates base diff -r d20a80d4def3 base --- a/base +++ b/base @@ -1,1 +1,1 @@ -base +patched $ hg revert -a $ hg qpop popping mqbase patch queue now empty $ hg qdelete mqbase $ printf '1\n2\n3\n4\nhello world\ngoodbye world\n7\n8\n9\n' > lines $ hg ci -Amlines -d '2 0' adding lines $ hg qnew -mmqbase2 mqbase2 $ printf '\n\n1\n2\n3\n4\nhello world\n goodbye world\n7\n8\n9\n' > lines $ hg qdiff --nodates -U 1 diff -r b0c220e1cf43 lines --- a/lines +++ b/lines @@ -1,1 +1,3 @@ + + 1 @@ -4,4 +6,4 @@ 4 -hello world -goodbye world +hello world + goodbye world 7 $ hg qdiff --nodates -b diff -r b0c220e1cf43 lines --- a/lines +++ b/lines @@ -1,9 +1,11 @@ + + 1 2 3 4 hello world -goodbye world + goodbye world 7 8 9 $ hg qdiff --nodates -U 1 -B diff -r b0c220e1cf43 lines --- a/lines +++ b/lines @@ -4,4 +4,4 @@ 4 -hello world -goodbye world +hello world + goodbye world 7 $ hg qdiff --nodates -w diff -r b0c220e1cf43 lines --- a/lines +++ b/lines @@ -1,3 +1,5 @@ + + 1 2 3 $ hg qdiff --nodates --reverse diff -r b0c220e1cf43 lines --- a/lines +++ b/lines @@ -1,11 +1,9 @@ - - 1 2 3 4 -hello world - goodbye world +hello world +goodbye world 7 8 9 qdiff preserve existing git flag: $ hg qrefresh --git $ echo a >> lines $ hg qdiff diff --git a/lines b/lines --- a/lines +++ b/lines @@ -1,9 +1,12 @@ + + 1 2 3 4 -hello world -goodbye world +hello world + goodbye world 7 8 9 +a $ hg qdiff --stat lines | 7 
+++++-- 1 files changed, 5 insertions(+), 2 deletions(-) $ hg qrefresh qdiff when file deleted (but not removed) in working dir: $ hg qnew deleted-file $ echo a > newfile $ hg add newfile $ hg qrefresh $ rm newfile $ hg qdiff $ cd .. mercurial-3.7.3/tests/test-hgweb.t0000644000175000017500000004622512676531525016540 0ustar mpmmpm00000000000000#require serve Some tests for hgweb. Tests static files, plain files and different 404's. $ hg init test $ cd test $ mkdir da $ echo foo > da/foo $ echo foo > foo $ hg ci -Ambase adding da/foo adding foo $ hg bookmark -r0 '@' $ hg bookmark -r0 'a b c' $ hg bookmark -r0 'd/e/f' $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log $ cat hg.pid >> $DAEMON_PIDS manifest $ (get-with-headers.py localhost:$HGPORT 'file/tip/?style=raw') 200 Script output follows drwxr-xr-x da -rw-r--r-- 4 foo $ (get-with-headers.py localhost:$HGPORT 'file/tip/da?style=raw') 200 Script output follows -rw-r--r-- 4 foo plain file $ get-with-headers.py localhost:$HGPORT 'file/tip/foo?style=raw' 200 Script output follows foo should give a 404 - static file that does not exist $ get-with-headers.py localhost:$HGPORT 'static/bogus' 404 Not Found test: error

                        error

                        An error occurred while processing your request:

                        Not Found

                        [1] should give a 404 - bad revision $ get-with-headers.py localhost:$HGPORT 'file/spam/foo?style=raw' 404 Not Found error: revision not found: spam [1] should give a 400 - bad command $ get-with-headers.py localhost:$HGPORT 'file/tip/foo?cmd=spam&style=raw' 400* (glob) error: no such method: spam [1] $ get-with-headers.py --headeronly localhost:$HGPORT '?cmd=spam' 400 no such method: spam [1] should give a 400 - bad command as a part of url path (issue4071) $ get-with-headers.py --headeronly localhost:$HGPORT 'spam' 400 no such method: spam [1] $ get-with-headers.py --headeronly localhost:$HGPORT 'raw-spam' 400 no such method: spam [1] $ get-with-headers.py --headeronly localhost:$HGPORT 'spam/tip/foo' 400 no such method: spam [1] should give a 404 - file does not exist $ get-with-headers.py localhost:$HGPORT 'file/tip/bork?style=raw' 404 Not Found error: bork@2ef0ac749a14: not found in manifest [1] $ get-with-headers.py localhost:$HGPORT 'file/tip/bork' 404 Not Found test: error

                        error

                        An error occurred while processing your request:

                        bork@2ef0ac749a14: not found in manifest

                        [1] $ get-with-headers.py localhost:$HGPORT 'diff/tip/bork?style=raw' 404 Not Found error: bork@2ef0ac749a14: not found in manifest [1] try bad style $ (get-with-headers.py localhost:$HGPORT 'file/tip/?style=foobar') 200 Script output follows test: 2ef0ac749a14 /

                        directory / @ 0:2ef0ac749a14 tip @ a b c d/e/f

                        name size permissions
                        [up] drwxr-xr-x
                        dir. da/ drwxr-xr-x
                        file foo 4 -rw-r--r--
                        stop and restart $ killdaemons.py $ hg serve -p $HGPORT -d --pid-file=hg.pid -A access.log $ cat hg.pid >> $DAEMON_PIDS Test the access/error files are opened in append mode $ $PYTHON -c "print len(file('access.log').readlines()), 'log lines written'" 14 log lines written static file $ get-with-headers.py --twice localhost:$HGPORT 'static/style-gitweb.css' - date etag server 200 Script output follows content-length: 6521 content-type: text/css body { font-family: sans-serif; font-size: 12px; border:solid #d9d8d1; border-width:1px; margin:10px; background: white; color: black; } a { color:#0000cc; } a:hover, a:visited, a:active { color:#880000; } div.page_header { height:25px; padding:8px; font-size:18px; font-weight:bold; background-color:#d9d8d1; } div.page_header a:visited { color:#0000cc; } div.page_header a:hover { color:#880000; } div.page_nav { padding:8px; } div.page_nav a:visited { color:#0000cc; } div.page_path { padding:8px; border:solid #d9d8d1; border-width:0px 0px 1px} div.page_footer { padding:4px 8px; background-color: #d9d8d1; } div.page_footer_text { float:left; color:#555555; font-style:italic; } div.page_body { padding:8px; } div.title, a.title { display:block; padding:6px 8px; font-weight:bold; background-color:#edece6; text-decoration:none; color:#000000; } a.title:hover { background-color: #d9d8d1; } div.title_text { padding:6px 0px; border: solid #d9d8d1; border-width:0px 0px 1px; } div.log_body { padding:8px 8px 8px 150px; } .age { white-space:nowrap; } span.age { position:relative; float:left; width:142px; font-style:italic; } div.log_link { padding:0px 8px; font-size:10px; font-family:sans-serif; font-style:normal; position:relative; float:left; width:136px; } div.list_head { padding:6px 8px 4px; border:solid #d9d8d1; border-width:1px 0px 0px; font-style:italic; } a.list { text-decoration:none; color:#000000; } a.list:hover { text-decoration:underline; color:#880000; } table { padding:8px 4px; } th { padding:2px 
5px; font-size:12px; text-align:left; } tr.dark, .parity1, pre.sourcelines.stripes > :nth-child(4n+4) { background-color:#f6f6f0; } tr.light:hover, .parity0:hover, tr.dark:hover, .parity1:hover, pre.sourcelines.stripes > :nth-child(4n+2):hover, pre.sourcelines.stripes > :nth-child(4n+4):hover, pre.sourcelines.stripes > :nth-child(4n+1):hover + :nth-child(4n+2), pre.sourcelines.stripes > :nth-child(4n+3):hover + :nth-child(4n+4) { background-color:#edece6; } td { padding:2px 5px; font-size:12px; vertical-align:top; } td.closed { background-color: #99f; } td.link { padding:2px 5px; font-family:sans-serif; font-size:10px; } td.indexlinks { white-space: nowrap; } td.indexlinks a { padding: 2px 5px; line-height: 10px; border: 1px solid; color: #ffffff; background-color: #7777bb; border-color: #aaaadd #333366 #333366 #aaaadd; font-weight: bold; text-align: center; text-decoration: none; font-size: 10px; } td.indexlinks a:hover { background-color: #6666aa; } div.pre { font-family:monospace; font-size:12px; white-space:pre; } div.diff_info { font-family:monospace; color:#000099; background-color:#edece6; font-style:italic; } div.index_include { border:solid #d9d8d1; border-width:0px 0px 1px; padding:12px 8px; } div.search { margin:4px 8px; position:absolute; top:56px; right:12px } .linenr { color:#999999; text-decoration:none } div.rss_logo { float: right; white-space: nowrap; } div.rss_logo a { padding:3px 6px; line-height:10px; border:1px solid; border-color:#fcc7a5 #7d3302 #3e1a01 #ff954e; color:#ffffff; background-color:#ff6600; font-weight:bold; font-family:sans-serif; font-size:10px; text-align:center; text-decoration:none; } div.rss_logo a:hover { background-color:#ee5500; } pre { margin: 0; } span.logtags span { padding: 0px 4px; font-size: 10px; font-weight: normal; border: 1px solid; background-color: #ffaaff; border-color: #ffccff #ff00ee #ff00ee #ffccff; } span.logtags span.tagtag { background-color: #ffffaa; border-color: #ffffcc #ffee00 #ffee00 #ffffcc; } 
span.logtags span.branchtag { background-color: #aaffaa; border-color: #ccffcc #00cc33 #00cc33 #ccffcc; } span.logtags span.inbranchtag { background-color: #d5dde6; border-color: #e3ecf4 #9398f4 #9398f4 #e3ecf4; } span.logtags span.bookmarktag { background-color: #afdffa; border-color: #ccecff #46ace6 #46ace6 #ccecff; } span.difflineplus { color:#008800; } span.difflineminus { color:#cc0000; } span.difflineat { color:#990099; } div.diffblocks { counter-reset: lineno; } div.diffblock { counter-increment: lineno; } pre.sourcelines { position: relative; counter-reset: lineno; } pre.sourcelines > span { display: inline-block; box-sizing: border-box; width: 100%; padding: 0 0 0 5em; counter-increment: lineno; vertical-align: top; } pre.sourcelines > span:before { -moz-user-select: -moz-none; -khtml-user-select: none; -webkit-user-select: none; -ms-user-select: none; user-select: none; display: inline-block; margin-left: -5em; width: 4em; color: #999; text-align: right; content: counters(lineno,"."); float: left; } pre.sourcelines > a { display: inline-block; position: absolute; left: 0px; width: 4em; height: 1em; } tr:target td, pre.sourcelines > span:target, pre.sourcelines.stripes > span:target { background-color: #bfdfff; } /* Graph */ div#wrapper { position: relative; margin: 0; padding: 0; margin-top: 3px; } canvas { position: absolute; z-index: 5; top: -0.9em; margin: 0; } ul#nodebgs { list-style: none inside none; padding: 0; margin: 0; top: -0.7em; } ul#graphnodes li, ul#nodebgs li { height: 39px; } ul#graphnodes { position: absolute; z-index: 10; top: -0.8em; list-style: none inside none; padding: 0; } ul#graphnodes li .info { display: block; font-size: 100%; position: relative; top: -3px; font-style: italic; } /* Comparison */ .legend { padding: 1.5% 0 1.5% 0; } .legendinfo { border: 1px solid #d9d8d1; font-size: 80%; text-align: center; padding: 0.5%; } .equal { background-color: #ffffff; } .delete { background-color: #faa; color: #333; } .insert { 
background-color: #ffa; } .replace { background-color: #e8e8e8; } .comparison { overflow-x: auto; } .header th { text-align: center; } .block { border-top: 1px solid #d9d8d1; } .scroll-loading { -webkit-animation: change_color 1s linear 0s infinite alternate; -moz-animation: change_color 1s linear 0s infinite alternate; -o-animation: change_color 1s linear 0s infinite alternate; animation: change_color 1s linear 0s infinite alternate; } @-webkit-keyframes change_color { from { background-color: #A0CEFF; } to { } } @-moz-keyframes change_color { from { background-color: #A0CEFF; } to { } } @-o-keyframes change_color { from { background-color: #A0CEFF; } to { } } @keyframes change_color { from { background-color: #A0CEFF; } to { } } .scroll-loading-error { background-color: #FFCCCC !important; } #doc { margin: 0 8px; } 304 Not Modified phase changes are refreshed (issue4061) $ echo bar >> foo $ hg ci -msecret --secret $ get-with-headers.py localhost:$HGPORT 'log?style=raw' 200 Script output follows # HG changelog # Node ID 2ef0ac749a14e4f57a5a822464a0902c6f7f448f changeset: 2ef0ac749a14e4f57a5a822464a0902c6f7f448f revision: 0 user: test date: Thu, 01 Jan 1970 00:00:00 +0000 summary: base branch: default tag: tip bookmark: @ bookmark: a b c bookmark: d/e/f $ hg phase --draft tip $ get-with-headers.py localhost:$HGPORT 'log?style=raw' 200 Script output follows # HG changelog # Node ID a084749e708a9c4c0a5b652a2a446322ce290e04 changeset: a084749e708a9c4c0a5b652a2a446322ce290e04 revision: 1 user: test date: Thu, 01 Jan 1970 00:00:00 +0000 summary: secret branch: default tag: tip changeset: 2ef0ac749a14e4f57a5a822464a0902c6f7f448f revision: 0 user: test date: Thu, 01 Jan 1970 00:00:00 +0000 summary: base bookmark: @ bookmark: a b c bookmark: d/e/f access bookmarks $ get-with-headers.py localhost:$HGPORT 'rev/@?style=paper' | egrep '^200|changeset 0:' 200 Script output follows changeset 0:2ef0ac749a14 $ get-with-headers.py localhost:$HGPORT 'rev/%40?style=paper' | egrep 
'^200|changeset 0:' 200 Script output follows changeset 0:2ef0ac749a14 $ get-with-headers.py localhost:$HGPORT 'rev/a%20b%20c?style=paper' | egrep '^200|changeset 0:' 200 Script output follows changeset 0:2ef0ac749a14 $ get-with-headers.py localhost:$HGPORT 'rev/d%252Fe%252Ff?style=paper' | egrep '^200|changeset 0:' 200 Script output follows changeset 0:2ef0ac749a14 no style can be loaded from directories other than the specified paths $ mkdir -p x/templates/fallback $ cat < x/templates/fallback/map > default = 'shortlog' > shortlog = 'fall back to default\n' > mimetype = 'text/plain' > EOF $ cat < x/map > default = 'shortlog' > shortlog = 'access to outside of templates directory\n' > mimetype = 'text/plain' > EOF $ killdaemons.py $ hg serve -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log \ > --config web.style=fallback --config web.templates=x/templates $ cat hg.pid >> $DAEMON_PIDS $ get-with-headers.py localhost:$HGPORT "?style=`pwd`/x" 200 Script output follows fall back to default $ get-with-headers.py localhost:$HGPORT '?style=..' 200 Script output follows fall back to default $ get-with-headers.py localhost:$HGPORT '?style=./..' 
200 Script output follows fall back to default $ get-with-headers.py localhost:$HGPORT '?style=.../.../' 200 Script output follows fall back to default errors $ cat errors.log Uncaught exceptions result in a logged error and canned HTTP response $ killdaemons.py $ hg --config extensions.hgweberror=$TESTDIR/hgweberror.py serve -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log $ cat hg.pid >> $DAEMON_PIDS $ get-with-headers.py localhost:$HGPORT 'raiseerror' transfer-encoding content-type 500 Internal Server Error transfer-encoding: chunked Internal Server Error (no-eol) [1] $ killdaemons.py $ head -1 errors.log .* Exception happened during processing request '/raiseerror': (re) Uncaught exception after partial content sent $ hg --config extensions.hgweberror=$TESTDIR/hgweberror.py serve -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log $ cat hg.pid >> $DAEMON_PIDS $ get-with-headers.py localhost:$HGPORT 'raiseerror?partialresponse=1' transfer-encoding content-type 200 Script output follows transfer-encoding: chunked content-type: text/plain partial content Internal Server Error (no-eol) $ killdaemons.py $ cd .. mercurial-3.7.3/tests/test-fileset.t0000644000175000017500000001512712676531525017074 0ustar mpmmpm00000000000000 $ fileset() { > hg debugfileset "$@" > } $ hg init repo $ cd repo $ echo a > a1 $ echo a > a2 $ echo b > b1 $ echo b > b2 $ hg ci -Am addfiles adding a1 adding a2 adding b1 adding b2 Test operators and basic patterns $ fileset -v a1 ('symbol', 'a1') a1 $ fileset -v 'a*' ('symbol', 'a*') a1 a2 $ fileset -v '"re:a\d"' ('string', 're:a\\d') a1 a2 $ fileset -v 'a1 or a2' (or ('symbol', 'a1') ('symbol', 'a2')) a1 a2 $ fileset 'a1 | a2' a1 a2 $ fileset 'a* and "*1"' a1 $ fileset 'a* & "*1"' a1 $ fileset 'not (r"a*")' b1 b2 $ fileset '! 
("a*")' b1 b2 $ fileset 'a* - a1' a2 $ fileset 'a_b' $ fileset '"\xy"' hg: parse error: invalid \x escape [255] Test files status $ rm a1 $ hg rm a2 $ echo b >> b2 $ hg cp b1 c1 $ echo c > c2 $ echo c > c3 $ cat > .hgignore < \.hgignore > 2$ > EOF $ fileset 'modified()' b2 $ fileset 'added()' c1 $ fileset 'removed()' a2 $ fileset 'deleted()' a1 $ fileset 'missing()' a1 $ fileset 'unknown()' c3 $ fileset 'ignored()' .hgignore c2 $ fileset 'hgignore()' a2 b2 $ fileset 'clean()' b1 $ fileset 'copied()' c1 Test files properties >>> file('bin', 'wb').write('\0a') $ fileset 'binary()' $ fileset 'binary() and unknown()' bin $ echo '^bin$' >> .hgignore $ fileset 'binary() and ignored()' bin $ hg add bin $ fileset 'binary()' bin $ fileset 'grep("b{1}")' b2 c1 b1 $ fileset 'grep("missingparens(")' hg: parse error: invalid match pattern: unbalanced parenthesis [255] #if execbit $ chmod +x b2 $ fileset 'exec()' b2 #endif #if symlink $ ln -s b2 b2link $ fileset 'symlink() and unknown()' b2link $ hg add b2link #endif #if no-windows $ echo foo > con.xml $ fileset 'not portable()' con.xml $ hg --config ui.portablefilenames=ignore add con.xml #endif >>> file('1k', 'wb').write(' '*1024) >>> file('2k', 'wb').write(' '*2048) $ hg add 1k 2k $ fileset 'size("bar")' hg: parse error: couldn't parse size: bar [255] $ fileset '(1k, 2k)' hg: parse error: can't use a list in this context (see hg help "filesets.x or y") [255] $ fileset 'size(1k)' 1k $ fileset '(1k or 2k) and size("< 2k")' 1k $ fileset '(1k or 2k) and size("<=2k")' 1k 2k $ fileset '(1k or 2k) and size("> 1k")' 2k $ fileset '(1k or 2k) and size(">=1K")' 1k 2k $ fileset '(1k or 2k) and size(".5KB - 1.5kB")' 1k $ fileset 'size("1M")' $ fileset 'size("1 GB")' Test merge states $ hg ci -m manychanges $ hg up -C 0 * files updated, 0 files merged, * files removed, 0 files unresolved (glob) $ echo c >> b2 $ hg ci -m diverging b2 created new head $ fileset 'resolved()' $ fileset 'unresolved()' $ hg merge merging b2 warning: conflicts 
while merging b2! (edit, then use 'hg resolve --mark') * files updated, 0 files merged, 1 files removed, 1 files unresolved (glob) use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ fileset 'resolved()' $ fileset 'unresolved()' b2 $ echo e > b2 $ hg resolve -m b2 (no more unresolved files) $ fileset 'resolved()' b2 $ fileset 'unresolved()' $ hg ci -m merge Test subrepo predicate $ hg init sub $ echo a > sub/suba $ hg -R sub add sub/suba $ hg -R sub ci -m sub $ echo 'sub = sub' > .hgsub $ hg init sub2 $ echo b > sub2/b $ hg -R sub2 ci -Am sub2 adding b $ echo 'sub2 = sub2' >> .hgsub $ fileset 'subrepo()' $ hg add .hgsub $ fileset 'subrepo()' sub sub2 $ fileset 'subrepo("sub")' sub $ fileset 'subrepo("glob:*")' sub sub2 $ hg ci -m subrepo Test that .hgsubstate is updated as appropriate during a conversion. The saverev property is enough to alter the hashes of the subrepo. $ hg init ../converted $ hg --config extensions.convert= convert --config convert.hg.saverev=True \ > sub ../converted/sub initializing destination ../converted/sub repository scanning source... sorting... converting... 0 sub $ hg clone -U sub2 ../converted/sub2 $ hg --config extensions.convert= convert --config convert.hg.saverev=True \ > . ../converted scanning source... sorting... converting... 
4 addfiles 3 manychanges 2 diverging 1 merge 0 subrepo no ".hgsubstate" updates will be made for "sub2" $ hg up -q -R ../converted -r tip $ hg --cwd ../converted cat sub/suba sub2/b -r tip a b $ oldnode=`hg log -r tip -T "{node}\n"` $ newnode=`hg log -R ../converted -r tip -T "{node}\n"` $ [ "$oldnode" != "$newnode" ] || echo "nothing changed" Test with a revision $ hg log -G --template '{rev} {desc}\n' @ 4 subrepo | o 3 merge |\ | o 2 diverging | | o | 1 manychanges |/ o 0 addfiles $ echo unknown > unknown $ fileset -r1 'modified()' b2 $ fileset -r1 'added() and c1' c1 $ fileset -r1 'removed()' a2 $ fileset -r1 'deleted()' $ fileset -r1 'unknown()' $ fileset -r1 'ignored()' $ fileset -r1 'hgignore()' b2 bin $ fileset -r1 'binary()' bin $ fileset -r1 'size(1k)' 1k $ fileset -r3 'resolved()' $ fileset -r3 'unresolved()' #if execbit $ fileset -r1 'exec()' b2 #endif #if symlink $ fileset -r1 'symlink()' b2link #endif #if no-windows $ fileset -r1 'not portable()' con.xml $ hg forget 'con.xml' #endif $ fileset -r4 'subrepo("re:su.*")' sub sub2 $ fileset -r4 'subrepo("sub")' sub $ fileset -r4 'b2 or c1' b2 c1 >>> open('dos', 'wb').write("dos\r\n") >>> open('mixed', 'wb').write("dos\r\nunix\n") >>> open('mac', 'wb').write("mac\r") $ hg add dos mixed mac (remove a1, to examine safety of 'eol' on removed files) $ rm a1 $ fileset 'eol(dos)' dos mixed $ fileset 'eol(unix)' mixed .hgsub .hgsubstate b1 b2 c1 $ fileset 'eol(mac)' mac Test safety of 'encoding' on removed files #if symlink $ fileset 'encoding("ascii")' dos mac mixed .hgsub .hgsubstate 1k 2k b1 b2 b2link bin c1 #else $ fileset 'encoding("ascii")' dos mac mixed .hgsub .hgsubstate 1k 2k b1 b2 bin c1 #endif Test detection of unintentional 'matchctx.existing()' invocation $ cat > $TESTTMP/existingcaller.py < from mercurial import fileset > > @fileset.predicate('existingcaller()', callexisting=False) > def existingcaller(mctx, x): > # this 'mctx.existing()' invocation is unintentional > return [f for f in 
mctx.existing()] > EOF $ cat >> .hg/hgrc < [extensions] > existingcaller = $TESTTMP/existingcaller.py > EOF $ fileset 'existingcaller()' 2>&1 | tail -1 AssertionError: unexpected existing() invocation mercurial-3.7.3/tests/test-1993.t0000644000175000017500000000225012676531525016037 0ustar mpmmpm00000000000000 $ hg init a $ cd a $ echo a > a $ hg ci -Am0 adding a $ echo b > b $ hg ci -Am1 adding b $ hg tag -r0 default warning: tag default conflicts with existing branch name $ hg log changeset: 2:30a83d1e4a1e tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Added tag default for changeset f7b1eb17ad24 changeset: 1:925d80f479bb user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1 changeset: 0:f7b1eb17ad24 tag: default user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0 $ hg update 'tag(default)' 0 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg parents changeset: 0:f7b1eb17ad24 tag: default user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0 $ hg update 'branch(default)' 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg parents changeset: 2:30a83d1e4a1e tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Added tag default for changeset f7b1eb17ad24 $ cd .. mercurial-3.7.3/tests/test-diff-issue2761.t0000644000175000017500000000067212676531525020016 0ustar mpmmpm00000000000000Test issue2761 $ hg init $ touch to-be-deleted $ hg add adding to-be-deleted $ hg ci -m first $ echo a > to-be-deleted $ hg ci -m second $ rm to-be-deleted $ hg diff -r 0 Same issue, different code path $ hg up -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ touch does-not-exist-in-1 $ hg add adding does-not-exist-in-1 $ hg ci -m third $ rm does-not-exist-in-1 $ hg diff -r 1 mercurial-3.7.3/tests/test-check-config.t0000644000175000017500000000033012676531525017747 0ustar mpmmpm00000000000000#require test-repo $ cd "$TESTDIR"/.. New errors are not allowed. 
Warnings are strongly discouraged. $ hg files "set:(**.py or **.txt) - tests/**" | sed 's|\\|/|g' | > xargs python contrib/check-config.py mercurial-3.7.3/tests/test-static-http.t0000644000175000017500000001035712676531525017705 0ustar mpmmpm00000000000000#require killdaemons #if windows $ hg clone http://localhost:$HGPORT/ copy abort: * (glob) [255] #else $ hg clone http://localhost:$HGPORT/ copy abort: error: Connection refused [255] #endif $ test -d copy [1] This server doesn't do range requests so it's basically only good for one pull $ python "$TESTDIR/dumbhttp.py" -p $HGPORT --pid dumb.pid $ cat dumb.pid >> $DAEMON_PIDS $ hg init remote $ cd remote $ echo foo > bar $ echo c2 > '.dotfile with spaces' $ hg add adding .dotfile with spaces adding bar $ hg commit -m"test" $ hg tip changeset: 0:02770d679fb8 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: test $ cd .. $ hg clone static-http://localhost:$HGPORT/remote local requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd local $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 1 changesets, 2 total revisions $ cat bar foo $ cd ../remote $ echo baz > quux $ hg commit -A -mtest2 adding quux check for HTTP opener failures when cachefile does not exist $ rm .hg/cache/* $ cd ../local $ echo '[hooks]' >> .hg/hgrc $ echo "changegroup = printenv.py changegroup" >> .hg/hgrc $ hg pull pulling from static-http://localhost:$HGPORT/remote searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files changegroup hook: HG_NODE=4ac2e3648604439c580c69b09ec9d93a88d93432 HG_NODE_LAST=4ac2e3648604439c580c69b09ec9d93a88d93432 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=http://localhost:$HGPORT/remote (glob) (run 'hg update' 
to get a working copy) trying to push $ hg update 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo more foo >> bar $ hg commit -m"test" $ hg push pushing to static-http://localhost:$HGPORT/remote abort: destination does not support push [255] trying clone -r $ cd .. $ hg clone -r doesnotexist static-http://localhost:$HGPORT/remote local0 abort: unknown revision 'doesnotexist'! [255] $ hg clone -r 0 static-http://localhost:$HGPORT/remote local0 adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved test with "/" URI (issue747) and subrepo $ hg init $ hg init sub $ touch sub/test $ hg -R sub commit -A -m "test" adding test $ hg -R sub tag not-empty $ echo sub=sub > .hgsub $ echo a > a $ hg add a .hgsub $ hg -q ci -ma $ hg clone static-http://localhost:$HGPORT/ local2 requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 3 changes to 3 files updating to branch default cloning subrepo sub from static-http://localhost:$HGPORT/sub requesting all changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd local2 $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 3 files, 1 changesets, 3 total revisions checking subrepo links $ cat a a $ hg paths default = static-http://localhost:$HGPORT/ test with empty repo (issue965) $ cd .. 
$ hg init remotempty $ hg clone static-http://localhost:$HGPORT/remotempty local3 no changes found updating to branch default 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd local3 $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 0 files, 0 changesets, 0 total revisions $ hg paths default = static-http://localhost:$HGPORT/remotempty test with non-repo $ cd .. $ mkdir notarepo $ hg clone static-http://localhost:$HGPORT/notarepo local3 abort: 'http://localhost:$HGPORT/notarepo' does not appear to be an hg repository! [255] $ killdaemons.py mercurial-3.7.3/tests/test-encoding-align.t0000644000175000017500000001226112676531525020313 0ustar mpmmpm00000000000000Test alignment of multibyte characters $ HGENCODING=utf-8 $ export HGENCODING $ hg init t $ cd t $ python << EOF > # (byte, width) = (6, 4) > s = "\xe7\x9f\xad\xe5\x90\x8d" > # (byte, width) = (7, 7): odd width is good for alignment test > m = "MIDDLE_" > # (byte, width) = (18, 12) > l = "\xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d" > f = file('s', 'w'); f.write(s); f.close() > f = file('m', 'w'); f.write(m); f.close() > f = file('l', 'w'); f.write(l); f.close() > # instant extension to show list of options > f = file('showoptlist.py', 'w'); f.write("""# encoding: utf-8 > from mercurial import cmdutil > cmdtable = {} > command = cmdutil.command(cmdtable) > > @command('showoptlist', > [('s', 'opt1', '', 'short width' + ' %(s)s' * 8, '%(s)s'), > ('m', 'opt2', '', 'middle width' + ' %(m)s' * 8, '%(m)s'), > ('l', 'opt3', '', 'long width' + ' %(l)s' * 8, '%(l)s')], > '') > def showoptlist(ui, repo, *pats, **opts): > '''dummy command to show option descriptions''' > return 0 > """ % globals()) > f.close() > EOF $ S=`cat s` $ M=`cat m` $ L=`cat l` alignment of option descriptions in help $ cat < .hg/hgrc > [extensions] > ja_ext = `pwd`/showoptlist.py > EOF check alignment of option descriptions in help 
$ hg help showoptlist hg showoptlist dummy command to show option descriptions options: -s --opt1 \xe7\x9f\xad\xe5\x90\x8d short width \xe7\x9f\xad\xe5\x90\x8d \xe7\x9f\xad\xe5\x90\x8d \xe7\x9f\xad\xe5\x90\x8d \xe7\x9f\xad\xe5\x90\x8d \xe7\x9f\xad\xe5\x90\x8d \xe7\x9f\xad\xe5\x90\x8d \xe7\x9f\xad\xe5\x90\x8d \xe7\x9f\xad\xe5\x90\x8d (esc) -m --opt2 MIDDLE_ middle width MIDDLE_ MIDDLE_ MIDDLE_ MIDDLE_ MIDDLE_ MIDDLE_ MIDDLE_ MIDDLE_ -l --opt3 \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d long width \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d (esc) \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d (esc) \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d (esc) (some details hidden, use --verbose to show complete help) $ rm -f s; touch s $ rm -f m; touch m $ rm -f l; touch l add files $ cp s $S $ hg add $S $ cp m $M $ hg add $M $ cp l $L $ hg add $L commit(1) $ echo 'first line(1)' >> s; cp s $S $ echo 'first line(2)' >> m; cp m $M $ echo 'first line(3)' >> l; cp l $L $ hg commit -m 'first commit' -u $S commit(2) $ echo 'second line(1)' >> s; cp s $S $ echo 'second line(2)' >> m; cp m $M $ echo 'second line(3)' >> l; cp l $L $ hg commit -m 'second commit' -u $M commit(3) $ echo 'third line(1)' >> s; cp s $S $ echo 'third line(2)' >> m; cp m $M $ echo 'third line(3)' >> l; cp l $L $ hg commit -m 'third commit' -u $L check alignment of user names in annotate $ hg annotate -u $M \xe7\x9f\xad\xe5\x90\x8d: first line(2) (esc) MIDDLE_: second line(2) 
\xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d: third line(2) (esc) check alignment of filenames in diffstat $ hg diff -c tip --stat MIDDLE_ | 1 + \xe7\x9f\xad\xe5\x90\x8d | 1 + (esc) \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d | 1 + (esc) 3 files changed, 3 insertions(+), 0 deletions(-) add branches/tags $ hg branch $S marked working directory as branch \xe7\x9f\xad\xe5\x90\x8d (esc) (branches are permanent and global, did you want a bookmark?) $ hg tag $S $ hg book -f $S $ hg branch $M marked working directory as branch MIDDLE_ $ hg tag $M $ hg book -f $M $ hg branch $L marked working directory as branch \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d (esc) $ hg tag $L $ hg book -f $L check alignment of branches $ hg branches \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d 5:d745ff46155b (esc) MIDDLE_ 4:9259be597f19 (inactive) \xe7\x9f\xad\xe5\x90\x8d 3:b06c5b6def9e (inactive) (esc) default 2:64a70663cee8 (inactive) check alignment of tags $ hg tags tip 5:d745ff46155b \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d 4:9259be597f19 (esc) MIDDLE_ 3:b06c5b6def9e \xe7\x9f\xad\xe5\x90\x8d 2:64a70663cee8 (esc) check alignment of bookmarks $ hg book MIDDLE_ 5:d745ff46155b \xe7\x9f\xad\xe5\x90\x8d 4:9259be597f19 (esc) * \xe9\x95\xb7\xe3\x81\x84\xe9\x95\xb7\xe3\x81\x84\xe5\x90\x8d\xe5\x89\x8d 5:d745ff46155b (esc) mercurial-3.7.3/tests/test-rebase-detach.t0000644000175000017500000001446212676531525020131 0ustar mpmmpm00000000000000 $ cat >> $HGRCPATH < [extensions] > rebase= > > [phases] > publish=False > > [alias] > tglog = log -G --template "{rev}: '{desc}' {branches}\n" > EOF $ hg init a $ cd a $ hg unbundle "$TESTDIR/bundles/rebase.hg" adding changesets adding manifests adding file changes added 8 changesets with 7 changes to 7 files (+2 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg up tip 3 files updated, 0 files merged, 0 
files removed, 0 files unresolved $ cd .. Rebasing D onto H detaching from C: $ hg clone -q -u . a a1 $ cd a1 $ hg tglog @ 7: 'H' | | o 6: 'G' |/| o | 5: 'F' | | | o 4: 'E' |/ | o 3: 'D' | | | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' $ hg phase --force --secret 3 $ hg rebase -s 3 -d 7 rebasing 3:32af7686d403 "D" saved backup bundle to $TESTTMP/a1/.hg/strip-backup/32af7686d403-6f7dface-backup.hg (glob) $ hg log -G --template "{rev}:{phase} '{desc}' {branches}\n" o 7:secret 'D' | @ 6:draft 'H' | | o 5:draft 'G' |/| o | 4:draft 'F' | | | o 3:draft 'E' |/ | o 2:draft 'C' | | | o 1:draft 'B' |/ o 0:draft 'A' $ hg manifest --rev tip A D F H $ cd .. Rebasing C onto H detaching from B: $ hg clone -q -u . a a2 $ cd a2 $ hg tglog @ 7: 'H' | | o 6: 'G' |/| o | 5: 'F' | | | o 4: 'E' |/ | o 3: 'D' | | | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' $ hg rebase -s 2 -d 7 rebasing 2:5fddd98957c8 "C" rebasing 3:32af7686d403 "D" saved backup bundle to $TESTTMP/a2/.hg/strip-backup/5fddd98957c8-f9244fa1-backup.hg (glob) $ hg tglog o 7: 'D' | o 6: 'C' | @ 5: 'H' | | o 4: 'G' |/| o | 3: 'F' | | | o 2: 'E' |/ | o 1: 'B' |/ o 0: 'A' $ hg manifest --rev tip A C D F H $ cd .. Rebasing B onto H using detach (same as not using it): $ hg clone -q -u . a a3 $ cd a3 $ hg tglog @ 7: 'H' | | o 6: 'G' |/| o | 5: 'F' | | | o 4: 'E' |/ | o 3: 'D' | | | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' $ hg rebase -s 1 -d 7 rebasing 1:42ccdea3bb16 "B" rebasing 2:5fddd98957c8 "C" rebasing 3:32af7686d403 "D" saved backup bundle to $TESTTMP/a3/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob) $ hg tglog o 7: 'D' | o 6: 'C' | o 5: 'B' | @ 4: 'H' | | o 3: 'G' |/| o | 2: 'F' | | | o 1: 'E' |/ o 0: 'A' $ hg manifest --rev tip A B C D F H $ cd .. Rebasing C onto H detaching from B and collapsing: $ hg clone -q -u . 
a a4 $ cd a4 $ hg phase --force --secret 3 $ hg tglog @ 7: 'H' | | o 6: 'G' |/| o | 5: 'F' | | | o 4: 'E' |/ | o 3: 'D' | | | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' $ hg rebase --collapse -s 2 -d 7 rebasing 2:5fddd98957c8 "C" rebasing 3:32af7686d403 "D" saved backup bundle to $TESTTMP/a4/.hg/strip-backup/5fddd98957c8-f9244fa1-backup.hg (glob) $ hg log -G --template "{rev}:{phase} '{desc}' {branches}\n" o 6:secret 'Collapsed revision | * C | * D' @ 5:draft 'H' | | o 4:draft 'G' |/| o | 3:draft 'F' | | | o 2:draft 'E' |/ | o 1:draft 'B' |/ o 0:draft 'A' $ hg manifest --rev tip A C D F H $ cd .. Rebasing across null as ancestor $ hg clone -q -U a a5 $ cd a5 $ echo x > x $ hg add x $ hg ci -m "extra branch" created new head $ hg tglog @ 8: 'extra branch' o 7: 'H' | | o 6: 'G' |/| o | 5: 'F' | | | o 4: 'E' |/ | o 3: 'D' | | | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' $ hg rebase -s 1 -d tip rebasing 1:42ccdea3bb16 "B" rebasing 2:5fddd98957c8 "C" rebasing 3:32af7686d403 "D" saved backup bundle to $TESTTMP/a5/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob) $ hg tglog o 8: 'D' | o 7: 'C' | o 6: 'B' | @ 5: 'extra branch' o 4: 'H' | | o 3: 'G' |/| o | 2: 'F' | | | o 1: 'E' |/ o 0: 'A' $ hg rebase -d 5 -s 7 rebasing 7:13547172c9c0 "C" rebasing 8:4e27a76c371a "D" (tip) saved backup bundle to $TESTTMP/a5/.hg/strip-backup/13547172c9c0-35685ded-backup.hg (glob) $ hg tglog o 8: 'D' | o 7: 'C' | | o 6: 'B' |/ @ 5: 'extra branch' o 4: 'H' | | o 3: 'G' |/| o | 2: 'F' | | | o 1: 'E' |/ o 0: 'A' $ cd .. 
Verify that target is not selected as external rev (issue3085) $ hg clone -q -U a a6 $ cd a6 $ hg up -q 6 $ echo "I" >> E $ hg ci -m "I" $ hg merge 7 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m "Merge" $ echo "J" >> F $ hg ci -m "J" $ hg rebase -s 8 -d 7 --collapse --config ui.merge=internal:other rebasing 8:9790e768172d "I" rebasing 9:5d7b11f5fb97 "Merge" rebasing 10:9427d4d5af81 "J" (tip) saved backup bundle to $TESTTMP/a6/.hg/strip-backup/9790e768172d-c2111e9d-backup.hg (glob) $ hg tglog @ 8: 'Collapsed revision | * I | * Merge | * J' o 7: 'H' | | o 6: 'G' |/| o | 5: 'F' | | | o 4: 'E' |/ | o 3: 'D' | | | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' $ hg log --rev tip changeset: 8:9472f4b1d736 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Collapsed revision $ cd .. Ensure --continue restores a correct state (issue3046) and phase: $ hg clone -q a a7 $ cd a7 $ hg up -q 3 $ echo 'H2' > H $ hg ci -A -m 'H2' adding H $ hg phase --force --secret 8 $ hg rebase -s 8 -d 7 --config ui.merge=internal:fail rebasing 8:6215fafa5447 "H2" (tip) merging H warning: conflicts while merging H! (edit, then use 'hg resolve --mark') unresolved conflicts (see hg resolve, then hg rebase --continue) [1] $ hg resolve --all -t internal:local (no more unresolved files) continue: hg rebase --continue $ hg rebase -c rebasing 8:6215fafa5447 "H2" (tip) note: rebase of 8:6215fafa5447 created no changes to commit saved backup bundle to $TESTTMP/a7/.hg/strip-backup/6215fafa5447-5804ebd5-backup.hg (glob) $ hg log -G --template "{rev}:{phase} '{desc}' {branches}\n" @ 7:draft 'H' | | o 6:draft 'G' |/| o | 5:draft 'F' | | | o 4:draft 'E' |/ | o 3:draft 'D' | | | o 2:draft 'C' | | | o 1:draft 'B' |/ o 0:draft 'A' $ cd .. 
mercurial-3.7.3/tests/test-rebase-rename.t0000644000175000017500000001651012676531525020144 0ustar mpmmpm00000000000000 $ cat >> $HGRCPATH < [extensions] > rebase= > > [alias] > tlog = log --template "{rev}: '{desc}' {branches}\n" > tglog = tlog --graph > EOF $ hg init a $ cd a $ mkdir d $ echo a > a $ hg ci -Am A adding a $ echo b > d/b $ hg ci -Am B adding d/b $ hg mv d d-renamed moving d/b to d-renamed/b (glob) $ hg ci -m 'rename B' $ hg up -q -C 1 $ hg mv a a-renamed $ echo x > d/x $ hg add d/x $ hg ci -m 'rename A' created new head $ hg tglog @ 3: 'rename A' | | o 2: 'rename B' |/ o 1: 'B' | o 0: 'A' Rename is tracked: $ hg tlog -p --git -r tip 3: 'rename A' diff --git a/a b/a-renamed rename from a rename to a-renamed diff --git a/d/x b/d/x new file mode 100644 --- /dev/null +++ b/d/x @@ -0,0 +1,1 @@ +x Rebase the revision containing the rename: $ hg rebase -s 3 -d 2 rebasing 3:73a3ee40125d "rename A" (tip) saved backup bundle to $TESTTMP/a/.hg/strip-backup/73a3ee40125d-1d78ebcf-backup.hg (glob) $ hg tglog @ 3: 'rename A' | o 2: 'rename B' | o 1: 'B' | o 0: 'A' Rename is not lost: $ hg tlog -p --git -r tip 3: 'rename A' diff --git a/a b/a-renamed rename from a rename to a-renamed diff --git a/d-renamed/x b/d-renamed/x new file mode 100644 --- /dev/null +++ b/d-renamed/x @@ -0,0 +1,1 @@ +x Rebased revision does not contain information about b (issue3739) $ hg log -r 3 --debug changeset: 3:032a9b75e83bff1dcfb6cbfa4ef50a704bf1b569 tag: tip phase: draft parent: 2:220d0626d185f372d9d8f69d9c73b0811d7725f7 parent: -1:0000000000000000000000000000000000000000 manifest: 3:035d66b27a1b06b2d12b46d41a39adb7a200c370 user: test date: Thu Jan 01 00:00:00 1970 +0000 files+: a-renamed d-renamed/x files-: a extra: branch=default extra: rebase_source=73a3ee40125d6f0f347082e5831ceccb3f005f8a description: rename A $ cd .. 
$ hg init b $ cd b $ echo a > a $ hg ci -Am A adding a $ echo b > b $ hg ci -Am B adding b $ hg cp b b-copied $ hg ci -Am 'copy B' $ hg up -q -C 1 $ hg cp a a-copied $ hg ci -m 'copy A' created new head $ hg tglog @ 3: 'copy A' | | o 2: 'copy B' |/ o 1: 'B' | o 0: 'A' Copy is tracked: $ hg tlog -p --git -r tip 3: 'copy A' diff --git a/a b/a-copied copy from a copy to a-copied Rebase the revision containing the copy: $ hg rebase -s 3 -d 2 rebasing 3:0a8162ff18a8 "copy A" (tip) saved backup bundle to $TESTTMP/b/.hg/strip-backup/0a8162ff18a8-dd06302a-backup.hg (glob) $ hg tglog @ 3: 'copy A' | o 2: 'copy B' | o 1: 'B' | o 0: 'A' Copy is not lost: $ hg tlog -p --git -r tip 3: 'copy A' diff --git a/a b/a-copied copy from a copy to a-copied Rebased revision does not contain information about b (issue3739) $ hg log -r 3 --debug changeset: 3:98f6e6dbf45ab54079c2237fbd11066a5c41a11d tag: tip phase: draft parent: 2:39e588434882ff77d01229d169cdc77f29e8855e parent: -1:0000000000000000000000000000000000000000 manifest: 3:2232f329d66fffe3930d43479ae624f66322b04d user: test date: Thu Jan 01 00:00:00 1970 +0000 files+: a-copied extra: branch=default extra: rebase_source=0a8162ff18a8900df8df8ef7ac0046955205613e description: copy A $ cd .. 
Test rebase across repeating renames: $ hg init repo $ cd repo $ echo testing > file1.txt $ hg add file1.txt $ hg ci -m "Adding file1" $ hg rename file1.txt file2.txt $ hg ci -m "Rename file1 to file2" $ echo Unrelated change > unrelated.txt $ hg add unrelated.txt $ hg ci -m "Unrelated change" $ hg rename file2.txt file1.txt $ hg ci -m "Rename file2 back to file1" $ hg update -r -2 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo Another unrelated change >> unrelated.txt $ hg ci -m "Another unrelated change" created new head $ hg tglog @ 4: 'Another unrelated change' | | o 3: 'Rename file2 back to file1' |/ o 2: 'Unrelated change' | o 1: 'Rename file1 to file2' | o 0: 'Adding file1' $ hg rebase -s 4 -d 3 rebasing 4:b918d683b091 "Another unrelated change" (tip) saved backup bundle to $TESTTMP/repo/.hg/strip-backup/b918d683b091-3024bc57-backup.hg (glob) $ hg diff --stat -c . unrelated.txt | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) $ cd .. Verify that copies get preserved (issue4192). $ hg init copy-gets-preserved $ cd copy-gets-preserved $ echo a > a $ hg add a $ hg commit --message "File a created" $ hg copy a b $ echo b > b $ hg commit --message "File b created as copy of a and modified" $ hg copy b c $ echo c > c $ hg commit --message "File c created as copy of b and modified" $ hg copy c d $ echo d > d $ hg commit --message "File d created as copy of c and modified" Note that there are four entries in the log for d $ hg tglog --follow d @ 3: 'File d created as copy of c and modified' | o 2: 'File c created as copy of b and modified' | o 1: 'File b created as copy of a and modified' | o 0: 'File a created' Update back to before we performed copies, and inject an unrelated change. 
$ hg update 0 0 files updated, 0 files merged, 3 files removed, 0 files unresolved $ echo unrelated > unrelated $ hg add unrelated $ hg commit --message "Unrelated file created" created new head $ hg update 4 0 files updated, 0 files merged, 0 files removed, 0 files unresolved Rebase the copies on top of the unrelated change. $ hg rebase --source 1 --dest 4 rebasing 1:79d255d24ad2 "File b created as copy of a and modified" rebasing 2:327f772bc074 "File c created as copy of b and modified" rebasing 3:421b7e82bb85 "File d created as copy of c and modified" saved backup bundle to $TESTTMP/copy-gets-preserved/.hg/strip-backup/79d255d24ad2-a2265555-backup.hg (glob) $ hg update 4 3 files updated, 0 files merged, 0 files removed, 0 files unresolved There should still be four entries in the log for d $ hg tglog --follow d @ 4: 'File d created as copy of c and modified' | o 3: 'File c created as copy of b and modified' | o 2: 'File b created as copy of a and modified' | o 0: 'File a created' Same steps as above, but with --collapse on rebase to make sure the copy records collapse correctly. $ hg co 1 0 files updated, 0 files merged, 3 files removed, 0 files unresolved $ echo more >> unrelated $ hg ci -m 'unrelated commit is unrelated' created new head $ hg rebase -s 2 --dest 5 --collapse rebasing 2:68bf06433839 "File b created as copy of a and modified" rebasing 3:af74b229bc02 "File c created as copy of b and modified" merging b and c to c rebasing 4:dbb9ba033561 "File d created as copy of c and modified" merging c and d to d saved backup bundle to $TESTTMP/copy-gets-preserved/.hg/strip-backup/68bf06433839-dde37595-backup.hg (glob) $ hg co tip 3 files updated, 0 files merged, 0 files removed, 0 files unresolved This should show both revision 3 and 0 since 'd' was transitively a copy of 'a'. 
$ hg tglog --follow d @ 3: 'Collapsed revision | * File b created as copy of a and modified | * File c created as copy of b and modified | * File d created as copy of c and modified' o 0: 'File a created' $ cd .. mercurial-3.7.3/tests/test-getbundle.t0000644000175000017500000003430112676531525017405 0ustar mpmmpm00000000000000#require serve = Test the getbundle() protocol function = Create a test repository: $ hg init repo $ cd repo $ hg debugbuilddag -n -m '+2 :fork +5 :p1 *fork +6 :p2 /p1 :m1 +3' > /dev/null $ hg log -G --template '{node}\n' o 10c14a2cc935e1d8c31f9e98587dcf27fb08a6da | o 4801a72e5d88cb515b0c7e40fae34180f3f837f2 | o 0b2f73f04880d9cb6a5cd8a757f0db0ad01e32c3 | o 8365676dbab05860ce0d9110f2af51368b961bbd |\ | o 5686dbbd9fc46cb806599c878d02fe1cb56b83d3 | | | o 13c0170174366b441dc68e8e33757232fa744458 | | | o 63476832d8ec6558cf9bbe3cbe0c757e5cf18043 | | | o 700b7e19db54103633c4bf4a6a6b6d55f4d50c03 | | | o 928b5f94cdb278bb536eba552de348a4e92ef24d | | | o f34414c64173e0ecb61b25dc55e116dbbcc89bee | | | o 8931463777131cd73923e560b760061f2aa8a4bc | | o | 6621d79f61b23ec74cf4b69464343d9e0980ec8b | | o | bac16991d12ff45f9dc43c52da1946dfadb83e80 | | o | ff42371d57168345fdf1a3aac66a51f6a45d41d2 | | o | d5f6e1ea452285324836a49d7d3c2a63cfed1d31 | | o | 713346a995c363120712aed1aee7e04afd867638 |/ o 29a4d1f17bd3f0779ca0525bebb1cfb51067c738 | o 7704483d56b2a7b5db54dcee7c62378ac629b348 $ cd .. 
= Test locally = Get everything: $ hg debuggetbundle repo bundle $ hg debugbundle bundle 7704483d56b2a7b5db54dcee7c62378ac629b348 29a4d1f17bd3f0779ca0525bebb1cfb51067c738 713346a995c363120712aed1aee7e04afd867638 d5f6e1ea452285324836a49d7d3c2a63cfed1d31 ff42371d57168345fdf1a3aac66a51f6a45d41d2 bac16991d12ff45f9dc43c52da1946dfadb83e80 6621d79f61b23ec74cf4b69464343d9e0980ec8b 8931463777131cd73923e560b760061f2aa8a4bc f34414c64173e0ecb61b25dc55e116dbbcc89bee 928b5f94cdb278bb536eba552de348a4e92ef24d 700b7e19db54103633c4bf4a6a6b6d55f4d50c03 63476832d8ec6558cf9bbe3cbe0c757e5cf18043 13c0170174366b441dc68e8e33757232fa744458 5686dbbd9fc46cb806599c878d02fe1cb56b83d3 8365676dbab05860ce0d9110f2af51368b961bbd 0b2f73f04880d9cb6a5cd8a757f0db0ad01e32c3 4801a72e5d88cb515b0c7e40fae34180f3f837f2 10c14a2cc935e1d8c31f9e98587dcf27fb08a6da Get part of linear run: $ hg debuggetbundle repo bundle -H 4801a72e5d88cb515b0c7e40fae34180f3f837f2 -C 8365676dbab05860ce0d9110f2af51368b961bbd $ hg debugbundle bundle 0b2f73f04880d9cb6a5cd8a757f0db0ad01e32c3 4801a72e5d88cb515b0c7e40fae34180f3f837f2 Get missing branch and merge: $ hg debuggetbundle repo bundle -H 4801a72e5d88cb515b0c7e40fae34180f3f837f2 -C 13c0170174366b441dc68e8e33757232fa744458 $ hg debugbundle bundle 713346a995c363120712aed1aee7e04afd867638 d5f6e1ea452285324836a49d7d3c2a63cfed1d31 ff42371d57168345fdf1a3aac66a51f6a45d41d2 bac16991d12ff45f9dc43c52da1946dfadb83e80 6621d79f61b23ec74cf4b69464343d9e0980ec8b 5686dbbd9fc46cb806599c878d02fe1cb56b83d3 8365676dbab05860ce0d9110f2af51368b961bbd 0b2f73f04880d9cb6a5cd8a757f0db0ad01e32c3 4801a72e5d88cb515b0c7e40fae34180f3f837f2 Get from only one head: $ hg debuggetbundle repo bundle -H 928b5f94cdb278bb536eba552de348a4e92ef24d -C 29a4d1f17bd3f0779ca0525bebb1cfb51067c738 $ hg debugbundle bundle 8931463777131cd73923e560b760061f2aa8a4bc f34414c64173e0ecb61b25dc55e116dbbcc89bee 928b5f94cdb278bb536eba552de348a4e92ef24d Get parts of two branches: $ hg debuggetbundle repo bundle -H 
13c0170174366b441dc68e8e33757232fa744458 -C 700b7e19db54103633c4bf4a6a6b6d55f4d50c03 -H bac16991d12ff45f9dc43c52da1946dfadb83e80 -C d5f6e1ea452285324836a49d7d3c2a63cfed1d31 $ hg debugbundle bundle ff42371d57168345fdf1a3aac66a51f6a45d41d2 bac16991d12ff45f9dc43c52da1946dfadb83e80 63476832d8ec6558cf9bbe3cbe0c757e5cf18043 13c0170174366b441dc68e8e33757232fa744458 Check that we get all needed file changes: $ hg debugbundle bundle --all format: id, p1, p2, cset, delta base, len(delta) changelog ff42371d57168345fdf1a3aac66a51f6a45d41d2 d5f6e1ea452285324836a49d7d3c2a63cfed1d31 0000000000000000000000000000000000000000 ff42371d57168345fdf1a3aac66a51f6a45d41d2 d5f6e1ea452285324836a49d7d3c2a63cfed1d31 99 bac16991d12ff45f9dc43c52da1946dfadb83e80 ff42371d57168345fdf1a3aac66a51f6a45d41d2 0000000000000000000000000000000000000000 bac16991d12ff45f9dc43c52da1946dfadb83e80 ff42371d57168345fdf1a3aac66a51f6a45d41d2 99 63476832d8ec6558cf9bbe3cbe0c757e5cf18043 700b7e19db54103633c4bf4a6a6b6d55f4d50c03 0000000000000000000000000000000000000000 63476832d8ec6558cf9bbe3cbe0c757e5cf18043 bac16991d12ff45f9dc43c52da1946dfadb83e80 102 13c0170174366b441dc68e8e33757232fa744458 63476832d8ec6558cf9bbe3cbe0c757e5cf18043 0000000000000000000000000000000000000000 13c0170174366b441dc68e8e33757232fa744458 63476832d8ec6558cf9bbe3cbe0c757e5cf18043 102 manifest dac7984588fc4eea7acbf39693a9c1b06f5b175d 591f732a3faf1fb903815273f3c199a514a61ccb 0000000000000000000000000000000000000000 ff42371d57168345fdf1a3aac66a51f6a45d41d2 591f732a3faf1fb903815273f3c199a514a61ccb 113 0772616e6b48a76afb6c1458e193cbb3dae2e4ff dac7984588fc4eea7acbf39693a9c1b06f5b175d 0000000000000000000000000000000000000000 bac16991d12ff45f9dc43c52da1946dfadb83e80 dac7984588fc4eea7acbf39693a9c1b06f5b175d 113 eb498cd9af6c44108e43041e951ce829e29f6c80 bff2f4817ced57b386caf7c4e3e36a4bc9af7e93 0000000000000000000000000000000000000000 63476832d8ec6558cf9bbe3cbe0c757e5cf18043 0772616e6b48a76afb6c1458e193cbb3dae2e4ff 295 
b15709c071ddd2d93188508ba156196ab4f19620 eb498cd9af6c44108e43041e951ce829e29f6c80 0000000000000000000000000000000000000000 13c0170174366b441dc68e8e33757232fa744458 eb498cd9af6c44108e43041e951ce829e29f6c80 114 mf 4f73f97080266ab8e0c0561ca8d0da3eaf65b695 301ca08d026bb72cb4258a9d211bdf7ca0bcd810 0000000000000000000000000000000000000000 ff42371d57168345fdf1a3aac66a51f6a45d41d2 301ca08d026bb72cb4258a9d211bdf7ca0bcd810 17 c7b583de053293870e145f45bd2d61643563fd06 4f73f97080266ab8e0c0561ca8d0da3eaf65b695 0000000000000000000000000000000000000000 bac16991d12ff45f9dc43c52da1946dfadb83e80 4f73f97080266ab8e0c0561ca8d0da3eaf65b695 18 266ee3c0302a5a18f1cf96817ac79a51836179e9 edc0f6b8db80d68ae6aff2b19f7e5347ab68fa63 0000000000000000000000000000000000000000 63476832d8ec6558cf9bbe3cbe0c757e5cf18043 c7b583de053293870e145f45bd2d61643563fd06 149 698c6a36220548cd3903ca7dada27c59aa500c52 266ee3c0302a5a18f1cf96817ac79a51836179e9 0000000000000000000000000000000000000000 13c0170174366b441dc68e8e33757232fa744458 266ee3c0302a5a18f1cf96817ac79a51836179e9 19 nf11 33fbc651630ffa7ccbebfe4eb91320a873e7291c 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 63476832d8ec6558cf9bbe3cbe0c757e5cf18043 0000000000000000000000000000000000000000 16 nf12 ddce0544363f037e9fb889faca058f52dc01c0a5 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 13c0170174366b441dc68e8e33757232fa744458 0000000000000000000000000000000000000000 16 nf4 3c1407305701051cbed9f9cb9a68bdfb5997c235 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 ff42371d57168345fdf1a3aac66a51f6a45d41d2 0000000000000000000000000000000000000000 15 nf5 0dbd89c185f53a1727c54cd1ce256482fa23968e 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 bac16991d12ff45f9dc43c52da1946dfadb83e80 0000000000000000000000000000000000000000 15 Get branch and merge: $ hg debuggetbundle repo bundle -C 7704483d56b2a7b5db54dcee7c62378ac629b348 -H 
0b2f73f04880d9cb6a5cd8a757f0db0ad01e32c3 $ hg debugbundle bundle 29a4d1f17bd3f0779ca0525bebb1cfb51067c738 713346a995c363120712aed1aee7e04afd867638 d5f6e1ea452285324836a49d7d3c2a63cfed1d31 ff42371d57168345fdf1a3aac66a51f6a45d41d2 bac16991d12ff45f9dc43c52da1946dfadb83e80 6621d79f61b23ec74cf4b69464343d9e0980ec8b 8931463777131cd73923e560b760061f2aa8a4bc f34414c64173e0ecb61b25dc55e116dbbcc89bee 928b5f94cdb278bb536eba552de348a4e92ef24d 700b7e19db54103633c4bf4a6a6b6d55f4d50c03 63476832d8ec6558cf9bbe3cbe0c757e5cf18043 13c0170174366b441dc68e8e33757232fa744458 5686dbbd9fc46cb806599c878d02fe1cb56b83d3 8365676dbab05860ce0d9110f2af51368b961bbd 0b2f73f04880d9cb6a5cd8a757f0db0ad01e32c3 = Test bundle2 = $ hg debuggetbundle repo bundle -t bundle2 $ hg debugbundle bundle Stream params: {} changegroup -- "{'version': '01'}" 7704483d56b2a7b5db54dcee7c62378ac629b348 29a4d1f17bd3f0779ca0525bebb1cfb51067c738 713346a995c363120712aed1aee7e04afd867638 d5f6e1ea452285324836a49d7d3c2a63cfed1d31 ff42371d57168345fdf1a3aac66a51f6a45d41d2 bac16991d12ff45f9dc43c52da1946dfadb83e80 6621d79f61b23ec74cf4b69464343d9e0980ec8b 8931463777131cd73923e560b760061f2aa8a4bc f34414c64173e0ecb61b25dc55e116dbbcc89bee 928b5f94cdb278bb536eba552de348a4e92ef24d 700b7e19db54103633c4bf4a6a6b6d55f4d50c03 63476832d8ec6558cf9bbe3cbe0c757e5cf18043 13c0170174366b441dc68e8e33757232fa744458 5686dbbd9fc46cb806599c878d02fe1cb56b83d3 8365676dbab05860ce0d9110f2af51368b961bbd 0b2f73f04880d9cb6a5cd8a757f0db0ad01e32c3 4801a72e5d88cb515b0c7e40fae34180f3f837f2 10c14a2cc935e1d8c31f9e98587dcf27fb08a6da = Test via HTTP = Get everything: $ hg serve -R repo -p $HGPORT -d --pid-file=hg.pid -E error.log -A access.log $ cat hg.pid >> $DAEMON_PIDS $ hg debuggetbundle http://localhost:$HGPORT/ bundle $ hg debugbundle bundle 7704483d56b2a7b5db54dcee7c62378ac629b348 29a4d1f17bd3f0779ca0525bebb1cfb51067c738 713346a995c363120712aed1aee7e04afd867638 d5f6e1ea452285324836a49d7d3c2a63cfed1d31 ff42371d57168345fdf1a3aac66a51f6a45d41d2 
bac16991d12ff45f9dc43c52da1946dfadb83e80 6621d79f61b23ec74cf4b69464343d9e0980ec8b 8931463777131cd73923e560b760061f2aa8a4bc f34414c64173e0ecb61b25dc55e116dbbcc89bee 928b5f94cdb278bb536eba552de348a4e92ef24d 700b7e19db54103633c4bf4a6a6b6d55f4d50c03 63476832d8ec6558cf9bbe3cbe0c757e5cf18043 13c0170174366b441dc68e8e33757232fa744458 5686dbbd9fc46cb806599c878d02fe1cb56b83d3 8365676dbab05860ce0d9110f2af51368b961bbd 0b2f73f04880d9cb6a5cd8a757f0db0ad01e32c3 4801a72e5d88cb515b0c7e40fae34180f3f837f2 10c14a2cc935e1d8c31f9e98587dcf27fb08a6da Get parts of two branches: $ hg debuggetbundle http://localhost:$HGPORT/ bundle -H 13c0170174366b441dc68e8e33757232fa744458 -C 700b7e19db54103633c4bf4a6a6b6d55f4d50c03 -H bac16991d12ff45f9dc43c52da1946dfadb83e80 -C d5f6e1ea452285324836a49d7d3c2a63cfed1d31 $ hg debugbundle bundle ff42371d57168345fdf1a3aac66a51f6a45d41d2 bac16991d12ff45f9dc43c52da1946dfadb83e80 63476832d8ec6558cf9bbe3cbe0c757e5cf18043 13c0170174366b441dc68e8e33757232fa744458 Check that we get all needed file changes: $ hg debugbundle bundle --all format: id, p1, p2, cset, delta base, len(delta) changelog ff42371d57168345fdf1a3aac66a51f6a45d41d2 d5f6e1ea452285324836a49d7d3c2a63cfed1d31 0000000000000000000000000000000000000000 ff42371d57168345fdf1a3aac66a51f6a45d41d2 d5f6e1ea452285324836a49d7d3c2a63cfed1d31 99 bac16991d12ff45f9dc43c52da1946dfadb83e80 ff42371d57168345fdf1a3aac66a51f6a45d41d2 0000000000000000000000000000000000000000 bac16991d12ff45f9dc43c52da1946dfadb83e80 ff42371d57168345fdf1a3aac66a51f6a45d41d2 99 63476832d8ec6558cf9bbe3cbe0c757e5cf18043 700b7e19db54103633c4bf4a6a6b6d55f4d50c03 0000000000000000000000000000000000000000 63476832d8ec6558cf9bbe3cbe0c757e5cf18043 bac16991d12ff45f9dc43c52da1946dfadb83e80 102 13c0170174366b441dc68e8e33757232fa744458 63476832d8ec6558cf9bbe3cbe0c757e5cf18043 0000000000000000000000000000000000000000 13c0170174366b441dc68e8e33757232fa744458 63476832d8ec6558cf9bbe3cbe0c757e5cf18043 102 manifest dac7984588fc4eea7acbf39693a9c1b06f5b175d 
591f732a3faf1fb903815273f3c199a514a61ccb 0000000000000000000000000000000000000000 ff42371d57168345fdf1a3aac66a51f6a45d41d2 591f732a3faf1fb903815273f3c199a514a61ccb 113 0772616e6b48a76afb6c1458e193cbb3dae2e4ff dac7984588fc4eea7acbf39693a9c1b06f5b175d 0000000000000000000000000000000000000000 bac16991d12ff45f9dc43c52da1946dfadb83e80 dac7984588fc4eea7acbf39693a9c1b06f5b175d 113 eb498cd9af6c44108e43041e951ce829e29f6c80 bff2f4817ced57b386caf7c4e3e36a4bc9af7e93 0000000000000000000000000000000000000000 63476832d8ec6558cf9bbe3cbe0c757e5cf18043 0772616e6b48a76afb6c1458e193cbb3dae2e4ff 295 b15709c071ddd2d93188508ba156196ab4f19620 eb498cd9af6c44108e43041e951ce829e29f6c80 0000000000000000000000000000000000000000 13c0170174366b441dc68e8e33757232fa744458 eb498cd9af6c44108e43041e951ce829e29f6c80 114 mf 4f73f97080266ab8e0c0561ca8d0da3eaf65b695 301ca08d026bb72cb4258a9d211bdf7ca0bcd810 0000000000000000000000000000000000000000 ff42371d57168345fdf1a3aac66a51f6a45d41d2 301ca08d026bb72cb4258a9d211bdf7ca0bcd810 17 c7b583de053293870e145f45bd2d61643563fd06 4f73f97080266ab8e0c0561ca8d0da3eaf65b695 0000000000000000000000000000000000000000 bac16991d12ff45f9dc43c52da1946dfadb83e80 4f73f97080266ab8e0c0561ca8d0da3eaf65b695 18 266ee3c0302a5a18f1cf96817ac79a51836179e9 edc0f6b8db80d68ae6aff2b19f7e5347ab68fa63 0000000000000000000000000000000000000000 63476832d8ec6558cf9bbe3cbe0c757e5cf18043 c7b583de053293870e145f45bd2d61643563fd06 149 698c6a36220548cd3903ca7dada27c59aa500c52 266ee3c0302a5a18f1cf96817ac79a51836179e9 0000000000000000000000000000000000000000 13c0170174366b441dc68e8e33757232fa744458 266ee3c0302a5a18f1cf96817ac79a51836179e9 19 nf11 33fbc651630ffa7ccbebfe4eb91320a873e7291c 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 63476832d8ec6558cf9bbe3cbe0c757e5cf18043 0000000000000000000000000000000000000000 16 nf12 ddce0544363f037e9fb889faca058f52dc01c0a5 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 
13c0170174366b441dc68e8e33757232fa744458 0000000000000000000000000000000000000000 16 nf4 3c1407305701051cbed9f9cb9a68bdfb5997c235 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 ff42371d57168345fdf1a3aac66a51f6a45d41d2 0000000000000000000000000000000000000000 15 nf5 0dbd89c185f53a1727c54cd1ce256482fa23968e 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 bac16991d12ff45f9dc43c52da1946dfadb83e80 0000000000000000000000000000000000000000 15 Verify we hit the HTTP server: $ cat access.log * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) * - - [*] "GET /?cmd=getbundle HTTP/1.1" 200 - (glob) * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) * - - [*] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:common=700b7e19db54103633c4bf4a6a6b6d55f4d50c03+d5f6e1ea452285324836a49d7d3c2a63cfed1d31&heads=13c0170174366b441dc68e8e33757232fa744458+bac16991d12ff45f9dc43c52da1946dfadb83e80 (glob) $ cat error.log mercurial-3.7.3/tests/test-committer.t0000644000175000017500000000503012676531525017434 0ustar mpmmpm00000000000000 $ unset HGUSER $ EMAIL="My Name " $ export EMAIL $ hg init test $ cd test $ touch asdf $ hg add asdf $ hg commit -m commit-1 $ hg tip changeset: 0:53f268a58230 tag: tip user: My Name date: Thu Jan 01 00:00:00 1970 +0000 summary: commit-1 $ unset EMAIL $ echo 1234 > asdf $ hg commit -u "foo@bar.com" -m commit-1 $ hg tip changeset: 1:3871b2a9e9bf tag: tip user: foo@bar.com date: Thu Jan 01 00:00:00 1970 +0000 summary: commit-1 $ echo "[ui]" >> .hg/hgrc $ echo "username = foobar " >> .hg/hgrc $ echo 12 > asdf $ hg commit -m commit-1 $ hg tip changeset: 2:8eeac6695c1c tag: tip user: foobar date: Thu Jan 01 00:00:00 1970 +0000 summary: commit-1 $ echo 1 > asdf $ hg commit -u "foo@bar.com" -m commit-1 $ hg tip changeset: 3:957606a725e4 tag: tip user: foo@bar.com date: Thu Jan 01 00:00:00 1970 +0000 summary: commit-1 $ echo 123 > asdf $ echo "[ui]" > .hg/hgrc $ echo "username = " >> .hg/hgrc $ 
hg commit -m commit-1 abort: no username supplied (use "hg config --edit" to set your username) [255] # test alternate config var $ echo 1234 > asdf $ echo "[ui]" > .hg/hgrc $ echo "user = Foo Bar II " >> .hg/hgrc $ hg commit -m commit-1 $ hg tip changeset: 4:6f24bfb4c617 tag: tip user: Foo Bar II date: Thu Jan 01 00:00:00 1970 +0000 summary: commit-1 # test prompt username $ cat > .hg/hgrc < [ui] > askusername = True > EOF $ echo 12345 > asdf $ hg commit --config ui.interactive=False -m ask enter a commit username: no username found, using '[^']*' instead (re) $ hg rollback -q $ hg commit --config ui.interactive=True -m ask < Asked User > EOF enter a commit username: Asked User $ hg tip changeset: 5:84c91d963b70 tag: tip user: Asked User date: Thu Jan 01 00:00:00 1970 +0000 summary: ask # test no .hg/hgrc (uses generated non-interactive username) $ echo space > asdf $ rm .hg/hgrc $ hg commit -m commit-1 2>&1 no username found, using '[^']*' instead (re) $ echo space2 > asdf $ hg commit -u ' ' -m commit-1 transaction abort! rollback completed abort: empty username! [255] # don't add tests here, previous test is unstable $ cd .. 
mercurial-3.7.3/tests/get-with-headers.py0000755000175000017500000000420212676531525020005 0ustar mpmmpm00000000000000#!/usr/bin/env python """This does HTTP GET requests given a host:port and path and returns a subset of the headers plus the body of the result.""" from __future__ import absolute_import import httplib import json import os import sys try: import msvcrt msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY) msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY) except ImportError: pass twice = False if '--twice' in sys.argv: sys.argv.remove('--twice') twice = True headeronly = False if '--headeronly' in sys.argv: sys.argv.remove('--headeronly') headeronly = True formatjson = False if '--json' in sys.argv: sys.argv.remove('--json') formatjson = True tag = None def request(host, path, show): assert not path.startswith('/'), path global tag headers = {} if tag: headers['If-None-Match'] = tag conn = httplib.HTTPConnection(host) conn.request("GET", '/' + path, None, headers) response = conn.getresponse() print response.status, response.reason if show[:1] == ['-']: show = sorted(h for h, v in response.getheaders() if h.lower() not in show) for h in [h.lower() for h in show]: if response.getheader(h, None) is not None: print "%s: %s" % (h, response.getheader(h)) if not headeronly: print data = response.read() # Pretty print JSON. This also has the beneficial side-effect # of verifying emitted JSON is well-formed. if formatjson: # json.dumps() will print trailing newlines. Eliminate them # to make tests easier to write. 
data = json.loads(data) lines = json.dumps(data, sort_keys=True, indent=2).splitlines() for line in lines: print line.rstrip() else: sys.stdout.write(data) if twice and response.getheader('ETag', None): tag = response.getheader('ETag') return response.status status = request(sys.argv[1], sys.argv[2], sys.argv[3:]) if twice: status = request(sys.argv[1], sys.argv[2], sys.argv[3:]) if 200 <= status <= 305: sys.exit(0) sys.exit(1) mercurial-3.7.3/tests/test-addremove-similar.t0000644000175000017500000000410612676531525021040 0ustar mpmmpm00000000000000 $ hg init rep; cd rep $ touch empty-file $ $PYTHON -c 'for x in range(10000): print x' > large-file $ hg addremove adding empty-file adding large-file $ hg commit -m A $ rm large-file empty-file $ $PYTHON -c 'for x in range(10,10000): print x' > another-file $ hg addremove -s50 adding another-file removing empty-file removing large-file recording removal of large-file as rename to another-file (99% similar) $ hg commit -m B comparing two empty files caused ZeroDivisionError in the past $ hg update -C 0 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ rm empty-file $ touch another-empty-file $ hg addremove -s50 adding another-empty-file removing empty-file $ cd .. $ hg init rep2; cd rep2 $ $PYTHON -c 'for x in range(10000): print x' > large-file $ $PYTHON -c 'for x in range(50): print x' > tiny-file $ hg addremove adding large-file adding tiny-file $ hg commit -m A $ $PYTHON -c 'for x in range(70): print x' > small-file $ rm tiny-file $ rm large-file $ hg addremove -s50 removing large-file adding small-file removing tiny-file recording removal of tiny-file as rename to small-file (82% similar) $ hg commit -m B should all fail $ hg addremove -s foo abort: similarity must be a number [255] $ hg addremove -s -1 abort: similarity must be between 0 and 100 [255] $ hg addremove -s 1e6 abort: similarity must be between 0 and 100 [255] $ cd .. 
Issue1527: repeated addremove causes Abort $ hg init rep3; cd rep3 $ mkdir d $ echo a > d/a $ hg add d/a $ hg commit -m 1 $ mv d/a d/b $ hg addremove -s80 removing d/a adding d/b recording removal of d/a as rename to d/b (100% similar) (glob) $ hg debugstate r 0 0 1970-01-01 00:00:00 d/a a 0 -1 unset d/b copy: d/a -> d/b $ mv d/b c no copies found here (since the target isn't in d $ hg addremove -s80 d removing d/b (glob) copies here $ hg addremove -s80 adding c recording removal of d/a as rename to c (100% similar) (glob) $ cd .. mercurial-3.7.3/tests/test-patch-offset.t0000644000175000017500000000320612676531525020017 0ustar mpmmpm00000000000000 $ cat > writepatterns.py < import sys > > path = sys.argv[1] > patterns = sys.argv[2:] > > fp = file(path, 'wb') > for pattern in patterns: > count = int(pattern[0:-1]) > char = pattern[-1] + '\n' > fp.write(char*count) > fp.close() > EOF prepare repo $ hg init a $ cd a These initial lines of Xs were not in the original file used to generate the patch. So all the patch hunks need to be applied to a constant offset within this file. If the offset isn't tracked then the hunks can be applied to the wrong lines of this file. $ python ../writepatterns.py a 34X 10A 1B 10A 1C 10A 1B 10A 1D 10A 1B 10A 1E 10A 1B 10A $ hg commit -Am adda adding a This is a cleaner patch generated via diff In this case it reproduces the problem when the output of hg export does not import patch $ hg import -v -m 'b' -d '2 0' - < --- a/a 2009-12-08 19:26:17.000000000 -0800 > +++ b/a 2009-12-08 19:26:17.000000000 -0800 > @@ -9,7 +9,7 @@ > A > A > B > -A > +a > A > A > A > @@ -53,7 +53,7 @@ > A > A > B > -A > +a > A > A > A > @@ -75,7 +75,7 @@ > A > A > B > -A > +a > A > A > A > EOF applying patch from stdin patching file a Hunk #1 succeeded at 43 (offset 34 lines). Hunk #2 succeeded at 87 (offset 34 lines). Hunk #3 succeeded at 109 (offset 34 lines). 
committing files: a committing manifest committing changelog created 189885cecb41 compare imported changes against reference file $ python ../writepatterns.py aref 34X 10A 1B 1a 9A 1C 10A 1B 10A 1D 10A 1B 1a 9A 1E 10A 1B 1a 9A $ diff aref a $ cd .. mercurial-3.7.3/tests/test-abort-checkin.t0000644000175000017500000000154612676531525020152 0ustar mpmmpm00000000000000 $ cat > abortcommit.py < from mercurial import error > def hook(**args): > raise error.Abort("no commits allowed") > def reposetup(ui, repo): > repo.ui.setconfig("hooks", "pretxncommit.nocommits", hook) > EOF $ abspath=`pwd`/abortcommit.py $ cat <> $HGRCPATH > [extensions] > mq = > abortcommit = $abspath > EOF $ hg init foo $ cd foo $ echo foo > foo $ hg add foo mq may keep a reference to the repository so __del__ will not be called and .hg/journal.dirstate will not be deleted: $ hg ci -m foo error: pretxncommit.nocommits hook failed: no commits allowed transaction abort! rollback completed abort: no commits allowed [255] $ hg ci -m foo error: pretxncommit.nocommits hook failed: no commits allowed transaction abort! rollback completed abort: no commits allowed [255] $ cd .. mercurial-3.7.3/tests/test-mq-qimport-fail-cleanup.t0000644000175000017500000000140412676531525022076 0ustar mpmmpm00000000000000Failed qimport of patches from files should cleanup by recording successfully imported patches in series file. $ echo "[extensions]" >> $HGRCPATH $ echo "mq=" >> $HGRCPATH $ hg init repo $ cd repo $ echo a > a $ hg ci -Am'add a' adding a $ cat >b.patch< diff --git a/a b/a > --- a/a > +++ b/a > @@ -1,1 +1,2 @@ > a > +b > EOF empty series $ hg qseries qimport valid patch followed by invalid patch $ hg qimport b.patch fakepatch adding b.patch to series file abort: unable to read file fakepatch [255] valid patches before fail added to series $ hg qseries b.patch $ hg pull -q -r 0 . # update phase $ hg qimport -r 0 abort: revision 0 is not mutable (see "hg help phases" for details) [255] $ cd .. 
mercurial-3.7.3/tests/svnxml.py0000644000175000017500000000322712676531525016176 0ustar mpmmpm00000000000000# Read the output of a "svn log --xml" command on stdin, parse it and # print a subset of attributes common to all svn versions tested by # hg. import xml.dom.minidom, sys def xmltext(e): return ''.join(c.data for c in e.childNodes if c.nodeType == c.TEXT_NODE) def parseentry(entry): e = {} e['revision'] = entry.getAttribute('revision') e['author'] = xmltext(entry.getElementsByTagName('author')[0]) e['msg'] = xmltext(entry.getElementsByTagName('msg')[0]) e['paths'] = [] paths = entry.getElementsByTagName('paths') if paths: paths = paths[0] for p in paths.getElementsByTagName('path'): action = p.getAttribute('action') path = xmltext(p) frompath = p.getAttribute('copyfrom-path') fromrev = p.getAttribute('copyfrom-rev') e['paths'].append((path, action, frompath, fromrev)) return e def parselog(data): entries = [] doc = xml.dom.minidom.parseString(data) for e in doc.getElementsByTagName('logentry'): entries.append(parseentry(e)) return entries def printentries(entries): fp = sys.stdout for e in entries: for k in ('revision', 'author', 'msg'): fp.write(('%s: %s\n' % (k, e[k])).encode('utf-8')) for path, action, fpath, frev in sorted(e['paths']): frominfo = '' if frev: frominfo = ' (from %s@%s)' % (fpath, frev) p = ' %s %s%s\n' % (action, path, frominfo) fp.write(p.encode('utf-8')) if __name__ == '__main__': data = sys.stdin.read() entries = parselog(data) printentries(entries) mercurial-3.7.3/tests/test-1102.t0000644000175000017500000000054212676531525016017 0ustar mpmmpm00000000000000 $ rm -rf a $ hg init a $ cd a $ echo a > a $ hg ci -Am0 adding a $ hg tag t1 # 1 $ hg tag --remove t1 # 2 $ hg co 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg tag -f -r0 t1 $ hg tags tip 3:a49829c4fc11 t1 0:f7b1eb17ad24 $ cd .. 
mercurial-3.7.3/tests/test-convert-bzr-merges.t0000644000175000017500000000343512676531525021173 0ustar mpmmpm00000000000000#require bzr N.B. bzr 1.13 has a bug that breaks this test. If you see this test fail, check your bzr version. Upgrading to bzr 1.13.1 should fix it. $ . "$TESTDIR/bzr-definitions" test multiple merges at once $ mkdir test-multimerge $ cd test-multimerge $ bzr init -q source $ cd source $ echo content > file $ bzr add -q file $ bzr commit -q -m 'Initial add' $ cd .. $ bzr branch -q source source-branch1 $ cd source-branch1 $ echo morecontent >> file $ echo evenmorecontent > file-branch1 $ bzr add -q file-branch1 $ bzr commit -q -m 'Added branch1 file' $ cd ../source $ sleep 1 $ echo content > file-parent $ bzr add -q file-parent $ bzr commit -q -m 'Added parent file' $ cd .. $ bzr branch -q source source-branch2 $ cd source-branch2 $ echo somecontent > file-branch2 $ bzr add -q file-branch2 $ bzr commit -q -m 'Added brach2 file' $ sleep 1 $ cd ../source $ bzr merge -q ../source-branch1 $ bzr merge -q --force ../source-branch2 $ bzr commit -q -m 'Merged branches' $ cd .. $ hg convert --datesort source source-hg initializing destination source-hg repository scanning source... sorting... converting... 4 Initial add 3 Added branch1 file 2 Added parent file 1 Added brach2 file 0 Merged branches $ glog -R source-hg o 5@source "(octopus merge fixup)" files: |\ | o 4@source "Merged branches" files: file-branch2 | |\ o---+ 3@source-branch2 "Added brach2 file" files: file-branch2 / / | o 2@source "Added parent file" files: file-parent | | o | 1@source-branch1 "Added branch1 file" files: file file-branch1 |/ o 0@source "Initial add" files: file $ manifest source-hg tip % manifest of tip 644 file 644 file-branch1 644 file-branch2 644 file-parent $ cd .. 
mercurial-3.7.3/tests/test-issue619.t0000644000175000017500000000135512676531525017027 0ustar mpmmpm00000000000000https://bz.mercurial-scm.org/619 $ hg init $ echo a > a $ hg ci -Ama adding a $ echo b > b $ hg branch b marked working directory as branch b (branches are permanent and global, did you want a bookmark?) $ hg ci -Amb adding b $ hg co -C 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved Fast-forward: $ hg merge b 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -Ammerge Bogus fast-forward should fail: $ hg merge b abort: merging with a working directory ancestor has no effect [255] Even with strange revset (issue4465) $ hg merge ::. abort: merging with a working directory ancestor has no effect [255] mercurial-3.7.3/tests/test-trusted.py0000644000175000017500000001332112676531525017312 0ustar mpmmpm00000000000000# Since it's not easy to write a test that portably deals # with files from different users/groups, we cheat a bit by # monkey-patching some functions in the util module import os from mercurial import ui, util, error hgrc = os.environ['HGRCPATH'] f = open(hgrc) basehgrc = f.read() f.close() def testui(user='foo', group='bar', tusers=(), tgroups=(), cuser='foo', cgroup='bar', debug=False, silent=False, report=True): # user, group => owners of the file # tusers, tgroups => trusted users/groups # cuser, cgroup => user/group of the current process # write a global hgrc with the list of trusted users/groups and # some setting so that we can be sure it was read f = open(hgrc, 'w') f.write(basehgrc) f.write('\n[paths]\n') f.write('global = /some/path\n\n') if tusers or tgroups: f.write('[trusted]\n') if tusers: f.write('users = %s\n' % ', '.join(tusers)) if tgroups: f.write('groups = %s\n' % ', '.join(tgroups)) f.close() # override the functions that give names to uids and gids def username(uid=None): if uid is None: return cuser return user util.username = username 
def groupname(gid=None): if gid is None: return 'bar' return group util.groupname = groupname def isowner(st): return user == cuser util.isowner = isowner # try to read everything #print '# File belongs to user %s, group %s' % (user, group) #print '# trusted users = %s; trusted groups = %s' % (tusers, tgroups) kind = ('different', 'same') who = ('', 'user', 'group', 'user and the group') trusted = who[(user in tusers) + 2*(group in tgroups)] if trusted: trusted = ', but we trust the ' + trusted print '# %s user, %s group%s' % (kind[user == cuser], kind[group == cgroup], trusted) u = ui.ui() u.setconfig('ui', 'debug', str(bool(debug))) u.setconfig('ui', 'report_untrusted', str(bool(report))) u.readconfig('.hg/hgrc') if silent: return u print 'trusted' for name, path in u.configitems('paths'): print ' ', name, '=', path print 'untrusted' for name, path in u.configitems('paths', untrusted=True): print '.', u.config('paths', name) # warning with debug=True print '.', u.config('paths', name, untrusted=True) # no warnings print name, '=', path print return u os.mkdir('repo') os.chdir('repo') os.mkdir('.hg') f = open('.hg/hgrc', 'w') f.write('[paths]\n') f.write('local = /another/path\n\n') f.close() #print '# Everything is run by user foo, group bar\n' # same user, same group testui() # same user, different group testui(group='def') # different user, same group testui(user='abc') # ... but we trust the group testui(user='abc', tgroups=['bar']) # different user, different group testui(user='abc', group='def') # ... but we trust the user testui(user='abc', group='def', tusers=['abc']) # ... but we trust the group testui(user='abc', group='def', tgroups=['def']) # ... but we trust the user and the group testui(user='abc', group='def', tusers=['abc'], tgroups=['def']) # ... but we trust all users print '# we trust all users' testui(user='abc', group='def', tusers=['*']) # ... 
but we trust all groups print '# we trust all groups' testui(user='abc', group='def', tgroups=['*']) # ... but we trust the whole universe print '# we trust all users and groups' testui(user='abc', group='def', tusers=['*'], tgroups=['*']) # ... check that users and groups are in different namespaces print "# we don't get confused by users and groups with the same name" testui(user='abc', group='def', tusers=['def'], tgroups=['abc']) # ... lists of user names work print "# list of user names" testui(user='abc', group='def', tusers=['foo', 'xyz', 'abc', 'bleh'], tgroups=['bar', 'baz', 'qux']) # ... lists of group names work print "# list of group names" testui(user='abc', group='def', tusers=['foo', 'xyz', 'bleh'], tgroups=['bar', 'def', 'baz', 'qux']) print "# Can't figure out the name of the user running this process" testui(user='abc', group='def', cuser=None) print "# prints debug warnings" u = testui(user='abc', group='def', cuser='foo', debug=True) print "# report_untrusted enabled without debug hides warnings" u = testui(user='abc', group='def', cuser='foo', report=False) print "# report_untrusted enabled with debug shows warnings" u = testui(user='abc', group='def', cuser='foo', debug=True, report=False) print "# ui.readconfig sections" filename = 'foobar' f = open(filename, 'w') f.write('[foobar]\n') f.write('baz = quux\n') f.close() u.readconfig(filename, sections=['foobar']) print u.config('foobar', 'baz') print print "# read trusted, untrusted, new ui, trusted" u = ui.ui() u.setconfig('ui', 'debug', 'on') u.readconfig(filename) u2 = u.copy() def username(uid=None): return 'foo' util.username = username u2.readconfig('.hg/hgrc') print 'trusted:' print u2.config('foobar', 'baz') print 'untrusted:' print u2.config('foobar', 'baz', untrusted=True) print print "# error handling" def assertraises(f, exc=error.Abort): try: f() except exc as inst: print 'raised', inst.__class__.__name__ else: print 'no exception?!' 
print "# file doesn't exist" os.unlink('.hg/hgrc') assert not os.path.exists('.hg/hgrc') testui(debug=True, silent=True) testui(user='abc', group='def', debug=True, silent=True) print print "# parse error" f = open('.hg/hgrc', 'w') f.write('foo') f.close() try: testui(user='abc', group='def', silent=True) except error.ParseError as inst: print inst try: testui(debug=True, silent=True) except error.ParseError as inst: print inst mercurial-3.7.3/tests/test-obsolete-changeset-exchange.t0000644000175000017500000001113512676531525022767 0ustar mpmmpm00000000000000Test changesets filtering during exchanges (some tests are still in test-obsolete.t) $ cat >> $HGRCPATH << EOF > [experimental] > evolution=createmarkers > EOF Push does not corrupt remote ---------------------------- Create a DAG where a changeset reuses a revision from a file first used in an extinct changeset. $ hg init local $ cd local $ echo 'base' > base $ hg commit -Am base adding base $ echo 'A' > A $ hg commit -Am A adding A $ hg up 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg revert -ar 1 adding A $ hg commit -Am "A'" created new head $ hg log -G --template='{desc} {node}' @ A' f89bcc95eba5174b1ccc3e33a82e84c96e8338ee | | o A 9d73aac1b2ed7d53835eaeec212ed41ea47da53a |/ o base d20a80d4def38df63a4b330b7fb688f3d4cae1e3 $ hg debugobsolete 9d73aac1b2ed7d53835eaeec212ed41ea47da53a f89bcc95eba5174b1ccc3e33a82e84c96e8338ee Push it. The bundle should not refer to the extinct changeset. 
$ hg init ../other $ hg push ../other pushing to ../other searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files $ hg -R ../other verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 2 changesets, 2 total revisions Adding a changeset going extinct locally ------------------------------------------ Pull a changeset that will immediatly goes extinct (because you already have a marker to obsolete him) (test resolution of issue3788) $ hg phase --draft --force f89bcc95eba5 $ hg phase -R ../other --draft --force f89bcc95eba5 $ hg commit --amend -m "A''" $ hg --hidden --config extensions.mq= strip --no-backup f89bcc95eba5 $ hg pull ../other pulling from ../other searching for changes adding changesets adding manifests adding file changes added 1 changesets with 0 changes to 1 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) check that bundle is not affected $ hg bundle --hidden --rev f89bcc95eba5 --base "f89bcc95eba5^" ../f89bcc95eba5.hg 1 changesets found $ hg --hidden --config extensions.mq= strip --no-backup f89bcc95eba5 $ hg unbundle ../f89bcc95eba5.hg adding changesets adding manifests adding file changes added 1 changesets with 0 changes to 1 files (+1 heads) (run 'hg heads' to see heads) $ cd .. pull does not fetch excessive changesets when common node is hidden (issue4982) ------------------------------------------------------------------------------- initial repo with server and client matching $ hg init pull-hidden-common $ cd pull-hidden-common $ touch foo $ hg -q commit -A -m initial $ echo 1 > foo $ hg commit -m 1 $ echo 2a > foo $ hg commit -m 2a $ cd .. 
$ hg clone --pull pull-hidden-common pull-hidden-common-client requesting all changes adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved server obsoletes the old head $ cd pull-hidden-common $ hg -q up -r 1 $ echo 2b > foo $ hg -q commit -m 2b $ hg debugobsolete 6a29ed9c68defff1a139e5c6fa9696fb1a75783d bec0734cd68e84477ba7fc1d13e6cff53ab70129 $ cd .. client only pulls down 1 changeset $ cd pull-hidden-common-client $ hg pull --debug pulling from $TESTTMP/pull-hidden-common (glob) query 1; heads searching for changes taking quick initial sample query 2; still undecided: 2, sample size is: 2 2 total queries 1 changesets found list of changesets: bec0734cd68e84477ba7fc1d13e6cff53ab70129 listing keys for "phase" listing keys for "bookmarks" bundle2-output-bundle: "HG20", 3 parts total bundle2-output-part: "changegroup" (params: 1 mandatory 1 advisory) streamed payload bundle2-output-part: "listkeys" (params: 1 mandatory) empty payload bundle2-output-part: "listkeys" (params: 1 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "changegroup" (params: 1 mandatory 1 advisory) supported adding changesets add changeset bec0734cd68e adding manifests adding file changes adding foo revisions added 1 changesets with 1 changes to 1 files (+1 heads) bundle2-input-part: total payload size 474 bundle2-input-part: "listkeys" (params: 1 mandatory) supported bundle2-input-part: "listkeys" (params: 1 mandatory) supported bundle2-input-bundle: 2 parts total checking for updated bookmarks listing keys for "phases" updating the branch cache (run 'hg heads' to see heads, 'hg merge' to merge) mercurial-3.7.3/tests/test-walkrepo.py0000644000175000017500000000360612676531525017451 0ustar mpmmpm00000000000000from __future__ import absolute_import import os from mercurial import ( hg, scmutil, ui, util, ) chdir = 
os.chdir mkdir = os.mkdir pjoin = os.path.join walkrepos = scmutil.walkrepos checklink = util.checklink u = ui.ui() sym = checklink('.') hg.repository(u, 'top1', create=1) mkdir('subdir') chdir('subdir') hg.repository(u, 'sub1', create=1) mkdir('subsubdir') chdir('subsubdir') hg.repository(u, 'subsub1', create=1) chdir(os.path.pardir) if sym: os.symlink(os.path.pardir, 'circle') os.symlink(pjoin('subsubdir', 'subsub1'), 'subsub1') def runtest(): reposet = frozenset(walkrepos('.', followsym=True)) if sym and (len(reposet) != 3): print "reposet = %r" % (reposet,) print ("Found %d repositories when I should have found 3" % (len(reposet),)) if (not sym) and (len(reposet) != 2): print "reposet = %r" % (reposet,) print ("Found %d repositories when I should have found 2" % (len(reposet),)) sub1set = frozenset((pjoin('.', 'sub1'), pjoin('.', 'circle', 'subdir', 'sub1'))) if len(sub1set & reposet) != 1: print "sub1set = %r" % (sub1set,) print "reposet = %r" % (reposet,) print "sub1set and reposet should have exactly one path in common." sub2set = frozenset((pjoin('.', 'subsub1'), pjoin('.', 'subsubdir', 'subsub1'))) if len(sub2set & reposet) != 1: print "sub2set = %r" % (sub2set,) print "reposet = %r" % (reposet,) print "sub2set and reposet should have exactly one path in common." sub3 = pjoin('.', 'circle', 'top1') if sym and sub3 not in reposet: print "reposet = %r" % (reposet,) print "Symbolic links are supported and %s is not in reposet" % (sub3,) runtest() if sym: # Simulate not having symlinks. 
del os.path.samestat sym = False runtest() mercurial-3.7.3/tests/test-strip-cross.t0000644000175000017500000001247212676531525017731 0ustar mpmmpm00000000000000test stripping of filelogs where the linkrev doesn't always increase $ echo '[extensions]' >> $HGRCPATH $ echo 'strip =' >> $HGRCPATH $ hg init orig $ cd orig $ commit() > { > hg up -qC null > count=1 > for i in "$@"; do > for f in $i; do > echo $count > $f > done > count=`expr $count + 1` > done > hg commit -qAm "$*" > } 2 1 0 2 0 1 2 $ commit '201 210' $ commit '102 120' '210' $ commit '021' $ commit '201' '021 120' $ commit '012 021' '102 201' '120 210' $ commit 'manifest-file' $ commit '102 120' '012 210' '021 201' $ commit '201 210' '021 120' '012 102' $ HGUSER=another-user; export HGUSER $ commit 'manifest-file' $ commit '012' 'manifest-file' $ cd .. $ hg clone -q -U -r -1 -r -2 -r -3 -r -4 -r -6 orig crossed $ cd crossed $ hg debugindex --manifest rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 112 ..... 0 6f105cbb914d 000000000000 000000000000 (re) 1 112 56 ..... 3 1b55917b3699 000000000000 000000000000 (re) 2 168 123 ..... 1 8f3d04e263e5 000000000000 000000000000 (re) 3 291 122 ..... 2 f0ef8726ac4f 000000000000 000000000000 (re) 4 413 87 ..... 4 0b76e38b4070 000000000000 000000000000 (re) $ for i in 012 021 102 120 201 210 manifest-file; do > echo $i > hg debugindex $i > echo > done 012 rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 3 ..... 0 b8e02f643373 000000000000 000000000000 (re) 1 3 3 ..... 1 5d9299349fc0 000000000000 000000000000 (re) 2 6 3 ..... 2 2661d26c6496 000000000000 000000000000 (re) 021 rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 3 ..... 0 b8e02f643373 000000000000 000000000000 (re) 1 3 3 ..... 2 5d9299349fc0 000000000000 000000000000 (re) 2 6 3 ..... 1 2661d26c6496 000000000000 000000000000 (re) 102 rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 3 ..... 1 b8e02f643373 000000000000 000000000000 (re) 1 3 3 ..... 
0 5d9299349fc0 000000000000 000000000000 (re) 2 6 3 ..... 2 2661d26c6496 000000000000 000000000000 (re) 120 rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 3 ..... 1 b8e02f643373 000000000000 000000000000 (re) 1 3 3 ..... 2 5d9299349fc0 000000000000 000000000000 (re) 2 6 3 ..... 0 2661d26c6496 000000000000 000000000000 (re) 201 rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 3 ..... 2 b8e02f643373 000000000000 000000000000 (re) 1 3 3 ..... 0 5d9299349fc0 000000000000 000000000000 (re) 2 6 3 ..... 1 2661d26c6496 000000000000 000000000000 (re) 210 rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 3 ..... 2 b8e02f643373 000000000000 000000000000 (re) 1 3 3 ..... 1 5d9299349fc0 000000000000 000000000000 (re) 2 6 3 ..... 0 2661d26c6496 000000000000 000000000000 (re) manifest-file rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 3 ..... 3 b8e02f643373 000000000000 000000000000 (re) 1 3 3 ..... 4 5d9299349fc0 000000000000 000000000000 (re) $ cd .. $ for i in 0 1 2 3 4; do > hg clone -q -U --pull crossed $i > echo "% Trying to strip revision $i" > hg --cwd $i strip $i > echo "% Verifying" > hg --cwd $i verify > echo > done % Trying to strip revision 0 saved backup bundle to $TESTTMP/0/.hg/strip-backup/*-backup.hg (glob) % Verifying checking changesets checking manifests crosschecking files in changesets and manifests checking files 7 files, 4 changesets, 15 total revisions % Trying to strip revision 1 saved backup bundle to $TESTTMP/1/.hg/strip-backup/*-backup.hg (glob) % Verifying checking changesets checking manifests crosschecking files in changesets and manifests checking files 7 files, 4 changesets, 14 total revisions % Trying to strip revision 2 saved backup bundle to $TESTTMP/2/.hg/strip-backup/*-backup.hg (glob) % Verifying checking changesets checking manifests crosschecking files in changesets and manifests checking files 7 files, 4 changesets, 14 total revisions % Trying to strip revision 3 saved backup bundle to 
$TESTTMP/3/.hg/strip-backup/*-backup.hg (glob) % Verifying checking changesets checking manifests crosschecking files in changesets and manifests checking files 7 files, 4 changesets, 19 total revisions % Trying to strip revision 4 saved backup bundle to $TESTTMP/4/.hg/strip-backup/*-backup.hg (glob) % Verifying checking changesets checking manifests crosschecking files in changesets and manifests checking files 7 files, 4 changesets, 19 total revisions mercurial-3.7.3/tests/test-rebase-collapse.t0000644000175000017500000003256112676531525020503 0ustar mpmmpm00000000000000 $ cat >> $HGRCPATH < [extensions] > rebase= > mq= > > [phases] > publish=False > > [alias] > tglog = log -G --template "{rev}: '{desc}' {branches}\n" > tglogp = log -G --template "{rev}:{phase} '{desc}' {branches}\n" > EOF Create repo a: $ hg init a $ cd a $ hg unbundle "$TESTDIR/bundles/rebase.hg" adding changesets adding manifests adding file changes added 8 changesets with 7 changes to 7 files (+2 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg up tip 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg tglog @ 7: 'H' | | o 6: 'G' |/| o | 5: 'F' | | | o 4: 'E' |/ | o 3: 'D' | | | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' $ cd .. Rebasing B onto H and collapsing changesets with different phases: $ hg clone -q -u 3 a a1 $ cd a1 $ hg phase --force --secret 3 $ cat > $TESTTMP/editor.sh < echo "==== before editing" > cat \$1 > echo "====" > echo "edited manually" >> \$1 > EOF $ HGEDITOR="sh $TESTTMP/editor.sh" hg rebase --collapse --keepbranches -e rebasing 1:42ccdea3bb16 "B" rebasing 2:5fddd98957c8 "C" rebasing 3:32af7686d403 "D" ==== before editing Collapsed revision * B * C * D HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. 
HG: -- HG: user: Nicolas Dumazet HG: branch 'default' HG: added B HG: added C HG: added D ==== saved backup bundle to $TESTTMP/a1/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob) $ hg tglogp @ 5:secret 'Collapsed revision | * B | * C | * D | | | edited manually' o 4:draft 'H' | | o 3:draft 'G' |/| o | 2:draft 'F' | | | o 1:draft 'E' |/ o 0:draft 'A' $ hg manifest --rev tip A B C D F H $ cd .. Rebasing E onto H: $ hg clone -q -u . a a2 $ cd a2 $ hg phase --force --secret 6 $ hg rebase --source 4 --collapse rebasing 4:9520eea781bc "E" rebasing 6:eea13746799a "G" saved backup bundle to $TESTTMP/a2/.hg/strip-backup/9520eea781bc-fcd8edd4-backup.hg (glob) $ hg tglog o 6: 'Collapsed revision | * E | * G' @ 5: 'H' | o 4: 'F' | | o 3: 'D' | | | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' $ hg manifest --rev tip A E F H $ cd .. Rebasing G onto H with custom message: $ hg clone -q -u . a a3 $ cd a3 $ hg rebase --base 6 -m 'custom message' abort: message can only be specified with collapse [255] $ cat > $TESTTMP/checkeditform.sh < env | grep HGEDITFORM > true > EOF $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg rebase --source 4 --collapse -m 'custom message' -e rebasing 4:9520eea781bc "E" rebasing 6:eea13746799a "G" HGEDITFORM=rebase.collapse saved backup bundle to $TESTTMP/a3/.hg/strip-backup/9520eea781bc-fcd8edd4-backup.hg (glob) $ hg tglog o 6: 'custom message' | @ 5: 'H' | o 4: 'F' | | o 3: 'D' | | | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' $ hg manifest --rev tip A E F H $ cd .. 
Create repo b: $ hg init b $ cd b $ echo A > A $ hg ci -Am A adding A $ echo B > B $ hg ci -Am B adding B $ hg up -q 0 $ echo C > C $ hg ci -Am C adding C created new head $ hg merge 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ echo D > D $ hg ci -Am D adding D $ hg up -q 1 $ echo E > E $ hg ci -Am E adding E created new head $ echo F > F $ hg ci -Am F adding F $ hg merge 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m G $ hg up -q 0 $ echo H > H $ hg ci -Am H adding H created new head $ hg tglog @ 7: 'H' | | o 6: 'G' | |\ | | o 5: 'F' | | | | | o 4: 'E' | | | | o | 3: 'D' | |\| | o | 2: 'C' |/ / | o 1: 'B' |/ o 0: 'A' $ cd .. Rebase and collapse - more than one external (fail): $ hg clone -q -u . b b1 $ cd b1 $ hg rebase -s 2 --collapse abort: unable to collapse on top of 7, there is more than one external parent: 1, 5 [255] Rebase and collapse - E onto H: $ hg rebase -s 4 --collapse # root (4) is not a merge rebasing 4:8a5212ebc852 "E" rebasing 5:7f219660301f "F" rebasing 6:c772a8b2dc17 "G" saved backup bundle to $TESTTMP/b1/.hg/strip-backup/8a5212ebc852-75046b61-backup.hg (glob) $ hg tglog o 5: 'Collapsed revision |\ * E | | * F | | * G' | @ 4: 'H' | | o | 3: 'D' |\ \ | o | 2: 'C' | |/ o / 1: 'B' |/ o 0: 'A' $ hg manifest --rev tip A C D E F H $ cd .. Test that branchheads cache is updated correctly when doing a strip in which the parent of the ancestor node to be stripped does not become a head and also, the parent of a node that is a child of the node stripped becomes a head (node 3). The code is now much simpler and we could just test a simpler scenario We keep it the test this way in case new complexity is injected. $ hg clone -q -u . 
b b2 $ cd b2 $ hg heads --template="{rev}:{node} {branch}\n" 7:c65502d4178782309ce0574c5ae6ee9485a9bafa default 6:c772a8b2dc17629cec88a19d09c926c4814b12c7 default $ cat $TESTTMP/b2/.hg/cache/branch2-served c65502d4178782309ce0574c5ae6ee9485a9bafa 7 c772a8b2dc17629cec88a19d09c926c4814b12c7 o default c65502d4178782309ce0574c5ae6ee9485a9bafa o default $ hg strip 4 saved backup bundle to $TESTTMP/b2/.hg/strip-backup/8a5212ebc852-75046b61-backup.hg (glob) $ cat $TESTTMP/b2/.hg/cache/branch2-served c65502d4178782309ce0574c5ae6ee9485a9bafa 4 2870ad076e541e714f3c2bc32826b5c6a6e5b040 o default c65502d4178782309ce0574c5ae6ee9485a9bafa o default $ hg heads --template="{rev}:{node} {branch}\n" 4:c65502d4178782309ce0574c5ae6ee9485a9bafa default 3:2870ad076e541e714f3c2bc32826b5c6a6e5b040 default $ cd .. Create repo c: $ hg init c $ cd c $ echo A > A $ hg ci -Am A adding A $ echo B > B $ hg ci -Am B adding B $ hg up -q 0 $ echo C > C $ hg ci -Am C adding C created new head $ hg merge 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ echo D > D $ hg ci -Am D adding D $ hg up -q 1 $ echo E > E $ hg ci -Am E adding E created new head $ echo F > E $ hg ci -m 'F' $ echo G > G $ hg ci -Am G adding G $ hg merge 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m H $ hg up -q 0 $ echo I > I $ hg ci -Am I adding I created new head $ hg tglog @ 8: 'I' | | o 7: 'H' | |\ | | o 6: 'G' | | | | | o 5: 'F' | | | | | o 4: 'E' | | | | o | 3: 'D' | |\| | o | 2: 'C' |/ / | o 1: 'B' |/ o 0: 'A' $ cd .. Rebase and collapse - E onto I: $ hg clone -q -u . 
c c1 $ cd c1 $ hg rebase -s 4 --collapse # root (4) is not a merge rebasing 4:8a5212ebc852 "E" rebasing 5:dca5924bb570 "F" merging E rebasing 6:55a44ad28289 "G" rebasing 7:417d3b648079 "H" saved backup bundle to $TESTTMP/c1/.hg/strip-backup/8a5212ebc852-f95d0879-backup.hg (glob) $ hg tglog o 5: 'Collapsed revision |\ * E | | * F | | * G | | * H' | @ 4: 'I' | | o | 3: 'D' |\ \ | o | 2: 'C' | |/ o / 1: 'B' |/ o 0: 'A' $ hg manifest --rev tip A C D E G I $ hg up tip -q $ cat E F $ cd .. Create repo d: $ hg init d $ cd d $ echo A > A $ hg ci -Am A adding A $ echo B > B $ hg ci -Am B adding B $ echo C > C $ hg ci -Am C adding C $ hg up -q 1 $ echo D > D $ hg ci -Am D adding D created new head $ hg merge 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m E $ hg up -q 0 $ echo F > F $ hg ci -Am F adding F created new head $ hg tglog @ 5: 'F' | | o 4: 'E' | |\ | | o 3: 'D' | | | | o | 2: 'C' | |/ | o 1: 'B' |/ o 0: 'A' $ cd .. Rebase and collapse - B onto F: $ hg clone -q -u . d d1 $ cd d1 $ hg rebase -s 1 --collapse rebasing 1:27547f69f254 "B" rebasing 2:f838bfaca5c7 "C" rebasing 3:7bbcd6078bcc "D" rebasing 4:0a42590ed746 "E" saved backup bundle to $TESTTMP/d1/.hg/strip-backup/27547f69f254-9a3f7d92-backup.hg (glob) $ hg tglog o 2: 'Collapsed revision | * B | * C | * D | * E' @ 1: 'F' | o 0: 'A' $ hg manifest --rev tip A B C D F Interactions between collapse and keepbranches $ cd .. $ hg init e $ cd e $ echo 'a' > a $ hg ci -Am 'A' adding a $ hg branch 'one' marked working directory as branch one (branches are permanent and global, did you want a bookmark?) 
$ echo 'b' > b $ hg ci -Am 'B' adding b $ hg branch 'two' marked working directory as branch two $ echo 'c' > c $ hg ci -Am 'C' adding c $ hg up -q 0 $ echo 'd' > d $ hg ci -Am 'D' adding d $ hg tglog @ 3: 'D' | | o 2: 'C' two | | | o 1: 'B' one |/ o 0: 'A' $ hg rebase --keepbranches --collapse -s 1 -d 3 abort: cannot collapse multiple named branches [255] $ repeatchange() { > hg checkout $1 > hg cp d z > echo blah >> z > hg commit -Am "$2" --user "$3" > } $ repeatchange 3 "E" "user1" 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ repeatchange 3 "E" "user2" 0 files updated, 0 files merged, 1 files removed, 0 files unresolved created new head $ hg tglog @ 5: 'E' | | o 4: 'E' |/ o 3: 'D' | | o 2: 'C' two | | | o 1: 'B' one |/ o 0: 'A' $ hg rebase -s 5 -d 4 rebasing 5:fbfb97b1089a "E" (tip) note: rebase of 5:fbfb97b1089a created no changes to commit saved backup bundle to $TESTTMP/e/.hg/strip-backup/fbfb97b1089a-553e1d85-backup.hg (glob) $ hg tglog @ 4: 'E' | o 3: 'D' | | o 2: 'C' two | | | o 1: 'B' one |/ o 0: 'A' $ hg export tip # HG changeset patch # User user1 # Date 0 0 # Thu Jan 01 00:00:00 1970 +0000 # Node ID f338eb3c2c7cc5b5915676a2376ba7ac558c5213 # Parent 41acb9dca9eb976e84cd21fcb756b4afa5a35c09 E diff -r 41acb9dca9eb -r f338eb3c2c7c z --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/z Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,2 @@ +d +blah $ cd .. 
Rebase, collapse and copies $ hg init copies $ cd copies $ hg unbundle "$TESTDIR/bundles/renames.hg" adding changesets adding manifests adding file changes added 4 changesets with 11 changes to 7 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg up -q tip $ hg tglog @ 3: 'move2' | o 2: 'move1' | | o 1: 'change' |/ o 0: 'add' $ hg rebase --collapse -d 1 rebasing 2:6e7340ee38c0 "move1" merging a and d to d merging b and e to e merging c and f to f rebasing 3:338e84e2e558 "move2" (tip) merging f and c to c merging e and g to g saved backup bundle to $TESTTMP/copies/.hg/strip-backup/6e7340ee38c0-ef8ef003-backup.hg (glob) $ hg st $ hg st --copies --change tip A d a A g b R b $ hg up tip -q $ cat c c c $ cat d a a $ cat g b b $ hg log -r . --template "{file_copies}\n" d (a)g (b) Test collapsing a middle revision in-place $ hg tglog @ 2: 'Collapsed revision | * move1 | * move2' o 1: 'change' | o 0: 'add' $ hg rebase --collapse -r 1 -d 0 abort: can't remove original changesets with unrebased descendants (use --keep to keep original changesets) [255] Test collapsing in place $ hg rebase --collapse -b . -d 0 rebasing 1:1352765a01d4 "change" rebasing 2:64b456429f67 "Collapsed revision" (tip) saved backup bundle to $TESTTMP/copies/.hg/strip-backup/1352765a01d4-45a352ea-backup.hg (glob) $ hg st --change tip --copies M a M c A d a A g b R b $ hg up tip -q $ cat a a a $ cat c c c $ cat d a a $ cat g b b $ cd .. 
Test stripping a revision with another child $ hg init f $ cd f $ echo A > A $ hg ci -Am A adding A $ echo B > B $ hg ci -Am B adding B $ hg up -q 0 $ echo C > C $ hg ci -Am C adding C created new head $ hg tglog @ 2: 'C' | | o 1: 'B' |/ o 0: 'A' $ hg heads --template="{rev}:{node} {branch}: {desc}\n" 2:c5cefa58fd557f84b72b87f970135984337acbc5 default: C 1:27547f69f25460a52fff66ad004e58da7ad3fb56 default: B $ hg strip 2 0 files updated, 0 files merged, 1 files removed, 0 files unresolved saved backup bundle to $TESTTMP/f/.hg/strip-backup/c5cefa58fd55-629429f4-backup.hg (glob) $ hg tglog o 1: 'B' | @ 0: 'A' $ hg heads --template="{rev}:{node} {branch}: {desc}\n" 1:27547f69f25460a52fff66ad004e58da7ad3fb56 default: B $ cd .. Test collapsing changes that add then remove a file $ hg init collapseaddremove $ cd collapseaddremove $ touch base $ hg commit -Am base adding base $ touch a $ hg commit -Am a adding a $ hg rm a $ touch b $ hg commit -Am b adding b $ hg book foo $ hg rebase -d 0 -r "1::2" --collapse -m collapsed rebasing 1:6d8d9f24eec3 "a" rebasing 2:1cc73eca5ecc "b" (tip foo) saved backup bundle to $TESTTMP/collapseaddremove/.hg/strip-backup/6d8d9f24eec3-77d3b6e2-backup.hg (glob) $ hg log -G --template "{rev}: '{desc}' {bookmarks}" @ 1: 'collapsed' foo | o 0: 'base' $ hg manifest --rev tip b base $ cd .. mercurial-3.7.3/tests/test-hgweb-removed.t0000644000175000017500000001602512676531525020172 0ustar mpmmpm00000000000000#require serve setting up repo $ hg init test $ cd test $ echo a > a $ hg ci -Ama adding a $ hg rm a $ hg ci -mdel set up hgweb $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log $ cat hg.pid >> $DAEMON_PIDS revision $ get-with-headers.py localhost:$HGPORT 'rev/tip' 200 Script output follows test: c78f6c5cbea9

                        changeset 1:c78f6c5cbea9 tip

                        del
                        author test
                        date Thu, 01 Jan 1970 00:00:00 +0000
                        parents cb9a9f314b8b
                        children
                        files a
                        diffstat 1 files changed, 0 insertions(+), 1 deletions(-) [+]
                        line wrap: on
                        line diff
                          --- a/a	Thu Jan 01 00:00:00 1970 +0000
                          +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
                          @@ -1,1 +0,0 @@
                          -a
                        diff removed file $ get-with-headers.py localhost:$HGPORT 'diff/tip/a' 200 Script output follows test: a diff

                        diff a @ 1:c78f6c5cbea9 tip

                        del
                        author test
                        date Thu, 01 Jan 1970 00:00:00 +0000
                        parents cb9a9f314b8b
                        children
                        line wrap: on
                        line diff
                          --- a/a	Thu Jan 01 00:00:00 1970 +0000
                          +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
                          @@ -1,1 +0,0 @@
                          -a
                        $ cd .. mercurial-3.7.3/tests/test-convert-hg-source.t0000644000175000017500000001122712676531525021010 0ustar mpmmpm00000000000000 $ cat >> $HGRCPATH < [extensions] > convert= > [convert] > hg.saverev=False > EOF $ hg init orig $ cd orig $ echo foo > foo $ echo bar > bar $ hg ci -qAm 'add foo bar' -d '0 0' $ echo >> foo $ hg ci -m 'change foo' -d '1 0' $ hg up -qC 0 $ hg copy --after --force foo bar $ hg copy foo baz $ hg ci -m 'make bar and baz copies of foo' -d '2 0' created new head Test that template can print all file copies (issue4362) $ hg log -r . --template "{file_copies % ' File: {file_copy}\n'}" File: bar (foo) File: baz (foo) $ hg bookmark premerge1 $ hg merge -r 1 merging baz and foo to baz 1 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m 'merge local copy' -d '3 0' $ hg up -C 1 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (leaving bookmark premerge1) $ hg bookmark premerge2 $ hg merge 2 merging foo and baz to baz 1 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m 'merge remote copy' -d '4 0' created new head Make and delete some tags $ hg tag that $ hg tag --remove that $ hg tag this #if execbit $ chmod +x baz #else $ echo some other change to make sure we get a rev 5 > baz #endif $ hg ci -m 'mark baz executable' -d '5 0' $ cd .. $ hg convert --datesort orig new 2>&1 | grep -v 'subversion python bindings could not be loaded' initializing destination new repository scanning source... sorting... converting... 
8 add foo bar 7 change foo 6 make bar and baz copies of foo 5 merge local copy 4 merge remote copy 3 Added tag that for changeset 88586c4e9f02 2 Removed tag that 1 Added tag this for changeset c56a7f387039 0 mark baz executable updating bookmarks $ cd new $ hg out ../orig comparing with ../orig searching for changes no changes found [1] #if execbit $ hg bookmarks premerge1 3:973ef48a98a4 premerge2 8:91d107c423ba #else Different hash because no x bit $ hg bookmarks premerge1 3:973ef48a98a4 premerge2 8:3537b15eaaca #endif Test that redoing a convert results in an identical graph $ cd ../ $ rm new/.hg/shamap $ hg convert --datesort orig new 2>&1 | grep -v 'subversion python bindings could not be loaded' scanning source... sorting... converting... 8 add foo bar 7 change foo 6 make bar and baz copies of foo 5 merge local copy 4 merge remote copy 3 Added tag that for changeset 88586c4e9f02 2 Removed tag that 1 Added tag this for changeset c56a7f387039 0 mark baz executable updating bookmarks $ hg -R new log -G -T '{rev} {desc}' o 8 mark baz executable | o 7 Added tag this for changeset c56a7f387039 | o 6 Removed tag that | o 5 Added tag that for changeset 88586c4e9f02 | o 4 merge remote copy |\ +---o 3 merge local copy | |/ | o 2 make bar and baz copies of foo | | o | 1 change foo |/ o 0 add foo bar check shamap LF and CRLF handling $ cat > rewrite.py < import sys > # Interlace LF and CRLF > lines = [(l.rstrip() + ((i % 2) and '\n' or '\r\n')) > for i, l in enumerate(file(sys.argv[1]))] > file(sys.argv[1], 'wb').write(''.join(lines)) > EOF $ python rewrite.py new/.hg/shamap $ cd orig $ hg up -qC 1 $ echo foo >> foo $ hg ci -qm 'change foo again' $ hg up -qC 2 $ echo foo >> foo $ hg ci -qm 'change foo again again' $ cd .. $ hg convert --datesort orig new 2>&1 | grep -v 'subversion python bindings could not be loaded' scanning source... sorting... converting... 
1 change foo again again 0 change foo again updating bookmarks init broken repository $ hg init broken $ cd broken $ echo a >> a $ echo b >> b $ hg ci -qAm init $ echo a >> a $ echo b >> b $ hg copy b c $ hg ci -qAm changeall $ hg up -qC 0 $ echo bc >> b $ hg ci -m changebagain created new head $ HGMERGE=internal:local hg -q merge $ hg ci -m merge $ hg mv b d $ hg ci -m moveb break it $ rm .hg/store/data/b.* $ cd .. $ hg --config convert.hg.ignoreerrors=True convert broken fixed initializing destination fixed repository scanning source... sorting... converting... 4 init ignoring: data/b.i@1e88685f5dde: no match found 3 changeall 2 changebagain 1 merge 0 moveb $ hg -R fixed verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 3 files, 5 changesets, 5 total revisions manifest -r 0 $ hg -R fixed manifest -r 0 a manifest -r tip $ hg -R fixed manifest -r tip a c d mercurial-3.7.3/tests/printenv.py0000755000175000017500000000242612676531525016517 0ustar mpmmpm00000000000000#!/usr/bin/env python # # simple script to be used in hooks # # put something like this in the repo .hg/hgrc: # # [hooks] # changegroup = python "$TESTDIR/printenv.py" [exit] [output] # # - is a mandatory argument (e.g. "changegroup") # - [exit] is the exit code of the hook (default: 0) # - [output] is the name of the output file (default: use sys.stdout) # the file will be opened in append mode. # import os import sys try: import msvcrt msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY) msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY) msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY) except ImportError: pass exitcode = 0 out = sys.stdout name = sys.argv[1] if len(sys.argv) > 2: exitcode = int(sys.argv[2]) if len(sys.argv) > 3: out = open(sys.argv[3], "ab") # variables with empty values may not exist on all platforms, filter # them now for portability sake. 
env = [(k, v) for k, v in os.environ.iteritems() if k.startswith("HG_") and v] env.sort() out.write("%s hook: " % name) if os.name == 'nt': filter = lambda x: x.replace('\\', '/') else: filter = lambda x: x vars = ["%s=%s" % (k, filter(v)) for k, v in env] out.write(" ".join(vars)) out.write("\n") out.close() sys.exit(exitcode) mercurial-3.7.3/tests/test-bundle-vs-outgoing.t0000644000175000017500000000454012676531525021166 0ustar mpmmpm00000000000000this structure seems to tickle a bug in bundle's search for changesets, so first we have to recreate it o 8 | | o 7 | | | o 6 |/| o | 5 | | o | 4 | | | o 3 | | | o 2 |/ o 1 | o 0 $ mkrev() > { > revno=$1 > echo "rev $revno" > echo "rev $revno" > foo.txt > hg -q ci -m"rev $revno" > } setup test repo1 $ hg init repo1 $ cd repo1 $ echo "rev 0" > foo.txt $ hg ci -Am"rev 0" adding foo.txt $ mkrev 1 rev 1 first branch $ mkrev 2 rev 2 $ mkrev 3 rev 3 back to rev 1 to create second branch $ hg up -r1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ mkrev 4 rev 4 $ mkrev 5 rev 5 merge first branch to second branch $ hg up -C -r5 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ HGMERGE=internal:local hg merge 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ echo "merge rev 5, rev 3" > foo.txt $ hg ci -m"merge first branch to second branch" one more commit following the merge $ mkrev 7 rev 7 back to "second branch" to make another head $ hg up -r5 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ mkrev 8 rev 8 the story so far $ hg log -G --template "{rev}\n" @ 8 | | o 7 | | | o 6 |/| o | 5 | | o | 4 | | | o 3 | | | o 2 |/ o 1 | o 0 check that "hg outgoing" really does the right thing sanity check of outgoing: expect revs 4 5 6 7 8 $ hg clone -r3 . 
../repo2 adding changesets adding manifests adding file changes added 4 changesets with 4 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved this should (and does) report 5 outgoing revisions: 4 5 6 7 8 $ hg outgoing --template "{rev}\n" ../repo2 comparing with ../repo2 searching for changes 4 5 6 7 8 test bundle (destination repo): expect 5 revisions this should bundle the same 5 revisions that outgoing reported, but it actually bundles 7 $ hg bundle foo.bundle ../repo2 searching for changes 5 changesets found test bundle (base revision): expect 5 revisions this should (and does) give exactly the same result as bundle with a destination repo... i.e. it's wrong too $ hg bundle --base 3 foo.bundle 5 changesets found $ cd .. mercurial-3.7.3/tests/test-rebase-newancestor.t0000644000175000017500000001754612676531525021237 0ustar mpmmpm00000000000000 $ cat >> $HGRCPATH < [format] > usegeneraldelta=yes > [extensions] > rebase= > > [alias] > tglog = log -G --template "{rev}: '{desc}' {branches}\n" > EOF $ hg init repo $ cd repo $ echo A > a $ echo >> a $ hg ci -Am A adding a $ echo B > a $ echo >> a $ hg ci -m B $ echo C > a $ echo >> a $ hg ci -m C $ hg up -q -C 0 $ echo D >> a $ hg ci -Am AD created new head $ hg tglog @ 3: 'AD' | | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' $ hg rebase -s 1 -d 3 rebasing 1:0f4f7cb4f549 "B" merging a rebasing 2:30ae917c0e4f "C" merging a saved backup bundle to $TESTTMP/repo/.hg/strip-backup/0f4f7cb4f549-82b3b163-backup.hg (glob) $ hg tglog o 3: 'C' | o 2: 'B' | @ 1: 'AD' | o 0: 'A' $ cd .. Test rebasing of merges with ancestors of the rebase destination - a situation that often happens when trying to recover from repeated merging with a mainline branch. The test case creates a dev branch that contains a couple of merges from the default branch. When rebasing to the default branch, these merges would be merges with ancestors on the same branch. 
The merges _could_ contain some interesting conflict resolutions or additional changes in the merge commit, but that is mixed up with the actual merge stuff and there is in general no way to separate them. Note: The dev branch contains _no_ changes to f-default. It might be unclear how rebasing of ancestor merges should be handled, but the current behavior with spurious prompts for conflicts in files that didn't change seems very wrong. $ hg init ancestor-merge $ cd ancestor-merge $ touch f-default $ hg ci -Aqm 'default: create f-default' $ hg branch -q dev $ hg ci -qm 'dev: create branch' $ echo stuff > f-dev $ hg ci -Aqm 'dev: f-dev stuff' $ hg up -q default $ echo stuff > f-default $ hg ci -m 'default: f-default stuff' $ hg up -q dev $ hg merge -q default $ hg ci -m 'dev: merge default' $ hg up -q default $ hg rm f-default $ hg ci -m 'default: remove f-default' $ hg up -q dev $ hg merge -q default $ hg ci -m 'dev: merge default' $ hg up -q default $ echo stuff > f-other $ hg ci -Aqm 'default: f-other stuff' $ hg tglog @ 7: 'default: f-other stuff' | | o 6: 'dev: merge default' dev |/| o | 5: 'default: remove f-default' | | | o 4: 'dev: merge default' dev |/| o | 3: 'default: f-default stuff' | | | o 2: 'dev: f-dev stuff' dev | | | o 1: 'dev: create branch' dev |/ o 0: 'default: create f-default' $ hg clone -qU . ../ancestor-merge-2 Full rebase all the way back from branching point: $ hg rebase -r 'only(dev,default)' -d default --config ui.interactive=True << EOF > c > EOF rebasing 1:1d1a643d390e "dev: create branch" note: rebase of 1:1d1a643d390e created no changes to commit rebasing 2:ec2c14fb2984 "dev: f-dev stuff" rebasing 4:4b019212aaf6 "dev: merge default" remote changed f-default which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? 
c rebasing 6:9455ee510502 "dev: merge default" saved backup bundle to $TESTTMP/ancestor-merge/.hg/strip-backup/1d1a643d390e-43e9e04b-backup.hg (glob) $ hg tglog o 6: 'dev: merge default' | o 5: 'dev: merge default' | o 4: 'dev: f-dev stuff' | @ 3: 'default: f-other stuff' | o 2: 'default: remove f-default' | o 1: 'default: f-default stuff' | o 0: 'default: create f-default' Grafty cherry picking rebasing: $ cd ../ancestor-merge-2 $ hg phase -fdr0: $ hg rebase -r 'children(only(dev,default))' -d default --config ui.interactive=True << EOF > c > EOF rebasing 2:ec2c14fb2984 "dev: f-dev stuff" rebasing 4:4b019212aaf6 "dev: merge default" remote changed f-default which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? c rebasing 6:9455ee510502 "dev: merge default" saved backup bundle to $TESTTMP/ancestor-merge-2/.hg/strip-backup/ec2c14fb2984-62d0b222-backup.hg (glob) $ hg tglog o 7: 'dev: merge default' | o 6: 'dev: merge default' | o 5: 'dev: f-dev stuff' | o 4: 'default: f-other stuff' | o 3: 'default: remove f-default' | o 2: 'default: f-default stuff' | | o 1: 'dev: create branch' dev |/ o 0: 'default: create f-default' $ cd .. Test order of parents of rebased merged with un-rebased changes as p1. 
$ hg init parentorder $ cd parentorder $ touch f $ hg ci -Aqm common $ touch change $ hg ci -Aqm change $ touch target $ hg ci -Aqm target $ hg up -qr 0 $ touch outside $ hg ci -Aqm outside $ hg merge -qr 1 $ hg ci -m 'merge p1 3=outside p2 1=ancestor' $ hg par changeset: 4:6990226659be tag: tip parent: 3:f59da8fc0fcf parent: 1:dd40c13f7a6f user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: merge p1 3=outside p2 1=ancestor $ hg up -qr 1 $ hg merge -qr 3 $ hg ci -qm 'merge p1 1=ancestor p2 3=outside' $ hg par changeset: 5:a57575f79074 tag: tip parent: 1:dd40c13f7a6f parent: 3:f59da8fc0fcf user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: merge p1 1=ancestor p2 3=outside $ hg tglog @ 5: 'merge p1 1=ancestor p2 3=outside' |\ +---o 4: 'merge p1 3=outside p2 1=ancestor' | |/ | o 3: 'outside' | | +---o 2: 'target' | | o | 1: 'change' |/ o 0: 'common' $ hg rebase -r 4 -d 2 rebasing 4:6990226659be "merge p1 3=outside p2 1=ancestor" saved backup bundle to $TESTTMP/parentorder/.hg/strip-backup/6990226659be-4d67a0d3-backup.hg (glob) $ hg tip changeset: 5:cca50676b1c5 tag: tip parent: 2:a60552eb93fb parent: 3:f59da8fc0fcf user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: merge p1 3=outside p2 1=ancestor $ hg rebase -r 4 -d 2 rebasing 4:a57575f79074 "merge p1 1=ancestor p2 3=outside" saved backup bundle to $TESTTMP/parentorder/.hg/strip-backup/a57575f79074-385426e5-backup.hg (glob) $ hg tip changeset: 5:f9daf77ffe76 tag: tip parent: 2:a60552eb93fb parent: 3:f59da8fc0fcf user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: merge p1 1=ancestor p2 3=outside $ hg tglog @ 5: 'merge p1 1=ancestor p2 3=outside' |\ +---o 4: 'merge p1 3=outside p2 1=ancestor' | |/ | o 3: 'outside' | | o | 2: 'target' | | o | 1: 'change' |/ o 0: 'common' rebase of merge of ancestors $ hg up -qr 2 $ hg merge -qr 3 $ echo 'other change while merging future "rebase ancestors"' > other $ hg ci -Aqm 'merge rebase ancestors' $ hg rebase -d 5 -v rebasing 6:4c5f12f25ebe "merge rebase 
ancestors" (tip) resolving manifests removing other note: merging f9daf77ffe76+ and 4c5f12f25ebe using bids from ancestors a60552eb93fb and f59da8fc0fcf calculating bids for ancestor a60552eb93fb resolving manifests calculating bids for ancestor f59da8fc0fcf resolving manifests auction for merging merge bids other: consensus for g end of auction getting other committing files: other committing manifest committing changelog rebase merging completed 1 changesets found uncompressed size of bundle content: 213 (changelog) 216 (manifests) 182 other saved backup bundle to $TESTTMP/parentorder/.hg/strip-backup/4c5f12f25ebe-f46990e5-backup.hg (glob) 1 changesets found uncompressed size of bundle content: 272 (changelog) 167 (manifests) 182 other adding branch adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files rebase completed $ hg tglog @ 6: 'merge rebase ancestors' | o 5: 'merge p1 1=ancestor p2 3=outside' |\ +---o 4: 'merge p1 3=outside p2 1=ancestor' | |/ | o 3: 'outside' | | o | 2: 'target' | | o | 1: 'change' |/ o 0: 'common' mercurial-3.7.3/tests/test-diff-change.t0000644000175000017500000000540712676531525017574 0ustar mpmmpm00000000000000Testing diff --change $ hg init a $ cd a $ echo "first" > file.txt $ hg add file.txt $ hg commit -m 'first commit' # 0 $ echo "second" > file.txt $ hg commit -m 'second commit' # 1 $ echo "third" > file.txt $ hg commit -m 'third commit' # 2 $ hg diff --nodates --change 1 diff -r 4bb65dda5db4 -r e9b286083166 file.txt --- a/file.txt +++ b/file.txt @@ -1,1 +1,1 @@ -first +second $ hg diff --change e9b286083166 diff -r 4bb65dda5db4 -r e9b286083166 file.txt --- a/file.txt Thu Jan 01 00:00:00 1970 +0000 +++ b/file.txt Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,1 @@ -first +second $ cd .. 
Test dumb revspecs: top-level "x:y", "x:", ":y" and ":" ranges should be handled as pairs even if x == y, but not for "f(x:y)" nor "x::y" (issue3474, issue4774) $ hg clone -q a dumbspec $ cd dumbspec $ echo "wdir" > file.txt $ hg diff -r 2:2 $ hg diff -r 2:. $ hg diff -r 2: $ hg diff -r :0 $ hg diff -r '2:first(2:2)' $ hg diff -r 'first(2:2)' --nodates diff -r bf5ff72eb7e0 file.txt --- a/file.txt +++ b/file.txt @@ -1,1 +1,1 @@ -third +wdir $ hg diff -r 2::2 --nodates diff -r bf5ff72eb7e0 file.txt --- a/file.txt +++ b/file.txt @@ -1,1 +1,1 @@ -third +wdir $ hg diff -r "2 and 1" abort: empty revision range [255] $ cd .. $ hg clone -qr0 a dumbspec-rev0 $ cd dumbspec-rev0 $ echo "wdir" > file.txt $ hg diff -r : $ hg diff -r 'first(:)' --nodates diff -r 4bb65dda5db4 file.txt --- a/file.txt +++ b/file.txt @@ -1,1 +1,1 @@ -first +wdir $ cd .. Testing diff --change when merge: $ cd a $ for i in 1 2 3 4 5 6 7 8 9 10; do > echo $i >> file.txt > done $ hg commit -m "lots of text" # 3 $ sed -e 's,^2$,x,' file.txt > file.txt.tmp $ mv file.txt.tmp file.txt $ hg commit -m "change 2 to x" # 4 $ hg up -r 3 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ sed -e 's,^8$,y,' file.txt > file.txt.tmp $ mv file.txt.tmp file.txt $ hg commit -m "change 8 to y" created new head $ hg up -C -r 4 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge -r 5 merging file.txt 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg commit -m "merge 8 to y" # 6 $ hg diff --change 5 diff -r ae119d680c82 -r 9085c5c02e52 file.txt --- a/file.txt Thu Jan 01 00:00:00 1970 +0000 +++ b/file.txt Thu Jan 01 00:00:00 1970 +0000 @@ -6,6 +6,6 @@ 5 6 7 -8 +y 9 10 must be similar to 'hg diff --change 5': $ hg diff -c 6 diff -r 273b50f17c6d -r 979ca961fd2e file.txt --- a/file.txt Thu Jan 01 00:00:00 1970 +0000 +++ b/file.txt Thu Jan 01 00:00:00 1970 +0000 @@ -6,6 +6,6 @@ 5 6 7 -8 +y 9 10 $ cd .. 
mercurial-3.7.3/tests/test-hg-parseurl.py0000644000175000017500000000101012676531525020041 0ustar mpmmpm00000000000000from mercurial.hg import parseurl def testparse(url, branch=[]): print '%s, branches: %r' % parseurl(url, branch) testparse('http://example.com/no/anchor') testparse('http://example.com/an/anchor#foo') testparse('http://example.com/no/anchor/branches', branch=['foo']) testparse('http://example.com/an/anchor/branches#bar', branch=['foo']) testparse('http://example.com/an/anchor/branches-None#foo', branch=None) testparse('http://example.com/') testparse('http://example.com') testparse('http://example.com#foo') mercurial-3.7.3/tests/test-relink.t0000644000175000017500000000501412676531525016717 0ustar mpmmpm00000000000000#require hardlink $ echo "[extensions]" >> $HGRCPATH $ echo "relink=" >> $HGRCPATH $ fix_path() { > tr '\\' / > } $ cat > arelinked.py < import sys, os > from mercurial import util > path1, path2 = sys.argv[1:3] > if util.samefile(path1, path2): > print '%s == %s' % (path1, path2) > else: > print '%s != %s' % (path1, path2) > EOF create source repository $ hg init repo $ cd repo $ echo a > a $ echo b > b $ hg ci -Am addfile adding a adding b $ cat "$TESTDIR/binfile.bin" >> a $ cat "$TESTDIR/binfile.bin" >> b $ hg ci -Am changefiles make another commit to create files larger than 1 KB to test formatting of final byte count $ cat "$TESTDIR/binfile.bin" >> a $ cat "$TESTDIR/binfile.bin" >> b $ hg ci -m anotherchange don't sit forever trying to double-lock the source repo $ hg relink . relinking $TESTTMP/repo/.hg/store to $TESTTMP/repo/.hg/store (glob) there is nothing to relink Test files are read in binary mode $ $PYTHON -c "file('.hg/store/data/dummy.i', 'wb').write('a\r\nb\n')" $ cd .. 
clone and pull to break links $ hg clone --pull -r0 repo clone adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd clone $ hg pull -q $ echo b >> b $ hg ci -m changeb created new head $ $PYTHON -c "file('.hg/store/data/dummy.i', 'wb').write('a\nb\r\n')" relink $ hg relink --debug --config progress.debug=true | fix_path relinking $TESTTMP/repo/.hg/store to $TESTTMP/clone/.hg/store tip has 2 files, estimated total number of files: 3 collecting: 00changelog.i 1/3 files (33.33%) collecting: 00manifest.i 2/3 files (66.67%) collecting: a.i 3/3 files (100.00%) collecting: b.i 4/3 files (133.33%) collecting: dummy.i 5/3 files (166.67%) collected 5 candidate storage files not linkable: 00changelog.i not linkable: 00manifest.i pruning: data/a.i 3/5 files (60.00%) not linkable: data/b.i pruning: data/dummy.i 5/5 files (100.00%) pruned down to 2 probably relinkable files relinking: data/a.i 1/2 files (50.00%) not linkable: data/dummy.i relinked 1 files (1.36 KB reclaimed) $ cd .. check hardlinks $ python arelinked.py repo/.hg/store/data/a.i clone/.hg/store/data/a.i repo/.hg/store/data/a.i == clone/.hg/store/data/a.i $ python arelinked.py repo/.hg/store/data/b.i clone/.hg/store/data/b.i repo/.hg/store/data/b.i != clone/.hg/store/data/b.i mercurial-3.7.3/tests/test-simplemerge.py0000644000175000017500000003000312676531525020125 0ustar mpmmpm00000000000000# Copyright (C) 2004, 2005 Canonical Ltd # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, see . import unittest from unittest import TestCase from mercurial import util, simplemerge, error # bzr compatible interface, for the tests class Merge3(simplemerge.Merge3Text): """3-way merge of texts. Given BASE, OTHER, THIS, tries to produce a combined text incorporating the changes from both BASE->OTHER and BASE->THIS. All three will typically be sequences of lines.""" def __init__(self, base, a, b): basetext = '\n'.join([i.strip('\n') for i in base] + ['']) atext = '\n'.join([i.strip('\n') for i in a] + ['']) btext = '\n'.join([i.strip('\n') for i in b] + ['']) if util.binary(basetext) or util.binary(atext) or util.binary(btext): raise error.Abort("don't know how to merge binary files") simplemerge.Merge3Text.__init__(self, basetext, atext, btext, base, a, b) CantReprocessAndShowBase = simplemerge.CantReprocessAndShowBase def split_lines(t): from cStringIO import StringIO return StringIO(t).readlines() ############################################################ # test case data from the gnu diffutils manual # common base TZU = split_lines(""" The Nameless is the origin of Heaven and Earth; The named is the mother of all things. Therefore let there always be non-being, so we may see their subtlety, And let there always be being, so we may see their outcome. The two are the same, But after they are produced, they have different names. They both may be called deep and profound. Deeper and more profound, The door of all subtleties! """) LAO = split_lines(""" The Way that can be told of is not the eternal Way; The name that can be named is not the eternal name. The Nameless is the origin of Heaven and Earth; The Named is the mother of all things. Therefore let there always be non-being, so we may see their subtlety, And let there always be being, so we may see their outcome. 
The two are the same, But after they are produced, they have different names. """) TAO = split_lines(""" The Way that can be told of is not the eternal Way; The name that can be named is not the eternal name. The Nameless is the origin of Heaven and Earth; The named is the mother of all things. Therefore let there always be non-being, so we may see their subtlety, And let there always be being, so we may see their result. The two are the same, But after they are produced, they have different names. -- The Way of Lao-Tzu, tr. Wing-tsit Chan """) MERGED_RESULT = split_lines("""\ The Way that can be told of is not the eternal Way; The name that can be named is not the eternal name. The Nameless is the origin of Heaven and Earth; The Named is the mother of all things. Therefore let there always be non-being, so we may see their subtlety, And let there always be being, so we may see their result. The two are the same, But after they are produced, they have different names. <<<<<<< LAO ======= -- The Way of Lao-Tzu, tr. 
Wing-tsit Chan >>>>>>> TAO """) class TestMerge3(TestCase): def log(self, msg): pass def test_no_changes(self): """No conflicts because nothing changed""" m3 = Merge3(['aaa', 'bbb'], ['aaa', 'bbb'], ['aaa', 'bbb']) self.assertEquals(m3.find_unconflicted(), [(0, 2)]) self.assertEquals(list(m3.find_sync_regions()), [(0, 2, 0, 2, 0, 2), (2, 2, 2, 2, 2, 2)]) self.assertEquals(list(m3.merge_regions()), [('unchanged', 0, 2)]) self.assertEquals(list(m3.merge_groups()), [('unchanged', ['aaa', 'bbb'])]) def test_front_insert(self): m3 = Merge3(['zz'], ['aaa', 'bbb', 'zz'], ['zz']) # todo: should use a sentinel at end as from get_matching_blocks # to match without zz self.assertEquals(list(m3.find_sync_regions()), [(0, 1, 2, 3, 0, 1), (1, 1, 3, 3, 1, 1)]) self.assertEquals(list(m3.merge_regions()), [('a', 0, 2), ('unchanged', 0, 1)]) self.assertEquals(list(m3.merge_groups()), [('a', ['aaa', 'bbb']), ('unchanged', ['zz'])]) def test_null_insert(self): m3 = Merge3([], ['aaa', 'bbb'], []) # todo: should use a sentinel at end as from get_matching_blocks # to match without zz self.assertEquals(list(m3.find_sync_regions()), [(0, 0, 2, 2, 0, 0)]) self.assertEquals(list(m3.merge_regions()), [('a', 0, 2)]) self.assertEquals(list(m3.merge_lines()), ['aaa', 'bbb']) def test_no_conflicts(self): """No conflicts because only one side changed""" m3 = Merge3(['aaa', 'bbb'], ['aaa', '111', 'bbb'], ['aaa', 'bbb']) self.assertEquals(m3.find_unconflicted(), [(0, 1), (1, 2)]) self.assertEquals(list(m3.find_sync_regions()), [(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 1, 2), (2, 2, 3, 3, 2, 2)]) self.assertEquals(list(m3.merge_regions()), [('unchanged', 0, 1), ('a', 1, 2), ('unchanged', 1, 2)]) def test_append_a(self): m3 = Merge3(['aaa\n', 'bbb\n'], ['aaa\n', 'bbb\n', '222\n'], ['aaa\n', 'bbb\n']) self.assertEquals(''.join(m3.merge_lines()), 'aaa\nbbb\n222\n') def test_append_b(self): m3 = Merge3(['aaa\n', 'bbb\n'], ['aaa\n', 'bbb\n'], ['aaa\n', 'bbb\n', '222\n']) 
self.assertEquals(''.join(m3.merge_lines()), 'aaa\nbbb\n222\n') def test_append_agreement(self): m3 = Merge3(['aaa\n', 'bbb\n'], ['aaa\n', 'bbb\n', '222\n'], ['aaa\n', 'bbb\n', '222\n']) self.assertEquals(''.join(m3.merge_lines()), 'aaa\nbbb\n222\n') def test_append_clash(self): m3 = Merge3(['aaa\n', 'bbb\n'], ['aaa\n', 'bbb\n', '222\n'], ['aaa\n', 'bbb\n', '333\n']) ml = m3.merge_lines(name_a='a', name_b='b', start_marker='<<', mid_marker='--', end_marker='>>') self.assertEquals(''.join(ml), 'aaa\n' 'bbb\n' '<< a\n' '222\n' '--\n' '333\n' '>> b\n' ) def test_insert_agreement(self): m3 = Merge3(['aaa\n', 'bbb\n'], ['aaa\n', '222\n', 'bbb\n'], ['aaa\n', '222\n', 'bbb\n']) ml = m3.merge_lines(name_a='a', name_b='b', start_marker='<<', mid_marker='--', end_marker='>>') self.assertEquals(''.join(ml), 'aaa\n222\nbbb\n') def test_insert_clash(self): """Both try to insert lines in the same place.""" m3 = Merge3(['aaa\n', 'bbb\n'], ['aaa\n', '111\n', 'bbb\n'], ['aaa\n', '222\n', 'bbb\n']) self.assertEquals(m3.find_unconflicted(), [(0, 1), (1, 2)]) self.assertEquals(list(m3.find_sync_regions()), [(0, 1, 0, 1, 0, 1), (1, 2, 2, 3, 2, 3), (2, 2, 3, 3, 3, 3)]) self.assertEquals(list(m3.merge_regions()), [('unchanged', 0, 1), ('conflict', 1, 1, 1, 2, 1, 2), ('unchanged', 1, 2)]) self.assertEquals(list(m3.merge_groups()), [('unchanged', ['aaa\n']), ('conflict', [], ['111\n'], ['222\n']), ('unchanged', ['bbb\n']), ]) ml = m3.merge_lines(name_a='a', name_b='b', start_marker='<<', mid_marker='--', end_marker='>>') self.assertEquals(''.join(ml), '''aaa << a 111 -- 222 >> b bbb ''') def test_replace_clash(self): """Both try to insert lines in the same place.""" m3 = Merge3(['aaa', '000', 'bbb'], ['aaa', '111', 'bbb'], ['aaa', '222', 'bbb']) self.assertEquals(m3.find_unconflicted(), [(0, 1), (2, 3)]) self.assertEquals(list(m3.find_sync_regions()), [(0, 1, 0, 1, 0, 1), (2, 3, 2, 3, 2, 3), (3, 3, 3, 3, 3, 3)]) def test_replace_multi(self): """Replacement with regions of different 
size.""" m3 = Merge3(['aaa', '000', '000', 'bbb'], ['aaa', '111', '111', '111', 'bbb'], ['aaa', '222', '222', '222', '222', 'bbb']) self.assertEquals(m3.find_unconflicted(), [(0, 1), (3, 4)]) self.assertEquals(list(m3.find_sync_regions()), [(0, 1, 0, 1, 0, 1), (3, 4, 4, 5, 5, 6), (4, 4, 5, 5, 6, 6)]) def test_merge_poem(self): """Test case from diff3 manual""" m3 = Merge3(TZU, LAO, TAO) ml = list(m3.merge_lines('LAO', 'TAO')) self.log('merge result:') self.log(''.join(ml)) self.assertEquals(ml, MERGED_RESULT) def test_binary(self): self.assertRaises(error.Abort, Merge3, ['\x00'], ['a'], ['b']) def test_dos_text(self): base_text = 'a\r\n' this_text = 'b\r\n' other_text = 'c\r\n' m3 = Merge3(base_text.splitlines(True), other_text.splitlines(True), this_text.splitlines(True)) m_lines = m3.merge_lines('OTHER', 'THIS') self.assertEqual('<<<<<<< OTHER\r\nc\r\n=======\r\nb\r\n' '>>>>>>> THIS\r\n'.splitlines(True), list(m_lines)) def test_mac_text(self): base_text = 'a\r' this_text = 'b\r' other_text = 'c\r' m3 = Merge3(base_text.splitlines(True), other_text.splitlines(True), this_text.splitlines(True)) m_lines = m3.merge_lines('OTHER', 'THIS') self.assertEqual('<<<<<<< OTHER\rc\r=======\rb\r' '>>>>>>> THIS\r'.splitlines(True), list(m_lines)) if __name__ == '__main__': # hide the timer import time orig = time.time try: time.time = lambda: 0 unittest.main() finally: time.time = orig mercurial-3.7.3/tests/test-treemanifest.t0000644000175000017500000003615312676531525020131 0ustar mpmmpm00000000000000 $ cat << EOF >> $HGRCPATH > [format] > usegeneraldelta=yes > [ui] > ssh=python "$TESTDIR/dummyssh" > EOF Set up repo $ hg --config experimental.treemanifest=True init repo $ cd repo Requirements get set on init $ grep treemanifest .hg/requires treemanifest Without directories, looks like any other repo $ echo 0 > a $ echo 0 > b $ hg ci -Aqm initial $ hg debugdata -m 0 a\x00362fef284ce2ca02aecc8de6d5e8a1c3af0556fe (esc) b\x00362fef284ce2ca02aecc8de6d5e8a1c3af0556fe (esc) 
Submanifest is stored in separate revlog $ mkdir dir1 $ echo 1 > dir1/a $ echo 1 > dir1/b $ echo 1 > e $ hg ci -Aqm 'add dir1' $ hg debugdata -m 1 a\x00362fef284ce2ca02aecc8de6d5e8a1c3af0556fe (esc) b\x00362fef284ce2ca02aecc8de6d5e8a1c3af0556fe (esc) dir1\x008b3ffd73f901e83304c83d33132c8e774ceac44et (esc) e\x00b8e02f6433738021a065f94175c7cd23db5f05be (esc) $ hg debugdata --dir dir1 0 a\x00b8e02f6433738021a065f94175c7cd23db5f05be (esc) b\x00b8e02f6433738021a065f94175c7cd23db5f05be (esc) Can add nested directories $ mkdir dir1/dir1 $ echo 2 > dir1/dir1/a $ echo 2 > dir1/dir1/b $ mkdir dir1/dir2 $ echo 2 > dir1/dir2/a $ echo 2 > dir1/dir2/b $ hg ci -Aqm 'add dir1/dir1' $ hg files -r . a b dir1/a (glob) dir1/b (glob) dir1/dir1/a (glob) dir1/dir1/b (glob) dir1/dir2/a (glob) dir1/dir2/b (glob) e Revision is not created for unchanged directory $ mkdir dir2 $ echo 3 > dir2/a $ hg add dir2 adding dir2/a (glob) $ hg debugindex --dir dir1 > before $ hg ci -qm 'add dir2' $ hg debugindex --dir dir1 > after $ diff before after $ rm before after Removing directory does not create an revlog entry $ hg rm dir1/dir1 removing dir1/dir1/a (glob) removing dir1/dir1/b (glob) $ hg debugindex --dir dir1/dir1 > before $ hg ci -qm 'remove dir1/dir1' $ hg debugindex --dir dir1/dir1 > after $ diff before after $ rm before after Check that hg files (calls treemanifest.walk()) works without loading all directory revlogs $ hg co 'desc("add dir2")' 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ mv .hg/store/meta/dir2 .hg/store/meta/dir2-backup $ hg files -r . dir1 dir1/a (glob) dir1/b (glob) dir1/dir1/a (glob) dir1/dir1/b (glob) dir1/dir2/a (glob) dir1/dir2/b (glob) Check that status between revisions works (calls treemanifest.matches()) without loading all directory revlogs $ hg status --rev 'desc("add dir1")' --rev . 
dir1 A dir1/dir1/a A dir1/dir1/b A dir1/dir2/a A dir1/dir2/b $ mv .hg/store/meta/dir2-backup .hg/store/meta/dir2 Merge creates 2-parent revision of directory revlog $ echo 5 > dir1/a $ hg ci -Aqm 'modify dir1/a' $ hg co '.^' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo 6 > dir1/b $ hg ci -Aqm 'modify dir1/b' $ hg merge 'desc("modify dir1/a")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m 'conflict-free merge involving dir1/' $ cat dir1/a 5 $ cat dir1/b 6 $ hg debugindex --dir dir1 rev offset length delta linkrev nodeid p1 p2 0 0 54 -1 1 8b3ffd73f901 000000000000 000000000000 1 54 68 0 2 68e9d057c5a8 8b3ffd73f901 000000000000 2 122 12 1 4 4698198d2624 68e9d057c5a8 000000000000 3 134 55 1 5 44844058ccce 68e9d057c5a8 000000000000 4 189 55 1 6 bf3d9b744927 68e9d057c5a8 000000000000 5 244 55 4 7 dde7c0af2a03 bf3d9b744927 44844058ccce Merge keeping directory from parent 1 does not create revlog entry. (Note that dir1's manifest does change, but only because dir1/a's filelog changes.) $ hg co 'desc("add dir2")' 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo 8 > dir2/a $ hg ci -m 'modify dir2/a' created new head $ hg debugindex --dir dir2 > before $ hg merge 'desc("modify dir1/a")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg revert -r 'desc("modify dir2/a")' . reverting dir1/a (glob) $ hg ci -m 'merge, keeping parent 1' $ hg debugindex --dir dir2 > after $ diff before after $ rm before after Merge keeping directory from parent 2 does not create revlog entry. (Note that dir2's manifest does change, but only because dir2/a's filelog changes.) 
$ hg co 'desc("modify dir2/a")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg debugindex --dir dir1 > before $ hg merge 'desc("modify dir1/a")' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg revert -r 'desc("modify dir1/a")' . reverting dir2/a (glob) $ hg ci -m 'merge, keeping parent 2' created new head $ hg debugindex --dir dir1 > after $ diff before after $ rm before after Create flat source repo for tests with mixed flat/tree manifests $ cd .. $ hg init repo-flat $ cd repo-flat Create a few commits with flat manifest $ echo 0 > a $ echo 0 > b $ echo 0 > e $ for d in dir1 dir1/dir1 dir1/dir2 dir2 > do > mkdir $d > echo 0 > $d/a > echo 0 > $d/b > done $ hg ci -Aqm initial $ echo 1 > a $ echo 1 > dir1/a $ echo 1 > dir1/dir1/a $ hg ci -Aqm 'modify on branch 1' $ hg co 0 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo 2 > b $ echo 2 > dir1/b $ echo 2 > dir1/dir1/b $ hg ci -Aqm 'modify on branch 2' $ hg merge 1 3 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m 'merge of flat manifests to new flat manifest' $ hg serve -p $HGPORT -d --pid-file=hg.pid --errorlog=errors.log $ cat hg.pid >> $DAEMON_PIDS Create clone with tree manifests enabled $ cd .. 
$ hg clone --config experimental.treemanifest=1 \ > http://localhost:$HGPORT repo-mixed -r 1 adding changesets adding manifests adding file changes added 2 changesets with 14 changes to 11 files updating to branch default 11 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd repo-mixed $ test -d .hg/store/meta [1] $ grep treemanifest .hg/requires treemanifest Should be possible to push updates from flat to tree manifest repo $ hg -R ../repo-flat push ssh://user@dummy/repo-mixed pushing to ssh://user@dummy/repo-mixed searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 2 changesets with 3 changes to 3 files Commit should store revlog per directory $ hg co 1 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo 3 > a $ echo 3 > dir1/a $ echo 3 > dir1/dir1/a $ hg ci -m 'first tree' created new head $ find .hg/store/meta | sort .hg/store/meta .hg/store/meta/dir1 .hg/store/meta/dir1/00manifest.i .hg/store/meta/dir1/dir1 .hg/store/meta/dir1/dir1/00manifest.i .hg/store/meta/dir1/dir2 .hg/store/meta/dir1/dir2/00manifest.i .hg/store/meta/dir2 .hg/store/meta/dir2/00manifest.i Merge of two trees $ hg co 2 6 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge 1 3 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m 'merge of flat manifests to new tree manifest' created new head $ hg diff -r 3 Parent of tree root manifest should be flat manifest, and two for merge $ hg debugindex -m rev offset length delta linkrev nodeid p1 p2 0 0 80 -1 0 40536115ed9e 000000000000 000000000000 1 80 83 0 1 f3376063c255 40536115ed9e 000000000000 2 163 89 0 2 5d9b9da231a2 40536115ed9e 000000000000 3 252 83 2 3 d17d663cbd8a 5d9b9da231a2 f3376063c255 4 335 124 1 4 51e32a8c60ee f3376063c255 000000000000 5 459 126 2 5 cc5baa78b230 5d9b9da231a2 f3376063c255 Status across flat/tree boundary should work $ hg 
status --rev '.^' --rev . M a M dir1/a M dir1/dir1/a Turning off treemanifest config has no effect $ hg debugindex --dir dir1 rev offset length delta linkrev nodeid p1 p2 0 0 127 -1 4 064927a0648a 000000000000 000000000000 1 127 111 0 5 25ecb8cb8618 000000000000 000000000000 $ echo 2 > dir1/a $ hg --config experimental.treemanifest=False ci -qm 'modify dir1/a' $ hg debugindex --dir dir1 rev offset length delta linkrev nodeid p1 p2 0 0 127 -1 4 064927a0648a 000000000000 000000000000 1 127 111 0 5 25ecb8cb8618 000000000000 000000000000 2 238 55 1 6 5b16163a30c6 25ecb8cb8618 000000000000 Stripping and recovering changes should work $ hg st --change tip M dir1/a $ hg --config extensions.strip= strip tip 1 files updated, 0 files merged, 0 files removed, 0 files unresolved saved backup bundle to $TESTTMP/repo-mixed/.hg/strip-backup/51cfd7b1e13b-78a2f3ed-backup.hg (glob) $ hg unbundle -q .hg/strip-backup/* $ hg st --change tip M dir1/a Shelving and unshelving should work $ echo foo >> dir1/a $ hg --config extensions.shelve= shelve shelved as default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg --config extensions.shelve= unshelve unshelving change 'default' $ hg diff --nodates diff -r 708a273da119 dir1/a --- a/dir1/a +++ b/dir1/a @@ -1,1 +1,2 @@ 1 +foo Pushing from treemanifest repo to an empty repo makes that a treemanifest repo $ cd .. 
$ hg init empty-repo $ cat << EOF >> empty-repo/.hg/hgrc > [experimental] > changegroup3=yes > EOF $ grep treemanifest empty-repo/.hg/requires [1] $ hg push -R repo -r 0 empty-repo pushing to empty-repo searching for changes adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files $ grep treemanifest empty-repo/.hg/requires treemanifest Pushing to an empty repo works $ hg --config experimental.treemanifest=1 init clone $ grep treemanifest clone/.hg/requires treemanifest $ hg push -R repo clone pushing to clone searching for changes adding changesets adding manifests adding file changes added 11 changesets with 15 changes to 10 files (+3 heads) $ grep treemanifest clone/.hg/requires treemanifest Create deeper repo with tree manifests. $ hg --config experimental.treemanifest=True init deeprepo $ cd deeprepo $ mkdir a $ mkdir b $ mkdir b/bar $ mkdir b/bar/orange $ mkdir b/bar/orange/fly $ mkdir b/foo $ mkdir b/foo/apple $ mkdir b/foo/apple/bees $ touch a/one.txt $ touch a/two.txt $ touch b/bar/fruits.txt $ touch b/bar/orange/fly/gnat.py $ touch b/bar/orange/fly/housefly.txt $ touch b/foo/apple/bees/flower.py $ touch c.txt $ touch d.py $ hg ci -Aqm 'initial' We'll see that visitdir works by removing some treemanifest revlogs and running the files command with various parameters. Test files from the root. $ hg files -r . a/one.txt (glob) a/two.txt (glob) b/bar/fruits.txt (glob) b/bar/orange/fly/gnat.py (glob) b/bar/orange/fly/housefly.txt (glob) b/foo/apple/bees/flower.py (glob) c.txt d.py Excludes with a glob should not exclude everything from the glob's root $ hg files -r . -X 'b/fo?' b b/bar/fruits.txt (glob) b/bar/orange/fly/gnat.py (glob) b/bar/orange/fly/housefly.txt (glob) Test files for a subdirectory. $ mv .hg/store/meta/a oldmf $ hg files -r . 
b b/bar/fruits.txt (glob) b/bar/orange/fly/gnat.py (glob) b/bar/orange/fly/housefly.txt (glob) b/foo/apple/bees/flower.py (glob) $ mv oldmf .hg/store/meta/a Test files with just includes and excludes. $ mv .hg/store/meta/a oldmf $ mv .hg/store/meta/b/bar/orange/fly oldmf2 $ mv .hg/store/meta/b/foo/apple/bees oldmf3 $ hg files -r . -I path:b/bar -X path:b/bar/orange/fly -I path:b/foo -X path:b/foo/apple/bees b/bar/fruits.txt (glob) $ mv oldmf .hg/store/meta/a $ mv oldmf2 .hg/store/meta/b/bar/orange/fly $ mv oldmf3 .hg/store/meta/b/foo/apple/bees Test files for a subdirectory, excluding a directory within it. $ mv .hg/store/meta/a oldmf $ mv .hg/store/meta/b/foo oldmf2 $ hg files -r . -X path:b/foo b b/bar/fruits.txt (glob) b/bar/orange/fly/gnat.py (glob) b/bar/orange/fly/housefly.txt (glob) $ mv oldmf .hg/store/meta/a $ mv oldmf2 .hg/store/meta/b/foo Test files for a sub directory, including only a directory within it, and including an unrelated directory. $ mv .hg/store/meta/a oldmf $ mv .hg/store/meta/b/foo oldmf2 $ hg files -r . -I path:b/bar/orange -I path:a b b/bar/orange/fly/gnat.py (glob) b/bar/orange/fly/housefly.txt (glob) $ mv oldmf .hg/store/meta/a $ mv oldmf2 .hg/store/meta/b/foo Test files for a pattern, including a directory, and excluding a directory within that. $ mv .hg/store/meta/a oldmf $ mv .hg/store/meta/b/foo oldmf2 $ mv .hg/store/meta/b/bar/orange oldmf3 $ hg files -r . glob:**.txt -I path:b/bar -X path:b/bar/orange b/bar/fruits.txt (glob) $ mv oldmf .hg/store/meta/a $ mv oldmf2 .hg/store/meta/b/foo $ mv oldmf3 .hg/store/meta/b/bar/orange Add some more changes to the deep repo $ echo narf >> b/bar/fruits.txt $ hg ci -m narf $ echo troz >> b/bar/orange/fly/gnat.py $ hg ci -m troz Test cloning a treemanifest repo over http. $ hg serve -p $HGPORT2 -d --pid-file=hg.pid --errorlog=errors.log $ cat hg.pid >> $DAEMON_PIDS $ cd .. We can clone even with the knob turned off and we'll get a treemanifest repo. 
$ hg clone --config experimental.treemanifest=False \ > --config experimental.changegroup3=True \ > http://localhost:$HGPORT2 deepclone requesting all changes adding changesets adding manifests adding file changes added 3 changesets with 10 changes to 8 files updating to branch default 8 files updated, 0 files merged, 0 files removed, 0 files unresolved No server errors. $ cat deeprepo/errors.log requires got updated to include treemanifest $ cat deepclone/.hg/requires | grep treemanifest treemanifest Tree manifest revlogs exist. $ find deepclone/.hg/store/meta | sort deepclone/.hg/store/meta deepclone/.hg/store/meta/a deepclone/.hg/store/meta/a/00manifest.i deepclone/.hg/store/meta/b deepclone/.hg/store/meta/b/00manifest.i deepclone/.hg/store/meta/b/bar deepclone/.hg/store/meta/b/bar/00manifest.i deepclone/.hg/store/meta/b/bar/orange deepclone/.hg/store/meta/b/bar/orange/00manifest.i deepclone/.hg/store/meta/b/bar/orange/fly deepclone/.hg/store/meta/b/bar/orange/fly/00manifest.i deepclone/.hg/store/meta/b/foo deepclone/.hg/store/meta/b/foo/00manifest.i deepclone/.hg/store/meta/b/foo/apple deepclone/.hg/store/meta/b/foo/apple/00manifest.i deepclone/.hg/store/meta/b/foo/apple/bees deepclone/.hg/store/meta/b/foo/apple/bees/00manifest.i Verify passes. $ cd deepclone $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 8 files, 3 changesets, 10 total revisions $ cd .. 
mercurial-3.7.3/tests/test-eol-update.t0000644000175000017500000000514412676531525017476 0ustar mpmmpm00000000000000Test EOL update $ cat >> $HGRCPATH < [diff] > git = 1 > EOF $ seteol () { > if [ $1 = "LF" ]; then > EOL='\n' > else > EOL='\r\n' > fi > } $ makerepo () { > echo > echo "# ==== setup repository ====" > echo '% hg init' > hg init repo > cd repo > > cat > .hgeol < [patterns] > **.txt = LF > EOF > > printf "first\nsecond\nthird\n" > a.txt > hg commit --addremove -m 'LF commit' > > cat > .hgeol < [patterns] > **.txt = CRLF > EOF > > printf "first\r\nsecond\r\nthird\r\n" > a.txt > hg commit -m 'CRLF commit' > > cd .. > } $ dotest () { > seteol $1 > > echo > echo "% hg clone repo repo-$1" > hg clone --noupdate repo repo-$1 > cd repo-$1 > > cat > .hg/hgrc < [extensions] > eol = > EOF > > hg update > > echo '% a.txt (before)' > cat a.txt > > printf "first${EOL}third${EOL}" > a.txt > > echo '% a.txt (after)' > cat a.txt > echo '% hg diff' > hg diff > > echo '% hg update 0' > hg update 0 > > echo '% a.txt' > cat a.txt > echo '% hg diff' > hg diff > > > cd .. 
> rm -r repo-$1 > } $ makerepo # ==== setup repository ==== % hg init adding .hgeol adding a.txt $ dotest LF % hg clone repo repo-LF 2 files updated, 0 files merged, 0 files removed, 0 files unresolved % a.txt (before) first\r (esc) second\r (esc) third\r (esc) % a.txt (after) first third % hg diff diff --git a/a.txt b/a.txt --- a/a.txt +++ b/a.txt @@ -1,3 +1,2 @@ first\r (esc) -second\r (esc) third\r (esc) % hg update 0 merging a.txt 1 files updated, 1 files merged, 0 files removed, 0 files unresolved % a.txt first third % hg diff diff --git a/a.txt b/a.txt --- a/a.txt +++ b/a.txt @@ -1,3 +1,2 @@ first -second third $ dotest CRLF % hg clone repo repo-CRLF 2 files updated, 0 files merged, 0 files removed, 0 files unresolved % a.txt (before) first\r (esc) second\r (esc) third\r (esc) % a.txt (after) first\r (esc) third\r (esc) % hg diff diff --git a/a.txt b/a.txt --- a/a.txt +++ b/a.txt @@ -1,3 +1,2 @@ first\r (esc) -second\r (esc) third\r (esc) % hg update 0 merging a.txt 1 files updated, 1 files merged, 0 files removed, 0 files unresolved % a.txt first third % hg diff diff --git a/a.txt b/a.txt --- a/a.txt +++ b/a.txt @@ -1,3 +1,2 @@ first -second third $ rm -r repo mercurial-3.7.3/tests/test-status-color.t0000644000175000017500000003046512676531525020102 0ustar mpmmpm00000000000000 $ cat <> $HGRCPATH > [extensions] > color = > [color] > mode = ansi > EOF Terminfo codes compatibility fix $ echo "color.none=0" >> $HGRCPATH $ hg init repo1 $ cd repo1 $ mkdir a b a/1 b/1 b/2 $ touch in_root a/in_a b/in_b a/1/in_a_1 b/1/in_b_1 b/2/in_b_2 hg status in repo root: $ hg status --color=always \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/1/in_a_1\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/in_a\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/1/in_b_1\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/2/in_b_2\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/in_b\x1b[0m (esc) \x1b[0;35;1;4m? 
\x1b[0m\x1b[0;35;1;4min_root\x1b[0m (esc) $ hg status --color=debug [status.unknown|? ][status.unknown|a/1/in_a_1] [status.unknown|? ][status.unknown|a/in_a] [status.unknown|? ][status.unknown|b/1/in_b_1] [status.unknown|? ][status.unknown|b/2/in_b_2] [status.unknown|? ][status.unknown|b/in_b] [status.unknown|? ][status.unknown|in_root] hg status . in repo root: $ hg status --color=always . \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/1/in_a_1\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/in_a\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/1/in_b_1\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/2/in_b_2\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/in_b\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_root\x1b[0m (esc) $ hg status --color=always --cwd a \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/1/in_a_1\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/in_a\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/1/in_b_1\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/2/in_b_2\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/in_b\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_root\x1b[0m (esc) $ hg status --color=always --cwd a . \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m1/in_a_1\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_a\x1b[0m (esc) $ hg status --color=always --cwd a .. \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m1/in_a_1\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_a\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m../b/1/in_b_1\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m../b/2/in_b_2\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m../b/in_b\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m../in_root\x1b[0m (esc) $ hg status --color=always --cwd b \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/1/in_a_1\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/in_a\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/1/in_b_1\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/2/in_b_2\x1b[0m (esc) \x1b[0;35;1;4m? 
\x1b[0m\x1b[0;35;1;4mb/in_b\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_root\x1b[0m (esc) $ hg status --color=always --cwd b . \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m1/in_b_1\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m2/in_b_2\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_b\x1b[0m (esc) $ hg status --color=always --cwd b .. \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m../a/1/in_a_1\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m../a/in_a\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m1/in_b_1\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m2/in_b_2\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_b\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m../in_root\x1b[0m (esc) $ hg status --color=always --cwd a/1 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/1/in_a_1\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/in_a\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/1/in_b_1\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/2/in_b_2\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/in_b\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_root\x1b[0m (esc) $ hg status --color=always --cwd a/1 . \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_a_1\x1b[0m (esc) $ hg status --color=always --cwd a/1 .. \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_a_1\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m../in_a\x1b[0m (esc) $ hg status --color=always --cwd b/1 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/1/in_a_1\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/in_a\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/1/in_b_1\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/2/in_b_2\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/in_b\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_root\x1b[0m (esc) $ hg status --color=always --cwd b/1 . \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_b_1\x1b[0m (esc) $ hg status --color=always --cwd b/1 .. \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_b_1\x1b[0m (esc) \x1b[0;35;1;4m? 
\x1b[0m\x1b[0;35;1;4m../2/in_b_2\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m../in_b\x1b[0m (esc) $ hg status --color=always --cwd b/2 \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/1/in_a_1\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4ma/in_a\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/1/in_b_1\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/2/in_b_2\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mb/in_b\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_root\x1b[0m (esc) $ hg status --color=always --cwd b/2 . \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_b_2\x1b[0m (esc) $ hg status --color=always --cwd b/2 .. \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m../1/in_b_1\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4min_b_2\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4m../in_b\x1b[0m (esc) Make sure --color=never works $ hg status --color=never ? a/1/in_a_1 ? a/in_a ? b/1/in_b_1 ? b/2/in_b_2 ? b/in_b ? in_root Make sure ui.formatted=False works $ hg status --config ui.formatted=False ? a/1/in_a_1 ? a/in_a ? b/1/in_b_1 ? b/2/in_b_2 ? b/in_b ? in_root $ cd .. $ hg init repo2 $ cd repo2 $ touch modified removed deleted ignored $ echo "^ignored$" > .hgignore $ hg ci -A -m 'initial checkin' adding .hgignore adding deleted adding modified adding removed $ hg log --color=debug [log.changeset changeset.draft|changeset: 0:389aef86a55e] [log.tag|tag: tip] [log.user|user: test] [log.date|date: Thu Jan 01 00:00:00 1970 +0000] [log.summary|summary: initial checkin] Labels on empty strings should not be displayed, labels on custom templates should be. $ hg log --color=debug -T '{label("my.label",author)}\n{label("skipped.label","")}' [my.label|test] $ touch modified added unknown ignored $ hg add added $ hg remove removed $ rm deleted hg status: $ hg status --color=always \x1b[0;32;1mA \x1b[0m\x1b[0;32;1madded\x1b[0m (esc) \x1b[0;31;1mR \x1b[0m\x1b[0;31;1mremoved\x1b[0m (esc) \x1b[0;36;1;4m! \x1b[0m\x1b[0;36;1;4mdeleted\x1b[0m (esc) \x1b[0;35;1;4m? 
\x1b[0m\x1b[0;35;1;4munknown\x1b[0m (esc) hg status modified added removed deleted unknown never-existed ignored: $ hg status --color=always modified added removed deleted unknown never-existed ignored never-existed: * (glob) \x1b[0;32;1mA \x1b[0m\x1b[0;32;1madded\x1b[0m (esc) \x1b[0;31;1mR \x1b[0m\x1b[0;31;1mremoved\x1b[0m (esc) \x1b[0;36;1;4m! \x1b[0m\x1b[0;36;1;4mdeleted\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4munknown\x1b[0m (esc) $ hg copy modified copied hg status -C: $ hg status --color=always -C \x1b[0;32;1mA \x1b[0m\x1b[0;32;1madded\x1b[0m (esc) \x1b[0;32;1mA \x1b[0m\x1b[0;32;1mcopied\x1b[0m (esc) \x1b[0;0m modified\x1b[0m (esc) \x1b[0;31;1mR \x1b[0m\x1b[0;31;1mremoved\x1b[0m (esc) \x1b[0;36;1;4m! \x1b[0m\x1b[0;36;1;4mdeleted\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4munknown\x1b[0m (esc) hg status -A: $ hg status --color=always -A \x1b[0;32;1mA \x1b[0m\x1b[0;32;1madded\x1b[0m (esc) \x1b[0;32;1mA \x1b[0m\x1b[0;32;1mcopied\x1b[0m (esc) \x1b[0;0m modified\x1b[0m (esc) \x1b[0;31;1mR \x1b[0m\x1b[0;31;1mremoved\x1b[0m (esc) \x1b[0;36;1;4m! \x1b[0m\x1b[0;36;1;4mdeleted\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4munknown\x1b[0m (esc) \x1b[0;30;1mI \x1b[0m\x1b[0;30;1mignored\x1b[0m (esc) \x1b[0;0mC \x1b[0m\x1b[0;0m.hgignore\x1b[0m (esc) \x1b[0;0mC \x1b[0m\x1b[0;0mmodified\x1b[0m (esc) hg status -A (with terminfo color): #if tic $ mkdir "$TESTTMP/terminfo" $ TERMINFO="$TESTTMP/terminfo" tic "$TESTDIR/hgterm.ti" $ TERM=hgterm TERMINFO="$TESTTMP/terminfo" hg status --config color.mode=terminfo --color=always -A \x1b[30m\x1b[32m\x1b[1mA \x1b[30m\x1b[30m\x1b[32m\x1b[1madded\x1b[30m (esc) \x1b[30m\x1b[32m\x1b[1mA \x1b[30m\x1b[30m\x1b[32m\x1b[1mcopied\x1b[30m (esc) \x1b[30m\x1b[30m modified\x1b[30m (esc) \x1b[30m\x1b[31m\x1b[1mR \x1b[30m\x1b[30m\x1b[31m\x1b[1mremoved\x1b[30m (esc) \x1b[30m\x1b[36m\x1b[1m\x1b[4m! \x1b[30m\x1b[30m\x1b[36m\x1b[1m\x1b[4mdeleted\x1b[30m (esc) \x1b[30m\x1b[35m\x1b[1m\x1b[4m? 
\x1b[30m\x1b[30m\x1b[35m\x1b[1m\x1b[4munknown\x1b[30m (esc) \x1b[30m\x1b[30m\x1b[1mI \x1b[30m\x1b[30m\x1b[30m\x1b[1mignored\x1b[30m (esc) \x1b[30m\x1b[30mC \x1b[30m\x1b[30m\x1b[30m.hgignore\x1b[30m (esc) \x1b[30m\x1b[30mC \x1b[30m\x1b[30m\x1b[30mmodified\x1b[30m (esc) #endif $ echo "^ignoreddir$" > .hgignore $ mkdir ignoreddir $ touch ignoreddir/file hg status ignoreddir/file: $ hg status --color=always ignoreddir/file hg status -i ignoreddir/file: $ hg status --color=always -i ignoreddir/file \x1b[0;30;1mI \x1b[0m\x1b[0;30;1mignoreddir/file\x1b[0m (esc) $ cd .. check 'status -q' and some combinations $ hg init repo3 $ cd repo3 $ touch modified removed deleted ignored $ echo "^ignored$" > .hgignore $ hg commit -A -m 'initial checkin' adding .hgignore adding deleted adding modified adding removed $ touch added unknown ignored $ hg add added $ echo "test" >> modified $ hg remove removed $ rm deleted $ hg copy modified copied test unknown color $ hg --config color.status.modified=periwinkle status --color=always ignoring unknown color/effect 'periwinkle' (configured in color.status.modified) M modified \x1b[0;32;1mA \x1b[0m\x1b[0;32;1madded\x1b[0m (esc) \x1b[0;32;1mA \x1b[0m\x1b[0;32;1mcopied\x1b[0m (esc) \x1b[0;31;1mR \x1b[0m\x1b[0;31;1mremoved\x1b[0m (esc) \x1b[0;36;1;4m! \x1b[0m\x1b[0;36;1;4mdeleted\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4munknown\x1b[0m (esc) Run status with 2 different flags. Check if result is the same or different. If result is not as expected, raise error $ assert() { > hg status --color=always $1 > ../a > hg status --color=always $2 > ../b > if diff ../a ../b > /dev/null; then > out=0 > else > out=1 > fi > if [ $3 -eq 0 ]; then > df="same" > else > df="different" > fi > if [ $out -ne $3 ]; then > echo "Error on $1 and $2, should be $df." 
> fi > } assert flag1 flag2 [0-same | 1-different] $ assert "-q" "-mard" 0 $ assert "-A" "-marduicC" 0 $ assert "-qA" "-mardcC" 0 $ assert "-qAui" "-A" 0 $ assert "-qAu" "-marducC" 0 $ assert "-qAi" "-mardicC" 0 $ assert "-qu" "-u" 0 $ assert "-q" "-u" 1 $ assert "-m" "-a" 1 $ assert "-r" "-d" 1 $ cd .. test 'resolve -l' $ hg init repo4 $ cd repo4 $ echo "file a" > a $ echo "file b" > b $ hg add a b $ hg commit -m "initial" $ echo "file a change 1" > a $ echo "file b change 1" > b $ hg commit -m "head 1" $ hg update 0 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo "file a change 2" > a $ echo "file b change 2" > b $ hg commit -m "head 2" created new head $ hg merge merging a merging b warning: conflicts while merging a! (edit, then use 'hg resolve --mark') warning: conflicts while merging b! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 2 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ hg resolve -m b hg resolve with one unresolved, one resolved: $ hg resolve --color=always -l \x1b[0;31;1mU \x1b[0m\x1b[0;31;1ma\x1b[0m (esc) \x1b[0;32;1mR \x1b[0m\x1b[0;32;1mb\x1b[0m (esc) color coding of error message with current availability of curses $ hg unknowncommand > /dev/null hg: unknown command 'unknowncommand' [255] color coding of error message without curses $ echo 'raise ImportError' > curses.py $ PYTHONPATH=`pwd`:$PYTHONPATH hg unknowncommand > /dev/null hg: unknown command 'unknowncommand' [255] $ cd .. 
mercurial-3.7.3/tests/test-issue1175.t0000644000175000017500000000405612676531525017106 0ustar mpmmpm00000000000000https://bz.mercurial-scm.org/1175 $ hg init $ touch a $ hg ci -Am0 adding a $ hg mv a a1 $ hg ci -m1 $ hg co 0 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg mv a a2 $ hg up note: possible conflict - a was renamed multiple times to: a2 a1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg ci -m2 $ touch a $ hg ci -Am3 adding a $ hg mv a b $ hg ci -Am4 a $ hg ci --debug --traceback -Am5 b committing files: b warning: can't find ancestor for 'b' copied from 'a'! committing manifest committing changelog committed changeset 5:83a687e8a97c80992ba385bbfd766be181bfb1d1 $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 4 files, 6 changesets, 4 total revisions $ hg export --git tip # HG changeset patch # User test # Date 0 0 # Thu Jan 01 00:00:00 1970 +0000 # Node ID 83a687e8a97c80992ba385bbfd766be181bfb1d1 # Parent 1d1625283f71954f21d14c3d44d0ad3c019c597f 5 diff --git a/b b/b new file mode 100644 http://bz.selenic.com/show_bug.cgi?id=4476 $ hg init foo $ cd foo $ touch a && hg ci -Aqm a $ hg mv a b $ echo b1 >> b $ hg ci -Aqm b1 $ hg up 0 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg mv a b $ echo b2 >> b $ hg ci -Aqm b2 $ hg graft 1 grafting 1:5974126fad84 "b1" merging b warning: conflicts while merging b! (edit, then use 'hg resolve --mark') abort: unresolved conflicts, can't continue (use hg resolve and hg graft --continue) [255] $ echo a > b $ echo b3 >> b $ hg resolve --mark b (no more unresolved files) continue: hg graft --continue $ hg graft --continue grafting 1:5974126fad84 "b1" warning: can't find ancestor for 'b' copied from 'a'! 
$ hg log -f b -T 'changeset: {rev}:{node|short}\nsummary: {desc}\n\n' changeset: 3:376d30ccffc0 summary: b1 changeset: 2:416baaa2e5e4 summary: b2 changeset: 0:3903775176ed summary: a mercurial-3.7.3/tests/test-nested-repo.t0000644000175000017500000000111312676531525017654 0ustar mpmmpm00000000000000 $ hg init a $ cd a $ hg init b $ echo x > b/x Should print nothing: $ hg add b $ hg st $ echo y > b/y $ hg st Should fail: $ hg st b/x abort: path 'b/x' is inside nested repo 'b' (glob) [255] $ hg add b/x abort: path 'b/x' is inside nested repo 'b' (glob) [255] Should fail: $ hg add b b/x abort: path 'b/x' is inside nested repo 'b' (glob) [255] $ hg st Should arguably print nothing: $ hg st b $ echo a > a $ hg ci -Ama a Should fail: $ hg mv a b abort: path 'b/a' is inside nested repo 'b' (glob) [255] $ hg st $ cd .. mercurial-3.7.3/tests/test-blackbox.t0000644000175000017500000001046612676531525017227 0ustar mpmmpm00000000000000setup $ cat >> $HGRCPATH < [extensions] > blackbox= > mock=$TESTDIR/mockblackbox.py > mq= > EOF $ hg init blackboxtest $ cd blackboxtest command, exit codes, and duration $ echo a > a $ hg add a $ hg blackbox 1970/01/01 00:00:00 bob (*)> add a (glob) 1970/01/01 00:00:00 bob (*)> add a exited 0 after * seconds (glob) incoming change tracking create two heads to verify that we only see one change in the log later $ hg commit -ma $ hg up null 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo b > b $ hg commit -Amb adding b created new head clone, commit, pull $ hg clone . 
../blackboxtest2 updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo c > c $ hg commit -Amc adding c $ cd ../blackboxtest2 $ hg pull pulling from $TESTTMP/blackboxtest (glob) searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (run 'hg update' to get a working copy) $ hg blackbox -l 5 1970/01/01 00:00:00 bob (*)> pull (glob) 1970/01/01 00:00:00 bob (*)> updated served branch cache in ?.???? seconds (glob) 1970/01/01 00:00:00 bob (*)> wrote served branch cache with 1 labels and 2 nodes (glob) 1970/01/01 00:00:00 bob (*)> 1 incoming changes - new heads: d02f48003e62 (glob) 1970/01/01 00:00:00 bob (*)> pull exited 0 after * seconds (glob) we must not cause a failure if we cannot write to the log $ hg rollback repository tip rolled back to revision 1 (undo pull) #if unix-permissions no-root $ chmod 000 .hg/blackbox.log $ hg --debug incoming warning: cannot write to blackbox.log: Permission denied comparing with $TESTTMP/blackboxtest (glob) query 1; heads searching for changes all local heads known remotely changeset: 2:d02f48003e62c24e2659d97d30f2a83abe5d5d51 tag: tip phase: draft parent: 1:6563da9dcf87b1949716e38ff3e3dfaa3198eb06 parent: -1:0000000000000000000000000000000000000000 manifest: 2:ab9d46b053ebf45b7996f2922b9893ff4b63d892 user: test date: Thu Jan 01 00:00:00 1970 +0000 files+: c extra: branch=default description: c #endif $ hg pull pulling from $TESTTMP/blackboxtest (glob) searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (run 'hg update' to get a working copy) a failure reading from the log is fine #if unix-permissions no-root $ hg blackbox -l 3 abort: Permission denied: $TESTTMP/blackboxtest2/.hg/blackbox.log [255] $ chmod 600 .hg/blackbox.log #endif backup bundles get logged $ touch d $ hg commit -Amd adding d created new head $ hg strip tip 0 files updated, 0 
files merged, 1 files removed, 0 files unresolved saved backup bundle to $TESTTMP/blackboxtest2/.hg/strip-backup/*-backup.hg (glob) $ hg blackbox -l 5 1970/01/01 00:00:00 bob (*)> strip tip (glob) 1970/01/01 00:00:00 bob (*)> saved backup bundle to $TESTTMP/blackboxtest2/.hg/strip-backup/*-backup.hg (glob) 1970/01/01 00:00:00 bob (*)> updated base branch cache in ?.???? seconds (glob) 1970/01/01 00:00:00 bob (*)> wrote base branch cache with 1 labels and 2 nodes (glob) 1970/01/01 00:00:00 bob (*)> strip tip exited 0 after * seconds (glob) extension and python hooks - use the eol extension for a pythonhook $ echo '[extensions]' >> .hg/hgrc $ echo 'eol=' >> .hg/hgrc $ echo '[hooks]' >> .hg/hgrc $ echo 'update = echo hooked' >> .hg/hgrc $ hg update hooked 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg blackbox -l 5 1970/01/01 00:00:00 bob (*)> update (glob) 1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 0 tags (glob) 1970/01/01 00:00:00 bob (*)> pythonhook-preupdate: hgext.eol.preupdate finished in * seconds (glob) 1970/01/01 00:00:00 bob (*)> exthook-update: echo hooked finished in * seconds (glob) 1970/01/01 00:00:00 bob (*)> update exited 0 after * seconds (glob) log rotation $ echo '[blackbox]' >> .hg/hgrc $ echo 'maxsize = 20 b' >> .hg/hgrc $ echo 'maxfiles = 3' >> .hg/hgrc $ hg status $ hg status $ hg status $ hg tip -q 2:d02f48003e62 $ ls .hg/blackbox.log* .hg/blackbox.log .hg/blackbox.log.1 .hg/blackbox.log.2 cleanup $ cd .. mercurial-3.7.3/tests/test-subrepo-recursion.t0000644000175000017500000004003612676531525021124 0ustar mpmmpm00000000000000Create test repository: $ hg init repo $ cd repo $ echo x1 > x.txt $ hg init foo $ cd foo $ echo y1 > y.txt $ hg init bar $ cd bar $ echo z1 > z.txt $ cd .. $ echo 'bar = bar' > .hgsub $ cd .. 
$ echo 'foo = foo' > .hgsub Add files --- .hgsub files must go first to trigger subrepos: $ hg add -S .hgsub $ hg add -S foo/.hgsub $ hg add -S foo/bar adding foo/bar/z.txt (glob) $ hg add -S adding x.txt adding foo/y.txt (glob) Test recursive status without committing anything: $ hg status -S A .hgsub A foo/.hgsub A foo/bar/z.txt A foo/y.txt A x.txt Test recursive diff without committing anything: $ hg diff --nodates -S foo diff -r 000000000000 foo/.hgsub --- /dev/null +++ b/foo/.hgsub @@ -0,0 +1,1 @@ +bar = bar diff -r 000000000000 foo/y.txt --- /dev/null +++ b/foo/y.txt @@ -0,0 +1,1 @@ +y1 diff -r 000000000000 foo/bar/z.txt --- /dev/null +++ b/foo/bar/z.txt @@ -0,0 +1,1 @@ +z1 Commits: $ hg commit -m fails abort: uncommitted changes in subrepository 'foo' (use --subrepos for recursive commit) [255] The --subrepos flag overwrite the config setting: $ hg commit -m 0-0-0 --config ui.commitsubrepos=No --subrepos committing subrepository foo committing subrepository foo/bar (glob) $ cd foo $ echo y2 >> y.txt $ hg commit -m 0-1-0 $ cd bar $ echo z2 >> z.txt $ hg commit -m 0-1-1 $ cd .. $ hg commit -m 0-2-1 $ cd .. 
$ hg commit -m 1-2-1 Change working directory: $ echo y3 >> foo/y.txt $ echo z3 >> foo/bar/z.txt $ hg status -S M foo/bar/z.txt M foo/y.txt $ hg diff --nodates -S diff -r d254738c5f5e foo/y.txt --- a/foo/y.txt +++ b/foo/y.txt @@ -1,2 +1,3 @@ y1 y2 +y3 diff -r 9647f22de499 foo/bar/z.txt --- a/foo/bar/z.txt +++ b/foo/bar/z.txt @@ -1,2 +1,3 @@ z1 z2 +z3 Status call crossing repository boundaries: $ hg status -S foo/bar/z.txt M foo/bar/z.txt $ hg status -S -I 'foo/?.txt' M foo/y.txt $ hg status -S -I '**/?.txt' M foo/bar/z.txt M foo/y.txt $ hg diff --nodates -S -I '**/?.txt' diff -r d254738c5f5e foo/y.txt --- a/foo/y.txt +++ b/foo/y.txt @@ -1,2 +1,3 @@ y1 y2 +y3 diff -r 9647f22de499 foo/bar/z.txt --- a/foo/bar/z.txt +++ b/foo/bar/z.txt @@ -1,2 +1,3 @@ z1 z2 +z3 Status from within a subdirectory: $ mkdir dir $ cd dir $ echo a1 > a.txt $ hg status -S M foo/bar/z.txt M foo/y.txt ? dir/a.txt $ hg diff --nodates -S diff -r d254738c5f5e foo/y.txt --- a/foo/y.txt +++ b/foo/y.txt @@ -1,2 +1,3 @@ y1 y2 +y3 diff -r 9647f22de499 foo/bar/z.txt --- a/foo/bar/z.txt +++ b/foo/bar/z.txt @@ -1,2 +1,3 @@ z1 z2 +z3 Status with relative path: $ hg status -S .. M ../foo/bar/z.txt M ../foo/y.txt ? a.txt XXX: filtering lfilesrepo.status() in 3.3-rc causes these files to be listed as added instead of modified. $ hg status -S .. --config extensions.largefiles= M ../foo/bar/z.txt M ../foo/y.txt ? a.txt $ hg diff --nodates -S .. diff -r d254738c5f5e foo/y.txt --- a/foo/y.txt +++ b/foo/y.txt @@ -1,2 +1,3 @@ y1 y2 +y3 diff -r 9647f22de499 foo/bar/z.txt --- a/foo/bar/z.txt +++ b/foo/bar/z.txt @@ -1,2 +1,3 @@ z1 z2 +z3 $ cd .. Cleanup and final commit: $ rm -r dir $ hg commit --subrepos -m 2-3-2 committing subrepository foo committing subrepository foo/bar (glob) Test explicit path commands within subrepos: add/forget $ echo z1 > foo/bar/z2.txt $ hg status -S ? foo/bar/z2.txt $ hg add foo/bar/z2.txt $ hg status -S A foo/bar/z2.txt $ hg forget foo/bar/z2.txt $ hg status -S ? 
foo/bar/z2.txt $ hg forget foo/bar/z2.txt not removing foo/bar/z2.txt: file is already untracked (glob) [1] $ hg status -S ? foo/bar/z2.txt $ rm foo/bar/z2.txt Log with the relationships between repo and its subrepo: $ hg log --template '{rev}:{node|short} {desc}\n' 2:1326fa26d0c0 2-3-2 1:4b3c9ff4f66b 1-2-1 0:23376cbba0d8 0-0-0 $ hg -R foo log --template '{rev}:{node|short} {desc}\n' 3:65903cebad86 2-3-2 2:d254738c5f5e 0-2-1 1:8629ce7dcc39 0-1-0 0:af048e97ade2 0-0-0 $ hg -R foo/bar log --template '{rev}:{node|short} {desc}\n' 2:31ecbdafd357 2-3-2 1:9647f22de499 0-1-1 0:4904098473f9 0-0-0 Status between revisions: $ hg status -S $ hg status -S --rev 0:1 M .hgsubstate M foo/.hgsubstate M foo/bar/z.txt M foo/y.txt $ hg diff --nodates -S -I '**/?.txt' --rev 0:1 diff -r af048e97ade2 -r d254738c5f5e foo/y.txt --- a/foo/y.txt +++ b/foo/y.txt @@ -1,1 +1,2 @@ y1 +y2 diff -r 4904098473f9 -r 9647f22de499 foo/bar/z.txt --- a/foo/bar/z.txt +++ b/foo/bar/z.txt @@ -1,1 +1,2 @@ z1 +z2 Enable progress extension for archive tests: $ cp $HGRCPATH $HGRCPATH.no-progress $ cat >> $HGRCPATH < [extensions] > progress = > [progress] > disable=False > assume-tty = 1 > delay = 0 > # set changedelay really large so we don't see nested topics > changedelay = 30000 > format = topic bar number > refresh = 0 > width = 60 > EOF Test archiving to a directory tree (the doubled lines in the output only show up in the test output, not in real usage): $ hg archive --subrepos ../archive \r (no-eol) (esc) archiving [ ] 0/3\r (no-eol) (esc) archiving [=============> ] 1/3\r (no-eol) (esc) archiving [===========================> ] 2/3\r (no-eol) (esc) archiving [==========================================>] 3/3\r (no-eol) (esc) \r (no-eol) (esc) \r (no-eol) (esc) archiving (foo) [ ] 0/3\r (no-eol) (esc) archiving (foo) [===========> ] 1/3\r (no-eol) (esc) archiving (foo) [=======================> ] 2/3\r (no-eol) (esc) archiving (foo) [====================================>] 3/3\r (no-eol) (esc) \r (no-eol) 
(esc) \r (no-eol) (esc) archiving (foo/bar) [ ] 0/1\r (no-eol) (glob) (esc) archiving (foo/bar) [================================>] 1/1\r (no-eol) (glob) (esc) \r (no-eol) (esc) $ find ../archive | sort ../archive ../archive/.hg_archival.txt ../archive/.hgsub ../archive/.hgsubstate ../archive/foo ../archive/foo/.hgsub ../archive/foo/.hgsubstate ../archive/foo/bar ../archive/foo/bar/z.txt ../archive/foo/y.txt ../archive/x.txt Test archiving to zip file (unzip output is unstable): $ hg archive --subrepos --prefix '.' ../archive.zip \r (no-eol) (esc) archiving [ ] 0/3\r (no-eol) (esc) archiving [=============> ] 1/3\r (no-eol) (esc) archiving [===========================> ] 2/3\r (no-eol) (esc) archiving [==========================================>] 3/3\r (no-eol) (esc) \r (no-eol) (esc) \r (no-eol) (esc) archiving (foo) [ ] 0/3\r (no-eol) (esc) archiving (foo) [===========> ] 1/3\r (no-eol) (esc) archiving (foo) [=======================> ] 2/3\r (no-eol) (esc) archiving (foo) [====================================>] 3/3\r (no-eol) (esc) \r (no-eol) (esc) \r (no-eol) (esc) archiving (foo/bar) [ ] 0/1\r (no-eol) (glob) (esc) archiving (foo/bar) [================================>] 1/1\r (no-eol) (glob) (esc) \r (no-eol) (esc) (unzip date formating is unstable, we do not care about it and glob it out) $ unzip -l ../archive.zip Archive: ../archive.zip Length [ ]* Date [ ]* Time [ ]* Name (re) [\- ]* (re) 172 [0-9:\- ]* .hg_archival.txt (re) 10 [0-9:\- ]* .hgsub (re) 45 [0-9:\- ]* .hgsubstate (re) 3 [0-9:\- ]* x.txt (re) 10 [0-9:\- ]* foo/.hgsub (re) 45 [0-9:\- ]* foo/.hgsubstate (re) 9 [0-9:\- ]* foo/y.txt (re) 9 [0-9:\- ]* foo/bar/z.txt (re) [\- ]* (re) 303 [ ]* 8 files (re) Test archiving a revision that references a subrepo that is not yet cloned: #if hardlink $ hg clone -U . 
../empty \r (no-eol) (esc) linking [ <=> ] 1\r (no-eol) (esc) linking [ <=> ] 2\r (no-eol) (esc) linking [ <=> ] 3\r (no-eol) (esc) linking [ <=> ] 4\r (no-eol) (esc) linking [ <=> ] 5\r (no-eol) (esc) linking [ <=> ] 6\r (no-eol) (esc) linking [ <=> ] 7\r (no-eol) (esc) linking [ <=> ] 8\r (no-eol) (esc) \r (no-eol) (esc) #else $ hg clone -U . ../empty \r (no-eol) (esc) linking [ <=> ] 1 (no-eol) #endif $ cd ../empty #if hardlink $ hg archive --subrepos -r tip --prefix './' ../archive.tar.gz \r (no-eol) (esc) archiving [ ] 0/3\r (no-eol) (esc) archiving [=============> ] 1/3\r (no-eol) (esc) archiving [===========================> ] 2/3\r (no-eol) (esc) archiving [==========================================>] 3/3\r (no-eol) (esc) \r (no-eol) (esc) \r (no-eol) (esc) linking [ <=> ] 1\r (no-eol) (esc) linking [ <=> ] 2\r (no-eol) (esc) linking [ <=> ] 3\r (no-eol) (esc) linking [ <=> ] 4\r (no-eol) (esc) linking [ <=> ] 5\r (no-eol) (esc) linking [ <=> ] 6\r (no-eol) (esc) linking [ <=> ] 7\r (no-eol) (esc) linking [ <=> ] 8\r (no-eol) (esc) \r (no-eol) (esc) \r (no-eol) (esc) archiving (foo) [ ] 0/3\r (no-eol) (esc) archiving (foo) [===========> ] 1/3\r (no-eol) (esc) archiving (foo) [=======================> ] 2/3\r (no-eol) (esc) archiving (foo) [====================================>] 3/3\r (no-eol) (esc) \r (no-eol) (esc) \r (no-eol) (esc) linking [ <=> ] 1\r (no-eol) (esc) linking [ <=> ] 2\r (no-eol) (esc) linking [ <=> ] 3\r (no-eol) (esc) linking [ <=> ] 4\r (no-eol) (esc) linking [ <=> ] 5\r (no-eol) (esc) linking [ <=> ] 6\r (no-eol) (esc) \r (no-eol) (esc) \r (no-eol) (esc) archiving (foo/bar) [ ] 0/1\r (no-eol) (glob) (esc) archiving (foo/bar) [================================>] 1/1\r (no-eol) (glob) (esc) \r (no-eol) (esc) cloning subrepo foo from $TESTTMP/repo/foo cloning subrepo foo/bar from $TESTTMP/repo/foo/bar (glob) #else Note there's a slight output glitch on non-hardlink systems: the last "linking" progress topic never gets closed, leading to 
slight output corruption on that platform. $ hg archive --subrepos -r tip --prefix './' ../archive.tar.gz \r (no-eol) (esc) archiving [ ] 0/3\r (no-eol) (esc) archiving [=============> ] 1/3\r (no-eol) (esc) archiving [===========================> ] 2/3\r (no-eol) (esc) archiving [==========================================>] 3/3\r (no-eol) (esc) \r (no-eol) (esc) \r (no-eol) (esc) linking [ <=> ] 1\r (no-eol) (esc) cloning subrepo foo/bar from $TESTTMP/repo/foo/bar (glob) #endif Archive + subrepos uses '/' for all component separators $ tar -tzf ../archive.tar.gz | sort .hg_archival.txt .hgsub .hgsubstate foo/.hgsub foo/.hgsubstate foo/bar/z.txt foo/y.txt x.txt The newly cloned subrepos contain no working copy: $ hg -R foo summary parent: -1:000000000000 (no revision checked out) branch: default commit: (clean) update: 4 new changesets (update) Disable progress extension and cleanup: $ mv $HGRCPATH.no-progress $HGRCPATH Test archiving when there is a directory in the way for a subrepo created by archive: $ hg clone -U . ../almost-empty $ cd ../almost-empty $ mkdir foo $ echo f > foo/f $ hg archive --subrepos -r tip archive cloning subrepo foo from $TESTTMP/empty/foo abort: destination '$TESTTMP/almost-empty/foo' is not empty (in subrepo foo) (glob) [255] Clone and test outgoing: $ cd .. 
$ hg clone repo repo2 updating to branch default cloning subrepo foo from $TESTTMP/repo/foo cloning subrepo foo/bar from $TESTTMP/repo/foo/bar (glob) 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd repo2 $ hg outgoing -S comparing with $TESTTMP/repo (glob) searching for changes no changes found comparing with $TESTTMP/repo/foo searching for changes no changes found comparing with $TESTTMP/repo/foo/bar searching for changes no changes found [1] Make nested change: $ echo y4 >> foo/y.txt $ hg diff --nodates -S diff -r 65903cebad86 foo/y.txt --- a/foo/y.txt +++ b/foo/y.txt @@ -1,3 +1,4 @@ y1 y2 y3 +y4 $ hg commit --subrepos -m 3-4-2 committing subrepository foo $ hg outgoing -S comparing with $TESTTMP/repo (glob) searching for changes changeset: 3:2655b8ecc4ee tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 3-4-2 comparing with $TESTTMP/repo/foo searching for changes changeset: 4:e96193d6cb36 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 3-4-2 comparing with $TESTTMP/repo/foo/bar searching for changes no changes found Switch to original repo and setup default path: $ cd ../repo $ echo '[paths]' >> .hg/hgrc $ echo 'default = ../repo2' >> .hg/hgrc Test incoming: $ hg incoming -S comparing with $TESTTMP/repo2 (glob) searching for changes changeset: 3:2655b8ecc4ee tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 3-4-2 comparing with $TESTTMP/repo2/foo searching for changes changeset: 4:e96193d6cb36 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 3-4-2 comparing with $TESTTMP/repo2/foo/bar searching for changes no changes found $ hg incoming -S --bundle incoming.hg abort: cannot combine --bundle and --subrepos [255] Test missing subrepo: $ rm -r foo $ hg status -S warning: error "unknown revision '65903cebad86f1a84bd4f1134f62fa7dcb7a1c98'" in subrepository "foo" Issue2619: IndexError: list index out of range on hg add with subrepos The subrepo must sorts after the explicit 
filename. $ cd .. $ hg init test $ cd test $ hg init x $ echo abc > abc.txt $ hg ci -Am "abc" adding abc.txt $ echo "x = x" >> .hgsub $ hg add .hgsub $ touch a x/a $ hg add a x/a $ hg ci -Sm "added x" committing subrepository x $ echo abc > x/a $ hg revert --rev '.^' "set:subrepo('glob:x*')" abort: subrepository 'x' does not exist in 25ac2c9b3180! [255] $ cd .. mercurial-3.7.3/tests/mockblackbox.py0000644000175000017500000000034512676531525017304 0ustar mpmmpm00000000000000from mercurial import util def makedate(): return 0, 0 def getuser(): return 'bob' # mock the date and user apis so the output is always the same def uisetup(ui): util.makedate = makedate util.getuser = getuser mercurial-3.7.3/tests/test-issue612.t0000644000175000017500000000116012676531525017012 0ustar mpmmpm00000000000000https://bz.mercurial-scm.org/612 $ hg init $ mkdir src $ echo a > src/a.c $ hg ci -Ama adding src/a.c $ hg mv src source moving src/a.c to source/a.c (glob) $ hg ci -Ammove $ hg co -C 0 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo new > src/a.c $ echo compiled > src/a.o $ hg ci -mupdate created new head $ hg status ? src/a.o $ hg merge merging src/a.c and source/a.c to source/a.c 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg status M source/a.c R src/a.c ? src/a.o mercurial-3.7.3/tests/test-convert-svn-branches.t0000644000175000017500000000616512676531525021512 0ustar mpmmpm00000000000000#require svn svn-bindings $ cat >> $HGRCPATH < [extensions] > convert = > EOF $ svnadmin create svn-repo $ svnadmin load -q svn-repo < "$TESTDIR/svn/branches.svndump" Convert trunk and branches $ cat > branchmap < old3 newbranch > > > EOF $ hg convert --branchmap=branchmap --datesort -r 10 svn-repo A-hg initializing destination A-hg repository scanning source... sorting... converting... 
10 init projA 9 hello 8 branch trunk, remove c and dir 7 change a 6 change b 5 move and update c 4 move and update c 3 change b again 2 move to old2 1 move back to old 0 last change to a Test template keywords $ hg -R A-hg log --template '{rev} {svnuuid}{svnpath}@{svnrev}\n' 10 644ede6c-2b81-4367-9dc8-d786514f2cde/trunk@10 9 644ede6c-2b81-4367-9dc8-d786514f2cde/branches/old@9 8 644ede6c-2b81-4367-9dc8-d786514f2cde/branches/old2@8 7 644ede6c-2b81-4367-9dc8-d786514f2cde/branches/old@7 6 644ede6c-2b81-4367-9dc8-d786514f2cde/trunk@6 5 644ede6c-2b81-4367-9dc8-d786514f2cde/branches/old@6 4 644ede6c-2b81-4367-9dc8-d786514f2cde/branches/old@5 3 644ede6c-2b81-4367-9dc8-d786514f2cde/trunk@4 2 644ede6c-2b81-4367-9dc8-d786514f2cde/branches/old@3 1 644ede6c-2b81-4367-9dc8-d786514f2cde/trunk@2 0 644ede6c-2b81-4367-9dc8-d786514f2cde/trunk@1 Convert again $ hg convert --branchmap=branchmap --datesort svn-repo A-hg scanning source... sorting... converting... 0 branch trunk@1 into old3 $ cd A-hg $ hg log -G --template 'branch={branches} {rev} {desc|firstline} files: {files}\n' o branch=newbranch 11 branch trunk@1 into old3 files: | | o branch= 10 last change to a files: a | | | | o branch=old 9 move back to old files: | | | | | o branch=old2 8 move to old2 files: | | | | | o branch=old 7 change b again files: b | | | | o | branch= 6 move and update c files: b | | | | | o branch=old 5 move and update c files: c | | | | | o branch=old 4 change b files: b | | | | o | branch= 3 change a files: a | | | | | o branch=old 2 branch trunk, remove c and dir files: c | |/ | o branch= 1 hello files: a b c dir/e |/ o branch= 0 init projA files: $ hg branches newbranch 11:a6d7cc050ad1 default 10:6e2b33404495 old 9:93c4b0f99529 old2 8:b52884d7bead (inactive) $ hg tags -q tip $ cd .. 
Test hg failing to call itself $ HG=foobar hg convert svn-repo B-hg 2>&1 | grep abort abort: Mercurial failed to run itself, check hg executable is in PATH Convert 'trunk' to branch other than 'default' $ cat > branchmap < default hgtrunk > > > EOF $ hg convert --branchmap=branchmap --datesort -r 10 svn-repo C-hg initializing destination C-hg repository scanning source... sorting... converting... 10 init projA 9 hello 8 branch trunk, remove c and dir 7 change a 6 change b 5 move and update c 4 move and update c 3 change b again 2 move to old2 1 move back to old 0 last change to a $ cd C-hg $ hg branches --template '{branch}\n' hgtrunk old old2 $ cd .. mercurial-3.7.3/tests/test-import.t0000644000175000017500000011373412676531525016756 0ustar mpmmpm00000000000000 $ hg init a $ mkdir a/d1 $ mkdir a/d1/d2 $ echo line 1 > a/a $ echo line 1 > a/d1/d2/a $ hg --cwd a ci -Ama adding a adding d1/d2/a $ echo line 2 >> a/a $ hg --cwd a ci -u someone -d '1 0' -m'second change' import with no args: $ hg --cwd a import abort: need at least one patch to import [255] generate patches for the test $ hg --cwd a export tip > exported-tip.patch $ hg --cwd a diff -r0:1 > diffed-tip.patch import exported patch (this also tests that editor is not invoked, if the patch contains the commit message and '--edit' is not specified) $ hg clone -r0 a b adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ HGEDITOR=cat hg --cwd b import ../exported-tip.patch applying ../exported-tip.patch message and committer and date should be same $ hg --cwd b tip changeset: 1:1d4bd90af0e4 tag: tip user: someone date: Thu Jan 01 00:00:01 1970 +0000 summary: second change $ rm -r b import exported patch with external patcher (this also tests that editor is invoked, if the '--edit' is specified, regardless of the commit message in the patch) $ cat > dummypatch.py < print 
'patching file a' > file('a', 'wb').write('line2\n') > EOF $ hg clone -r0 a b adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ HGEDITOR=cat hg --config ui.patch='python ../dummypatch.py' --cwd b import --edit ../exported-tip.patch applying ../exported-tip.patch second change HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. HG: -- HG: user: someone HG: branch 'default' HG: changed a $ cat b/a line2 $ rm -r b import of plain diff should fail without message (this also tests that editor is invoked, if the patch doesn't contain the commit message, regardless of '--edit') $ hg clone -r0 a b adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat > $TESTTMP/editor.sh < env | grep HGEDITFORM > cat \$1 > EOF $ HGEDITOR="sh $TESTTMP/editor.sh" hg --cwd b import ../diffed-tip.patch applying ../diffed-tip.patch HGEDITFORM=import.normal.normal HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. HG: -- HG: user: test HG: branch 'default' HG: changed a abort: empty commit message [255] Test avoiding editor invocation at applying the patch with --exact, even if commit message is empty $ echo a >> b/a $ hg --cwd b commit -m ' ' $ hg --cwd b tip -T "{node}\n" d8804f3f5396d800812f579c8452796a5993bdb2 $ hg --cwd b export -o ../empty-log.diff . 
$ hg --cwd b update -q -C ".^1" $ hg --cwd b --config extensions.strip= strip -q tip $ HGEDITOR=cat hg --cwd b import --exact ../empty-log.diff applying ../empty-log.diff $ hg --cwd b tip -T "{node}\n" d8804f3f5396d800812f579c8452796a5993bdb2 $ rm -r b import of plain diff should be ok with message $ hg clone -r0 a b adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg --cwd b import -mpatch ../diffed-tip.patch applying ../diffed-tip.patch $ rm -r b import of plain diff with specific date and user (this also tests that editor is not invoked, if '--message'/'--logfile' is specified and '--edit' is not) $ hg clone -r0 a b adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg --cwd b import -mpatch -d '1 0' -u 'user@nowhere.net' ../diffed-tip.patch applying ../diffed-tip.patch $ hg -R b tip -pv changeset: 1:ca68f19f3a40 tag: tip user: user@nowhere.net date: Thu Jan 01 00:00:01 1970 +0000 files: a description: patch diff -r 80971e65b431 -r ca68f19f3a40 a --- a/a Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:01 1970 +0000 @@ -1,1 +1,2 @@ line 1 +line 2 $ rm -r b import of plain diff should be ok with --no-commit (this also tests that editor is not invoked, if '--no-commit' is specified, regardless of '--edit') $ hg clone -r0 a b adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ HGEDITOR=cat hg --cwd b import --no-commit --edit ../diffed-tip.patch applying ../diffed-tip.patch $ hg --cwd b diff --nodates diff -r 80971e65b431 a --- a/a +++ b/a @@ -1,1 +1,2 @@ line 1 +line 2 $ rm -r b import of malformed plain diff should fail $ 
hg clone -r0 a b adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ sed 's/1,1/foo/' < diffed-tip.patch > broken.patch $ hg --cwd b import -mpatch ../broken.patch applying ../broken.patch abort: bad hunk #1 [255] $ rm -r b hg -R repo import put the clone in a subdir - having a directory named "a" used to hide a bug. $ mkdir dir $ hg clone -r0 a dir/b adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd dir $ hg -R b import ../exported-tip.patch applying ../exported-tip.patch $ cd .. $ rm -r dir import from stdin $ hg clone -r0 a b adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg --cwd b import - < exported-tip.patch applying patch from stdin $ rm -r b import two patches in one stream $ hg init b $ hg --cwd a export 0:tip | hg --cwd b import - applying patch from stdin $ hg --cwd a id 1d4bd90af0e4 tip $ hg --cwd b id 1d4bd90af0e4 tip $ rm -r b override commit message $ hg clone -r0 a b adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg --cwd b import -m 'override' - < exported-tip.patch applying patch from stdin $ hg --cwd b tip | grep override summary: override $ rm -r b $ cat > mkmsg.py < import email.Message, sys > msg = email.Message.Message() > patch = open(sys.argv[1], 'rb').read() > msg.set_payload('email commit message\n' + patch) > msg['Subject'] = 'email patch' > msg['From'] = 'email patcher' > file(sys.argv[2], 'wb').write(msg.as_string()) > EOF plain diff in email, 
subject, message body $ hg clone -r0 a b adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ python mkmsg.py diffed-tip.patch msg.patch $ hg --cwd b import ../msg.patch applying ../msg.patch $ hg --cwd b tip | grep email user: email patcher summary: email patch $ rm -r b plain diff in email, no subject, message body $ hg clone -r0 a b adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ grep -v '^Subject:' msg.patch | hg --cwd b import - applying patch from stdin $ rm -r b plain diff in email, subject, no message body $ hg clone -r0 a b adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ grep -v '^email ' msg.patch | hg --cwd b import - applying patch from stdin $ rm -r b plain diff in email, no subject, no message body, should fail $ hg clone -r0 a b adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ egrep -v '^(Subject|email)' msg.patch | hg --cwd b import - applying patch from stdin abort: empty commit message [255] $ rm -r b hg export in email, should use patch header $ hg clone -r0 a b adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ python mkmsg.py exported-tip.patch msg.patch $ cat msg.patch | hg --cwd b import - applying patch from stdin $ hg --cwd b tip | grep second summary: second change $ rm -r b subject: duplicate detection, removal 
of [PATCH] The '---' tests the gitsendmail handling without proper mail headers $ cat > mkmsg2.py < import email.Message, sys > msg = email.Message.Message() > patch = open(sys.argv[1], 'rb').read() > msg.set_payload('email patch\n\nnext line\n---\n' + patch) > msg['Subject'] = '[PATCH] email patch' > msg['From'] = 'email patcher' > file(sys.argv[2], 'wb').write(msg.as_string()) > EOF plain diff in email, [PATCH] subject, message body with subject $ hg clone -r0 a b adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ python mkmsg2.py diffed-tip.patch msg.patch $ cat msg.patch | hg --cwd b import - applying patch from stdin $ hg --cwd b tip --template '{desc}\n' email patch next line $ rm -r b Issue963: Parent of working dir incorrect after import of multiple patches and rollback We weren't backing up the correct dirstate file when importing many patches: import patch1 patch2; rollback $ echo line 3 >> a/a $ hg --cwd a ci -m'third change' $ hg --cwd a export -o '../patch%R' 1 2 $ hg clone -qr0 a b $ hg --cwd b parents --template 'parent: {rev}\n' parent: 0 $ hg --cwd b import -v ../patch1 ../patch2 applying ../patch1 patching file a committing files: a committing manifest committing changelog created 1d4bd90af0e4 applying ../patch2 patching file a committing files: a committing manifest committing changelog created 6d019af21222 $ hg --cwd b rollback repository tip rolled back to revision 0 (undo import) working directory now based on revision 0 $ hg --cwd b parents --template 'parent: {rev}\n' parent: 0 Test that "hg rollback" doesn't restore dirstate to one at the beginning of the rollbacked transaction in not-"parent-gone" case. invoking pretxncommit hook will cause marking '.hg/dirstate' as a file to be restored at rollbacking, after DirstateTransactionPlan (see wiki page for detail). 
$ hg --cwd b branch -q foobar $ hg --cwd b commit -m foobar $ hg --cwd b update 0 -q $ hg --cwd b import ../patch1 ../patch2 --config hooks.pretxncommit=true applying ../patch1 applying ../patch2 $ hg --cwd b update -q 1 $ hg --cwd b rollback -q $ hg --cwd b parents --template 'parent: {rev}\n' parent: 1 $ hg --cwd b update -q -C 0 $ hg --cwd b --config extensions.strip= strip -q 1 Test visibility of in-memory distate changes inside transaction to external process $ echo foo > a/foo $ hg --cwd a commit -A -m 'adding foo' foo $ hg --cwd a export -o '../patch%R' 3 $ cat > $TESTTMP/checkvisibility.sh < echo "====" > hg parents --template "VISIBLE {rev}:{node|short}\n" > hg status -amr > # test that pending changes are hidden > unset HG_PENDING > hg parents --template "ACTUAL {rev}:{node|short}\n" > hg status -amr > echo "====" > EOF == test visibility to external editor $ (cd b && sh "$TESTTMP/checkvisibility.sh") ==== VISIBLE 0:80971e65b431 ACTUAL 0:80971e65b431 ==== $ HGEDITOR="sh $TESTTMP/checkvisibility.sh" hg --cwd b import -v --edit ../patch1 ../patch2 ../patch3 applying ../patch1 patching file a ==== VISIBLE 0:80971e65b431 M a ACTUAL 0:80971e65b431 M a ==== committing files: a committing manifest committing changelog created 1d4bd90af0e4 applying ../patch2 patching file a ==== VISIBLE 1:1d4bd90af0e4 M a ACTUAL 0:80971e65b431 M a ==== committing files: a committing manifest committing changelog created 6d019af21222 applying ../patch3 patching file foo adding foo ==== VISIBLE 2:6d019af21222 A foo ACTUAL 0:80971e65b431 M a ==== committing files: foo committing manifest committing changelog created 55e3f75b2378 $ hg --cwd b rollback -q (content of file "a" is already changed and it should be recognized as "M", even though dirstate is restored to one before "hg import") $ (cd b && sh "$TESTTMP/checkvisibility.sh") ==== VISIBLE 0:80971e65b431 M a ACTUAL 0:80971e65b431 M a ==== $ hg --cwd b revert --no-backup a $ rm -f b/foo == test visibility to precommit external 
hook $ cat >> b/.hg/hgrc < [hooks] > precommit.visibility = sh $TESTTMP/checkvisibility.sh > EOF $ (cd b && sh "$TESTTMP/checkvisibility.sh") ==== VISIBLE 0:80971e65b431 ACTUAL 0:80971e65b431 ==== $ hg --cwd b import ../patch1 ../patch2 ../patch3 applying ../patch1 ==== VISIBLE 0:80971e65b431 M a ACTUAL 0:80971e65b431 M a ==== applying ../patch2 ==== VISIBLE 1:1d4bd90af0e4 M a ACTUAL 0:80971e65b431 M a ==== applying ../patch3 ==== VISIBLE 2:6d019af21222 A foo ACTUAL 0:80971e65b431 M a ==== $ hg --cwd b rollback -q $ (cd b && sh "$TESTTMP/checkvisibility.sh") ==== VISIBLE 0:80971e65b431 M a ACTUAL 0:80971e65b431 M a ==== $ hg --cwd b revert --no-backup a $ rm -f b/foo $ cat >> b/.hg/hgrc < [hooks] > precommit.visibility = > EOF == test visibility to pretxncommit external hook $ cat >> b/.hg/hgrc < [hooks] > pretxncommit.visibility = sh $TESTTMP/checkvisibility.sh > EOF $ (cd b && sh "$TESTTMP/checkvisibility.sh") ==== VISIBLE 0:80971e65b431 ACTUAL 0:80971e65b431 ==== $ hg --cwd b import ../patch1 ../patch2 ../patch3 applying ../patch1 ==== VISIBLE 0:80971e65b431 M a ACTUAL 0:80971e65b431 M a ==== applying ../patch2 ==== VISIBLE 1:1d4bd90af0e4 M a ACTUAL 0:80971e65b431 M a ==== applying ../patch3 ==== VISIBLE 2:6d019af21222 A foo ACTUAL 0:80971e65b431 M a ==== $ hg --cwd b rollback -q $ (cd b && sh "$TESTTMP/checkvisibility.sh") ==== VISIBLE 0:80971e65b431 M a ACTUAL 0:80971e65b431 M a ==== $ hg --cwd b revert --no-backup a $ rm -f b/foo $ cat >> b/.hg/hgrc < [hooks] > pretxncommit.visibility = > EOF $ rm -r b importing a patch in a subdirectory failed at the commit stage $ echo line 2 >> a/d1/d2/a $ hg --cwd a ci -u someoneelse -d '1 0' -m'subdir change' hg import in a subdirectory $ hg clone -r0 a b adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg --cwd a export tip > tmp $ sed -e 's/d1\/d2\///' < tmp > 
subdir-tip.patch $ dir=`pwd` $ cd b/d1/d2 2>&1 > /dev/null $ hg import ../../../subdir-tip.patch applying ../../../subdir-tip.patch $ cd "$dir" message should be 'subdir change' committer should be 'someoneelse' $ hg --cwd b tip changeset: 1:3577f5aea227 tag: tip user: someoneelse date: Thu Jan 01 00:00:01 1970 +0000 summary: subdir change should be empty $ hg --cwd b status Test fuzziness (ambiguous patch location, fuzz=2) $ hg init fuzzy $ cd fuzzy $ echo line1 > a $ echo line0 >> a $ echo line3 >> a $ hg ci -Am adda adding a $ echo line1 > a $ echo line2 >> a $ echo line0 >> a $ echo line3 >> a $ hg ci -m change a $ hg export tip > fuzzy-tip.patch $ hg up -C 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo line1 > a $ echo line0 >> a $ echo line1 >> a $ echo line0 >> a $ hg ci -m brancha created new head $ hg import --config patch.fuzz=0 -v fuzzy-tip.patch applying fuzzy-tip.patch patching file a Hunk #1 FAILED at 0 1 out of 1 hunks FAILED -- saving rejects to file a.rej abort: patch failed to apply [255] $ hg import --no-commit -v fuzzy-tip.patch applying fuzzy-tip.patch patching file a Hunk #1 succeeded at 2 with fuzz 1 (offset 0 lines). applied to working directory $ hg revert -a reverting a import with --no-commit should have written .hg/last-message.txt $ cat .hg/last-message.txt change (no-eol) test fuzziness with eol=auto $ hg --config patch.eol=auto import --no-commit -v fuzzy-tip.patch applying fuzzy-tip.patch patching file a Hunk #1 succeeded at 2 with fuzz 1 (offset 0 lines). applied to working directory $ cd .. 
Test hunk touching empty files (issue906) $ hg init empty $ cd empty $ touch a $ touch b1 $ touch c1 $ echo d > d $ hg ci -Am init adding a adding b1 adding c1 adding d $ echo a > a $ echo b > b1 $ hg mv b1 b2 $ echo c > c1 $ hg copy c1 c2 $ rm d $ touch d $ hg diff --git diff --git a/a b/a --- a/a +++ b/a @@ -0,0 +1,1 @@ +a diff --git a/b1 b/b2 rename from b1 rename to b2 --- a/b1 +++ b/b2 @@ -0,0 +1,1 @@ +b diff --git a/c1 b/c1 --- a/c1 +++ b/c1 @@ -0,0 +1,1 @@ +c diff --git a/c1 b/c2 copy from c1 copy to c2 --- a/c1 +++ b/c2 @@ -0,0 +1,1 @@ +c diff --git a/d b/d --- a/d +++ b/d @@ -1,1 +0,0 @@ -d $ hg ci -m empty $ hg export --git tip > empty.diff $ hg up -C 0 4 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg import empty.diff applying empty.diff $ for name in a b1 b2 c1 c2 d; do > echo % $name file > test -f $name && cat $name > done % a file a % b1 file % b2 file b % c1 file c % c2 file c % d file $ cd .. Test importing a patch ending with a binary file removal $ hg init binaryremoval $ cd binaryremoval $ echo a > a $ $PYTHON -c "file('b', 'wb').write('a\x00b')" $ hg ci -Am addall adding a adding b $ hg rm a $ hg rm b $ hg st R a R b $ hg ci -m remove $ hg export --git . > remove.diff $ cat remove.diff | grep git diff --git a/a b/a diff --git a/b b/b $ hg up -C 0 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg import remove.diff applying remove.diff $ hg manifest $ cd .. Issue927: test update+rename with common name $ hg init t $ cd t $ touch a $ hg ci -Am t adding a $ echo a > a Here, bfile.startswith(afile) $ hg copy a a2 $ hg ci -m copya $ hg export --git tip > copy.diff $ hg up -C 0 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg import copy.diff applying copy.diff a should contain an 'a' $ cat a a and a2 should have duplicated it $ cat a2 a $ cd .. 
test -p0 $ hg init p0 $ cd p0 $ echo a > a $ hg ci -Am t adding a $ hg import -p foo abort: invalid value 'foo' for option -p, expected int [255] $ hg import -p0 - << EOF > foobar > --- a Sat Apr 12 22:43:58 2008 -0400 > +++ a Sat Apr 12 22:44:05 2008 -0400 > @@ -1,1 +1,1 @@ > -a > +bb > EOF applying patch from stdin $ hg status $ cat a bb test --prefix $ mkdir -p dir/dir2 $ echo b > dir/dir2/b $ hg ci -Am b adding dir/dir2/b $ hg import -p2 --prefix dir - << EOF > foobar > --- drop1/drop2/dir2/b > +++ drop1/drop2/dir2/b > @@ -1,1 +1,1 @@ > -b > +cc > EOF applying patch from stdin $ hg status $ cat dir/dir2/b cc $ cd .. test paths outside repo root $ mkdir outside $ touch outside/foo $ hg init inside $ cd inside $ hg import - < diff --git a/a b/b > rename from ../outside/foo > rename to bar > EOF applying patch from stdin abort: path contains illegal component: ../outside/foo (glob) [255] $ cd .. test import with similarity and git and strip (issue295 et al.) $ hg init sim $ cd sim $ echo 'this is a test' > a $ hg ci -Ama adding a $ cat > ../rename.diff < diff --git a/foo/a b/foo/a > deleted file mode 100644 > --- a/foo/a > +++ /dev/null > @@ -1,1 +0,0 @@ > -this is a test > diff --git a/foo/b b/foo/b > new file mode 100644 > --- /dev/null > +++ b/foo/b > @@ -0,0 +1,2 @@ > +this is a test > +foo > EOF $ hg import --no-commit -v -s 1 ../rename.diff -p2 applying ../rename.diff patching file a patching file b adding b recording removal of a as rename to b (88% similar) applied to working directory $ hg st -C A b a R a $ hg revert -a undeleting a forgetting b $ rm b $ hg import --no-commit -v -s 100 ../rename.diff -p2 applying ../rename.diff patching file a patching file b adding b applied to working directory $ hg st -C A b R a $ cd .. 
Issue1495: add empty file from the end of patch $ hg init addemptyend $ cd addemptyend $ touch a $ hg addremove adding a $ hg ci -m "commit" $ cat > a.patch < add a, b > diff --git a/a b/a > --- a/a > +++ b/a > @@ -0,0 +1,1 @@ > +a > diff --git a/b b/b > new file mode 100644 > EOF $ hg import --no-commit a.patch applying a.patch apply a good patch followed by an empty patch (mainly to ensure that dirstate is *not* updated when import crashes) $ hg update -q -C . $ rm b $ touch empty.patch $ hg import a.patch empty.patch applying a.patch applying empty.patch transaction abort! rollback completed abort: empty.patch: no diffs found [255] $ hg tip --template '{rev} {desc|firstline}\n' 0 commit $ hg -q status M a $ cd .. create file when source is not /dev/null $ cat > create.patch < diff -Naur proj-orig/foo proj-new/foo > --- proj-orig/foo 1969-12-31 16:00:00.000000000 -0800 > +++ proj-new/foo 2009-07-17 16:50:45.801368000 -0700 > @@ -0,0 +1,1 @@ > +a > EOF some people have patches like the following too $ cat > create2.patch < diff -Naur proj-orig/foo proj-new/foo > --- proj-orig/foo.orig 1969-12-31 16:00:00.000000000 -0800 > +++ proj-new/foo 2009-07-17 16:50:45.801368000 -0700 > @@ -0,0 +1,1 @@ > +a > EOF $ hg init oddcreate $ cd oddcreate $ hg import --no-commit ../create.patch applying ../create.patch $ cat foo a $ rm foo $ hg revert foo $ hg import --no-commit ../create2.patch applying ../create2.patch $ cat foo a $ cd .. Issue1859: first line mistaken for email headers $ hg init emailconfusion $ cd emailconfusion $ cat > a.patch < module: summary > > description > > > diff -r 000000000000 -r 9b4c1e343b55 test.txt > --- /dev/null > +++ b/a > @@ -0,0 +1,1 @@ > +a > EOF $ hg import -d '0 0' a.patch applying a.patch $ hg parents -v changeset: 0:5a681217c0ad tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 files: a description: module: summary description $ cd .. 
in commit message $ hg init commitconfusion $ cd commitconfusion $ cat > a.patch < module: summary > > --- description > > diff --git a/a b/a > new file mode 100644 > --- /dev/null > +++ b/a > @@ -0,0 +1,1 @@ > +a > EOF > hg import -d '0 0' a.patch > hg parents -v > cd .. > > echo '% tricky header splitting' > cat > trickyheaders.patch < From: User A > Subject: [PATCH] from: tricky! > > # HG changeset patch > # User User B > # Date 1266264441 18000 > # Branch stable > # Node ID f2be6a1170ac83bf31cb4ae0bad00d7678115bc0 > # Parent 0000000000000000000000000000000000000000 > from: tricky! > > That is not a header. > > diff -r 000000000000 -r f2be6a1170ac foo > --- /dev/null > +++ b/foo > @@ -0,0 +1,1 @@ > +foo > EOF applying a.patch changeset: 0:f34d9187897d tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 files: a description: module: summary % tricky header splitting $ hg init trickyheaders $ cd trickyheaders $ hg import -d '0 0' ../trickyheaders.patch applying ../trickyheaders.patch $ hg export --git tip # HG changeset patch # User User B # Date 0 0 # Thu Jan 01 00:00:00 1970 +0000 # Node ID eb56ab91903632294ac504838508cb370c0901d2 # Parent 0000000000000000000000000000000000000000 from: tricky! That is not a header. diff --git a/foo b/foo new file mode 100644 --- /dev/null +++ b/foo @@ -0,0 +1,1 @@ +foo $ cd .. 
Issue2102: hg export and hg import speak different languages $ hg init issue2102 $ cd issue2102 $ mkdir -p src/cmd/gc $ touch src/cmd/gc/mksys.bash $ hg ci -Am init adding src/cmd/gc/mksys.bash $ hg import - < # HG changeset patch > # User Rob Pike > # Date 1216685449 25200 > # Node ID 03aa2b206f499ad6eb50e6e207b9e710d6409c98 > # Parent 93d10138ad8df586827ca90b4ddb5033e21a3a84 > help management of empty pkg and lib directories in perforce > > R=gri > DELTA=4 (4 added, 0 deleted, 0 changed) > OCL=13328 > CL=13328 > > diff --git a/lib/place-holder b/lib/place-holder > new file mode 100644 > --- /dev/null > +++ b/lib/place-holder > @@ -0,0 +1,2 @@ > +perforce does not maintain empty directories. > +this file helps. > diff --git a/pkg/place-holder b/pkg/place-holder > new file mode 100644 > --- /dev/null > +++ b/pkg/place-holder > @@ -0,0 +1,2 @@ > +perforce does not maintain empty directories. > +this file helps. > diff --git a/src/cmd/gc/mksys.bash b/src/cmd/gc/mksys.bash > old mode 100644 > new mode 100755 > EOF applying patch from stdin #if execbit $ hg sum parent: 1:d59915696727 tip help management of empty pkg and lib directories in perforce branch: default commit: (clean) update: (current) phases: 2 draft $ hg diff --git -c tip diff --git a/lib/place-holder b/lib/place-holder new file mode 100644 --- /dev/null +++ b/lib/place-holder @@ -0,0 +1,2 @@ +perforce does not maintain empty directories. +this file helps. diff --git a/pkg/place-holder b/pkg/place-holder new file mode 100644 --- /dev/null +++ b/pkg/place-holder @@ -0,0 +1,2 @@ +perforce does not maintain empty directories. +this file helps. 
diff --git a/src/cmd/gc/mksys.bash b/src/cmd/gc/mksys.bash old mode 100644 new mode 100755 #else $ hg sum parent: 1:28f089cc9ccc tip help management of empty pkg and lib directories in perforce branch: default commit: (clean) update: (current) phases: 2 draft $ hg diff --git -c tip diff --git a/lib/place-holder b/lib/place-holder new file mode 100644 --- /dev/null +++ b/lib/place-holder @@ -0,0 +1,2 @@ +perforce does not maintain empty directories. +this file helps. diff --git a/pkg/place-holder b/pkg/place-holder new file mode 100644 --- /dev/null +++ b/pkg/place-holder @@ -0,0 +1,2 @@ +perforce does not maintain empty directories. +this file helps. /* The mode change for mksys.bash is missing here, because on platforms */ /* that don't support execbits, mode changes in patches are ignored when */ /* they are imported. This is obviously also the reason for why the hash */ /* in the created changeset is different to the one you see above the */ /* #else clause */ #endif $ cd .. diff lines looking like headers $ hg init difflineslikeheaders $ cd difflineslikeheaders $ echo a >a $ echo b >b $ echo c >c $ hg ci -Am1 adding a adding b adding c $ echo "key: value" >>a $ echo "key: value" >>b $ echo "foo" >>c $ hg ci -m2 $ hg up -C 0 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg diff --git -c1 >want $ hg diff -c1 | hg import --no-commit - applying patch from stdin $ hg diff --git >have $ diff want have $ cd .. 
import a unified diff with no lines of context (diff -U0) $ hg init diffzero $ cd diffzero $ cat > f << EOF > c2 > c4 > c5 > EOF $ hg commit -Am0 adding f $ hg import --no-commit - << EOF > # HG changeset patch > # User test > # Date 0 0 > # Node ID f4974ab632f3dee767567b0576c0ec9a4508575c > # Parent 8679a12a975b819fae5f7ad3853a2886d143d794 > 1 > diff -r 8679a12a975b -r f4974ab632f3 f > --- a/f Thu Jan 01 00:00:00 1970 +0000 > +++ b/f Thu Jan 01 00:00:00 1970 +0000 > @@ -0,0 +1,1 @@ > +c1 > @@ -1,0 +3,1 @@ > +c3 > @@ -3,1 +4,0 @@ > -c5 > EOF applying patch from stdin $ cat f c1 c2 c3 c4 $ cd .. no segfault while importing a unified diff which start line is zero but chunk size is non-zero $ hg init startlinezero $ cd startlinezero $ echo foo > foo $ hg commit -Amfoo adding foo $ hg import --no-commit - << EOF > diff a/foo b/foo > --- a/foo > +++ b/foo > @@ -0,1 +0,1 @@ > foo > EOF applying patch from stdin $ cd .. Test corner case involving fuzz and skew $ hg init morecornercases $ cd morecornercases $ cat > 01-no-context-beginning-of-file.diff < diff --git a/a b/a > --- a/a > +++ b/a > @@ -1,0 +1,1 @@ > +line > EOF $ cat > 02-no-context-middle-of-file.diff < diff --git a/a b/a > --- a/a > +++ b/a > @@ -1,1 +1,1 @@ > -2 > +add some skew > @@ -2,0 +2,1 @@ > +line > EOF $ cat > 03-no-context-end-of-file.diff < diff --git a/a b/a > --- a/a > +++ b/a > @@ -10,0 +10,1 @@ > +line > EOF $ cat > 04-middle-of-file-completely-fuzzed.diff < diff --git a/a b/a > --- a/a > +++ b/a > @@ -1,1 +1,1 @@ > -2 > +add some skew > @@ -2,2 +2,3 @@ > not matching, should fuzz > ... a bit > +line > EOF $ cat > a < 1 > 2 > 3 > 4 > EOF $ hg ci -Am adda a $ for p in *.diff; do > hg import -v --no-commit $p > cat a > hg revert -aqC a > # patch -p1 < $p > # cat a > # hg revert -aC a > done applying 01-no-context-beginning-of-file.diff patching file a applied to working directory 1 line 2 3 4 applying 02-no-context-middle-of-file.diff patching file a Hunk #1 succeeded at 2 (offset 1 lines). 
Hunk #2 succeeded at 4 (offset 1 lines). applied to working directory 1 add some skew 3 line 4 applying 03-no-context-end-of-file.diff patching file a Hunk #1 succeeded at 5 (offset -6 lines). applied to working directory 1 2 3 4 line applying 04-middle-of-file-completely-fuzzed.diff patching file a Hunk #1 succeeded at 2 (offset 1 lines). Hunk #2 succeeded at 5 with fuzz 2 (offset 1 lines). applied to working directory 1 add some skew 3 4 line $ cd .. Test partial application ------------------------ prepare a stack of patches depending on each other $ hg init partial $ cd partial $ cat << EOF > a > one > two > three > four > five > six > seven > EOF $ hg add a $ echo 'b' > b $ hg add b $ hg commit -m 'initial' -u Babar $ cat << EOF > a > one > two > 3 > four > five > six > seven > EOF $ hg commit -m 'three' -u Celeste $ cat << EOF > a > one > two > 3 > 4 > five > six > seven > EOF $ hg commit -m 'four' -u Rataxes $ cat << EOF > a > one > two > 3 > 4 > 5 > six > seven > EOF $ echo bb >> b $ hg commit -m 'five' -u Arthur $ echo 'Babar' > jungle $ hg add jungle $ hg ci -m 'jungle' -u Zephir $ echo 'Celeste' >> jungle $ hg ci -m 'extended jungle' -u Cornelius $ hg log -G --template '{desc|firstline} [{author}] {diffstat}\n' @ extended jungle [Cornelius] 1: +1/-0 | o jungle [Zephir] 1: +1/-0 | o five [Arthur] 2: +2/-1 | o four [Rataxes] 1: +1/-1 | o three [Celeste] 1: +1/-1 | o initial [Babar] 2: +8/-0 Importing with some success and some errors: $ hg update --rev 'desc(initial)' 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg export --rev 'desc(five)' | hg import --partial - applying patch from stdin patching file a Hunk #1 FAILED at 1 1 out of 1 hunks FAILED -- saving rejects to file a.rej patch applied partially (fix the .rej files and run `hg commit --amend`) [1] $ hg log -G --template '{desc|firstline} [{author}] {diffstat}\n' @ five [Arthur] 1: +1/-0 | | o extended jungle [Cornelius] 1: +1/-0 | | | o jungle [Zephir] 1: +1/-0 | | | o 
five [Arthur] 2: +2/-1 | | | o four [Rataxes] 1: +1/-1 | | | o three [Celeste] 1: +1/-1 |/ o initial [Babar] 2: +8/-0 $ hg export # HG changeset patch # User Arthur # Date 0 0 # Thu Jan 01 00:00:00 1970 +0000 # Node ID 26e6446bb2526e2be1037935f5fca2b2706f1509 # Parent 8e4f0351909eae6b9cf68c2c076cb54c42b54b2e five diff -r 8e4f0351909e -r 26e6446bb252 b --- a/b Thu Jan 01 00:00:00 1970 +0000 +++ b/b Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,2 @@ b +bb $ hg status -c . C a C b $ ls a a.rej b Importing with zero success: $ hg update --rev 'desc(initial)' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg export --rev 'desc(four)' | hg import --partial - applying patch from stdin patching file a Hunk #1 FAILED at 0 1 out of 1 hunks FAILED -- saving rejects to file a.rej patch applied partially (fix the .rej files and run `hg commit --amend`) [1] $ hg log -G --template '{desc|firstline} [{author}] {diffstat}\n' @ four [Rataxes] 0: +0/-0 | | o five [Arthur] 1: +1/-0 |/ | o extended jungle [Cornelius] 1: +1/-0 | | | o jungle [Zephir] 1: +1/-0 | | | o five [Arthur] 2: +2/-1 | | | o four [Rataxes] 1: +1/-1 | | | o three [Celeste] 1: +1/-1 |/ o initial [Babar] 2: +8/-0 $ hg export # HG changeset patch # User Rataxes # Date 0 0 # Thu Jan 01 00:00:00 1970 +0000 # Node ID cb9b1847a74d9ad52e93becaf14b98dbcc274e1e # Parent 8e4f0351909eae6b9cf68c2c076cb54c42b54b2e four $ hg status -c . 
C a C b $ ls a a.rej b Importing with unknown file: $ hg update --rev 'desc(initial)' 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg export --rev 'desc("extended jungle")' | hg import --partial - applying patch from stdin unable to find 'jungle' for patching 1 out of 1 hunks FAILED -- saving rejects to file jungle.rej patch applied partially (fix the .rej files and run `hg commit --amend`) [1] $ hg log -G --template '{desc|firstline} [{author}] {diffstat}\n' @ extended jungle [Cornelius] 0: +0/-0 | | o four [Rataxes] 0: +0/-0 |/ | o five [Arthur] 1: +1/-0 |/ | o extended jungle [Cornelius] 1: +1/-0 | | | o jungle [Zephir] 1: +1/-0 | | | o five [Arthur] 2: +2/-1 | | | o four [Rataxes] 1: +1/-1 | | | o three [Celeste] 1: +1/-1 |/ o initial [Babar] 2: +8/-0 $ hg export # HG changeset patch # User Cornelius # Date 0 0 # Thu Jan 01 00:00:00 1970 +0000 # Node ID 1fb1f86bef43c5a75918178f8d23c29fb0a7398d # Parent 8e4f0351909eae6b9cf68c2c076cb54c42b54b2e extended jungle $ hg status -c . 
C a C b $ ls a a.rej b jungle.rej Importing multiple failing patches: $ hg update --rev 'desc(initial)' 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo 'B' > b # just to make another commit $ hg commit -m "a new base" created new head $ hg export --rev 'desc("four") + desc("extended jungle")' | hg import --partial - applying patch from stdin patching file a Hunk #1 FAILED at 0 1 out of 1 hunks FAILED -- saving rejects to file a.rej patch applied partially (fix the .rej files and run `hg commit --amend`) [1] $ hg log -G --template '{desc|firstline} [{author}] {diffstat}\n' @ four [Rataxes] 0: +0/-0 | o a new base [test] 1: +1/-1 | | o extended jungle [Cornelius] 0: +0/-0 |/ | o four [Rataxes] 0: +0/-0 |/ | o five [Arthur] 1: +1/-0 |/ | o extended jungle [Cornelius] 1: +1/-0 | | | o jungle [Zephir] 1: +1/-0 | | | o five [Arthur] 2: +2/-1 | | | o four [Rataxes] 1: +1/-1 | | | o three [Celeste] 1: +1/-1 |/ o initial [Babar] 2: +8/-0 $ hg export # HG changeset patch # User Rataxes # Date 0 0 # Thu Jan 01 00:00:00 1970 +0000 # Node ID a9d7b6d0ffbb4eb12b7d5939250fcd42e8930a1d # Parent f59f8d2e95a8ca5b1b4ca64320140da85f3b44fd four $ hg status -c . 
C a C b Importing some extra header =========================== $ cat > $TESTTMP/parseextra.py < import mercurial.patch > import mercurial.cmdutil > > def processfoo(repo, data, extra, opts): > if 'foo' in data: > extra['foo'] = data['foo'] > def postimport(ctx): > if 'foo' in ctx.extra(): > ctx.repo().ui.write('imported-foo: %s\n' % ctx.extra()['foo']) > > mercurial.patch.patchheadermap.append(('Foo', 'foo')) > mercurial.cmdutil.extrapreimport.append('foo') > mercurial.cmdutil.extrapreimportmap['foo'] = processfoo > mercurial.cmdutil.extrapostimport.append('foo') > mercurial.cmdutil.extrapostimportmap['foo'] = postimport > EOF $ cat >> $HGRCPATH < [extensions] > parseextra=$TESTTMP/parseextra.py > EOF $ hg up -C tip 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat > $TESTTMP/foo.patch < # HG changeset patch > # User Rataxes > # Date 0 0 > # Thu Jan 01 00:00:00 1970 +0000 > # Foo bar > height > > --- a/a Thu Jan 01 00:00:00 1970 +0000 > +++ b/a Wed Oct 07 09:17:44 2015 +0000 > @@ -5,3 +5,4 @@ > five > six > seven > +heigt > EOF $ hg import $TESTTMP/foo.patch applying $TESTTMP/foo.patch imported-foo: bar $ hg log --debug -r . 
| grep extra extra: branch=default extra: foo=bar mercurial-3.7.3/tests/bzr-definitions0000644000175000017500000000050712676531525017324 0ustar mpmmpm00000000000000# this file holds the definitions that are used in various bzr tests TERM=dumb; export TERM echo '[extensions]' >> $HGRCPATH echo 'convert = ' >> $HGRCPATH glog() { hg log -G --template '{rev}@{branch} "{desc|firstline}" files: {files}\n' "$@" } manifest() { echo "% manifest of $2" hg -R $1 manifest -v -r $2 } mercurial-3.7.3/tests/test-doctest.py0000644000175000017500000000233112676531525017264 0ustar mpmmpm00000000000000# this is hack to make sure no escape characters are inserted into the output import os, sys if 'TERM' in os.environ: del os.environ['TERM'] import doctest def testmod(name, optionflags=0, testtarget=None): __import__(name) mod = sys.modules[name] if testtarget is not None: mod = getattr(mod, testtarget) doctest.testmod(mod, optionflags=optionflags) testmod('mercurial.changegroup') testmod('mercurial.changelog') testmod('mercurial.dagparser', optionflags=doctest.NORMALIZE_WHITESPACE) testmod('mercurial.dispatch') testmod('mercurial.encoding') testmod('mercurial.hg') testmod('mercurial.hgweb.hgwebdir_mod') testmod('mercurial.match') testmod('mercurial.minirst') testmod('mercurial.patch') testmod('mercurial.pathutil') testmod('mercurial.parser') testmod('mercurial.revset') testmod('mercurial.store') testmod('mercurial.subrepo') testmod('mercurial.templatefilters') testmod('mercurial.templater') testmod('mercurial.ui') testmod('mercurial.url') testmod('mercurial.util') testmod('mercurial.util', testtarget='platform') testmod('hgext.convert.convcmd') testmod('hgext.convert.cvsps') testmod('hgext.convert.filemap') testmod('hgext.convert.p4') testmod('hgext.convert.subversion') testmod('hgext.mq') mercurial-3.7.3/tests/test-bookmarks-rebase.t0000644000175000017500000000472012676531525020665 0ustar mpmmpm00000000000000 $ echo "[extensions]" >> $HGRCPATH $ echo "rebase=" >> $HGRCPATH initialize 
repository $ hg init $ echo 'a' > a $ hg ci -A -m "0" adding a $ echo 'b' > b $ hg ci -A -m "1" adding b $ hg up 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo 'c' > c $ hg ci -A -m "2" adding c created new head $ echo 'd' > d $ hg ci -A -m "3" adding d $ hg bookmark -r 1 one $ hg bookmark -r 3 two $ hg up -q two bookmark list $ hg bookmark one 1:925d80f479bb * two 3:2ae46b1d99a7 rebase $ hg rebase -s two -d one rebasing 3:2ae46b1d99a7 "3" (tip two) saved backup bundle to $TESTTMP/.hg/strip-backup/2ae46b1d99a7-e6b057bc-backup.hg (glob) $ hg log changeset: 3:42e5ed2cdcf4 bookmark: two tag: tip parent: 1:925d80f479bb user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 3 changeset: 2:db815d6d32e6 parent: 0:f7b1eb17ad24 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 changeset: 1:925d80f479bb bookmark: one user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1 changeset: 0:f7b1eb17ad24 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0 aborted rebase should restore active bookmark. $ hg up 1 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (leaving bookmark two) $ echo 'e' > d $ hg ci -A -m "4" adding d created new head $ hg bookmark three $ hg rebase -s three -d two rebasing 4:dd7c838e8362 "4" (tip three) merging d warning: conflicts while merging d! (edit, then use 'hg resolve --mark') unresolved conflicts (see hg resolve, then hg rebase --continue) [1] $ hg rebase --abort rebase aborted $ hg bookmark one 1:925d80f479bb * three 4:dd7c838e8362 two 3:42e5ed2cdcf4 after aborted rebase, restoring a bookmark that has been removed should not fail $ hg rebase -s three -d two rebasing 4:dd7c838e8362 "4" (tip three) merging d warning: conflicts while merging d! 
(edit, then use 'hg resolve --mark') unresolved conflicts (see hg resolve, then hg rebase --continue) [1] $ hg bookmark -d three $ hg rebase --abort rebase aborted $ hg bookmark one 1:925d80f479bb two 3:42e5ed2cdcf4 mercurial-3.7.3/tests/dumbhttp.py0000755000175000017500000000334312676531525016500 0ustar mpmmpm00000000000000#!/usr/bin/env python from __future__ import absolute_import """ Small and dumb HTTP server for use in tests. """ import optparse import BaseHTTPServer import signal import SimpleHTTPServer import sys from mercurial import ( cmdutil, ) OptionParser = optparse.OptionParser class simplehttpservice(object): def __init__(self, host, port): self.address = (host, port) def init(self): self.httpd = BaseHTTPServer.HTTPServer( self.address, SimpleHTTPServer.SimpleHTTPRequestHandler) def run(self): self.httpd.serve_forever() if __name__ == '__main__': parser = OptionParser() parser.add_option('-p', '--port', dest='port', type='int', default=8000, help='TCP port to listen on', metavar='PORT') parser.add_option('-H', '--host', dest='host', default='localhost', help='hostname or IP to listen on', metavar='HOST') parser.add_option('--pid', dest='pid', help='file name where the PID of the server is stored') parser.add_option('-f', '--foreground', dest='foreground', action='store_true', help='do not start the HTTP server in the background') parser.add_option('--daemon-pipefds') (options, args) = parser.parse_args() signal.signal(signal.SIGTERM, lambda x, y: sys.exit(0)) if options.foreground and options.pid: parser.error("options --pid and --foreground are mutually exclusive") opts = {'pid_file': options.pid, 'daemon': not options.foreground, 'daemon_pipefds': options.daemon_pipefds} service = simplehttpservice(options.host, options.port) cmdutil.service(opts, initfn=service.init, runfn=service.run, runargs=[sys.executable, __file__] + sys.argv[1:]) mercurial-3.7.3/tests/test-rebase-check-restore.t0000644000175000017500000000514112676531525021431 0ustar 
mpmmpm00000000000000 $ cat >> $HGRCPATH < [extensions] > rebase= > > [phases] > publish=False > > [alias] > tglog = log -G --template "{rev}:{phase} '{desc}' {branches}\n" > EOF $ hg init a $ cd a $ echo A > A $ hg add A $ hg ci -m A $ echo 'B' > B $ hg add B $ hg ci -m B $ echo C >> A $ hg ci -m C $ hg up -q -C 0 $ echo D >> A $ hg ci -m D created new head $ echo E > E $ hg add E $ hg ci -m E $ hg up -q -C 0 $ hg branch 'notdefault' marked working directory as branch notdefault (branches are permanent and global, did you want a bookmark?) $ echo F >> A $ hg ci -m F $ cd .. Rebasing B onto E - check keep: and phases $ hg clone -q -u . a a1 $ cd a1 $ hg phase --force --secret 2 $ hg tglog @ 5:draft 'F' notdefault | | o 4:draft 'E' | | | o 3:draft 'D' |/ | o 2:secret 'C' | | | o 1:draft 'B' |/ o 0:draft 'A' $ hg rebase -s 1 -d 4 --keep rebasing 1:27547f69f254 "B" rebasing 2:965c486023db "C" merging A warning: conflicts while merging A! (edit, then use 'hg resolve --mark') unresolved conflicts (see hg resolve, then hg rebase --continue) [1] Solve the conflict and go on: $ echo 'conflict solved' > A $ rm A.orig $ hg resolve -m A (no more unresolved files) continue: hg rebase --continue $ hg rebase --continue already rebased 1:27547f69f254 "B" as 45396c49d53b rebasing 2:965c486023db "C" $ hg tglog o 7:secret 'C' | o 6:draft 'B' | | @ 5:draft 'F' notdefault | | o | 4:draft 'E' | | o | 3:draft 'D' |/ | o 2:secret 'C' | | | o 1:draft 'B' |/ o 0:draft 'A' $ cd .. Rebase F onto E - check keepbranches: $ hg clone -q -u . a a2 $ cd a2 $ hg phase --force --secret 2 $ hg tglog @ 5:draft 'F' notdefault | | o 4:draft 'E' | | | o 3:draft 'D' |/ | o 2:secret 'C' | | | o 1:draft 'B' |/ o 0:draft 'A' $ hg rebase -s 5 -d 4 --keepbranches rebasing 5:01e6ebbd8272 "F" (tip) merging A warning: conflicts while merging A! 
(edit, then use 'hg resolve --mark') unresolved conflicts (see hg resolve, then hg rebase --continue) [1] Solve the conflict and go on: $ echo 'conflict solved' > A $ rm A.orig $ hg resolve -m A (no more unresolved files) continue: hg rebase --continue $ hg rebase --continue rebasing 5:01e6ebbd8272 "F" (tip) saved backup bundle to $TESTTMP/a2/.hg/strip-backup/01e6ebbd8272-6fd3a015-backup.hg (glob) $ hg tglog @ 5:draft 'F' notdefault | o 4:draft 'E' | o 3:draft 'D' | | o 2:secret 'C' | | | o 1:draft 'B' |/ o 0:draft 'A' $ cd .. mercurial-3.7.3/tests/test-rename.t0000644000175000017500000003336212676531525016711 0ustar mpmmpm00000000000000 $ hg init $ mkdir d1 d1/d11 d2 $ echo d1/a > d1/a $ echo d1/ba > d1/ba $ echo d1/a1 > d1/d11/a1 $ echo d1/b > d1/b $ echo d2/b > d2/b $ hg add d1/a d1/b d1/ba d1/d11/a1 d2/b $ hg commit -m "1" rename a single file $ hg rename d1/d11/a1 d2/c $ hg --config ui.portablefilenames=abort rename d1/a d1/con.xml abort: filename contains 'con', which is reserved on Windows: 'd1/con.xml' [255] $ hg sum parent: 0:9b4b6e7b2c26 tip 1 branch: default commit: 1 renamed update: (current) phases: 1 draft $ hg status -C A d2/c d1/d11/a1 R d1/d11/a1 $ hg update -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm d2/c rename a single file using absolute paths $ hg rename `pwd`/d1/d11/a1 `pwd`/d2/c $ hg status -C A d2/c d1/d11/a1 R d1/d11/a1 $ hg update -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm d2/c rename --after a single file $ mv d1/d11/a1 d2/c $ hg rename --after d1/d11/a1 d2/c $ hg status -C A d2/c d1/d11/a1 R d1/d11/a1 $ hg update -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm d2/c rename --after a single file when src and tgt already tracked $ mv d1/d11/a1 d2/c $ hg addrem -s 0 removing d1/d11/a1 adding d2/c $ hg rename --after d1/d11/a1 d2/c $ hg status -C A d2/c d1/d11/a1 R d1/d11/a1 $ hg update -C 1 files updated, 0 files merged, 0 files removed, 0 files 
unresolved $ rm d2/c rename --after a single file to a nonexistent target filename $ hg rename --after d1/a dummy d1/a: not recording move - dummy does not exist (glob) move a single file to an existing directory $ hg rename d1/d11/a1 d2 $ hg status -C A d2/a1 d1/d11/a1 R d1/d11/a1 $ hg update -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm d2/a1 move --after a single file to an existing directory $ mv d1/d11/a1 d2 $ hg rename --after d1/d11/a1 d2 $ hg status -C A d2/a1 d1/d11/a1 R d1/d11/a1 $ hg update -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm d2/a1 rename a file using a relative path $ (cd d1/d11; hg rename ../../d2/b e) $ hg status -C A d1/d11/e d2/b R d2/b $ hg update -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm d1/d11/e rename --after a file using a relative path $ (cd d1/d11; mv ../../d2/b e; hg rename --after ../../d2/b e) $ hg status -C A d1/d11/e d2/b R d2/b $ hg update -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm d1/d11/e rename directory d1 as d3 $ hg rename d1/ d3 moving d1/a to d3/a (glob) moving d1/b to d3/b (glob) moving d1/ba to d3/ba (glob) moving d1/d11/a1 to d3/d11/a1 (glob) $ hg status -C A d3/a d1/a A d3/b d1/b A d3/ba d1/ba A d3/d11/a1 d1/d11/a1 R d1/a R d1/b R d1/ba R d1/d11/a1 $ hg update -C 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf d3 rename --after directory d1 as d3 $ mv d1 d3 $ hg rename --after d1 d3 moving d1/a to d3/a (glob) moving d1/b to d3/b (glob) moving d1/ba to d3/ba (glob) moving d1/d11/a1 to d3/d11/a1 (glob) $ hg status -C A d3/a d1/a A d3/b d1/b A d3/ba d1/ba A d3/d11/a1 d1/d11/a1 R d1/a R d1/b R d1/ba R d1/d11/a1 $ hg update -C 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf d3 move a directory using a relative path $ (cd d2; mkdir d3; hg rename ../d1/d11 d3) moving ../d1/d11/a1 to d3/d11/a1 (glob) $ hg status -C A d2/d3/d11/a1 
d1/d11/a1 R d1/d11/a1 $ hg update -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf d2/d3 move --after a directory using a relative path $ (cd d2; mkdir d3; mv ../d1/d11 d3; hg rename --after ../d1/d11 d3) moving ../d1/d11/a1 to d3/d11/a1 (glob) $ hg status -C A d2/d3/d11/a1 d1/d11/a1 R d1/d11/a1 $ hg update -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf d2/d3 move directory d1/d11 to an existing directory d2 (removes empty d1) $ hg rename d1/d11/ d2 moving d1/d11/a1 to d2/d11/a1 (glob) $ hg status -C A d2/d11/a1 d1/d11/a1 R d1/d11/a1 $ hg update -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf d2/d11 move directories d1 and d2 to a new directory d3 $ mkdir d3 $ hg rename d1 d2 d3 moving d1/a to d3/d1/a (glob) moving d1/b to d3/d1/b (glob) moving d1/ba to d3/d1/ba (glob) moving d1/d11/a1 to d3/d1/d11/a1 (glob) moving d2/b to d3/d2/b (glob) $ hg status -C A d3/d1/a d1/a A d3/d1/b d1/b A d3/d1/ba d1/ba A d3/d1/d11/a1 d1/d11/a1 A d3/d2/b d2/b R d1/a R d1/b R d1/ba R d1/d11/a1 R d2/b $ hg update -C 5 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf d3 move --after directories d1 and d2 to a new directory d3 $ mkdir d3 $ mv d1 d2 d3 $ hg rename --after d1 d2 d3 moving d1/a to d3/d1/a (glob) moving d1/b to d3/d1/b (glob) moving d1/ba to d3/d1/ba (glob) moving d1/d11/a1 to d3/d1/d11/a1 (glob) moving d2/b to d3/d2/b (glob) $ hg status -C A d3/d1/a d1/a A d3/d1/b d1/b A d3/d1/ba d1/ba A d3/d1/d11/a1 d1/d11/a1 A d3/d2/b d2/b R d1/a R d1/b R d1/ba R d1/d11/a1 R d2/b $ hg update -C 5 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf d3 move everything under directory d1 to existing directory d2, do not overwrite existing files (d2/b) $ hg rename d1/* d2 d2/b: not overwriting - file exists moving d1/d11/a1 to d2/d11/a1 (glob) $ hg status -C A d2/a d1/a A d2/ba d1/ba A d2/d11/a1 d1/d11/a1 R d1/a R d1/ba R d1/d11/a1 $ diff -u d1/b 
d2/b --- d1/b * (glob) +++ d2/b * (glob) @@ * (glob) -d1/b +d2/b [1] $ hg update -C 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm d2/a d2/ba d2/d11/a1 attempt to move one file into a non-existent directory $ hg rename d1/a dx/ abort: destination dx/ is not a directory [255] $ hg status -C $ hg update -C 0 files updated, 0 files merged, 0 files removed, 0 files unresolved attempt to move potentially more than one file into a non-existent directory $ hg rename 'glob:d1/**' dx abort: with multiple sources, destination must be an existing directory [255] move every file under d1 to d2/d21 (glob) $ mkdir d2/d21 $ hg rename 'glob:d1/**' d2/d21 moving d1/a to d2/d21/a (glob) moving d1/b to d2/d21/b (glob) moving d1/ba to d2/d21/ba (glob) moving d1/d11/a1 to d2/d21/a1 (glob) $ hg status -C A d2/d21/a d1/a A d2/d21/a1 d1/d11/a1 A d2/d21/b d1/b A d2/d21/ba d1/ba R d1/a R d1/b R d1/ba R d1/d11/a1 $ hg update -C 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf d2/d21 move --after some files under d1 to d2/d21 (glob) $ mkdir d2/d21 $ mv d1/a d1/d11/a1 d2/d21 $ hg rename --after 'glob:d1/**' d2/d21 moving d1/a to d2/d21/a (glob) d1/b: not recording move - d2/d21/b does not exist (glob) d1/ba: not recording move - d2/d21/ba does not exist (glob) moving d1/d11/a1 to d2/d21/a1 (glob) $ hg status -C A d2/d21/a d1/a A d2/d21/a1 d1/d11/a1 R d1/a R d1/d11/a1 $ hg update -C 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf d2/d21 move every file under d1 starting with an 'a' to d2/d21 (regexp) $ mkdir d2/d21 $ hg rename 're:d1/([^a][^/]*/)*a.*' d2/d21 moving d1/a to d2/d21/a (glob) moving d1/d11/a1 to d2/d21/a1 (glob) $ hg status -C A d2/d21/a d1/a A d2/d21/a1 d1/d11/a1 R d1/a R d1/d11/a1 $ hg update -C 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf d2/d21 attempt to overwrite an existing file $ echo "ca" > d1/ca $ hg rename d1/ba d1/ca d1/ca: not overwriting - file exists 
$ hg status -C ? d1/ca $ hg update -C 0 files updated, 0 files merged, 0 files removed, 0 files unresolved forced overwrite of an existing file $ echo "ca" > d1/ca $ hg rename --force d1/ba d1/ca $ hg status -C A d1/ca d1/ba R d1/ba $ hg update -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm d1/ca attempt to overwrite an existing broken symlink #if symlink $ ln -s ba d1/ca $ hg rename --traceback d1/ba d1/ca d1/ca: not overwriting - file exists $ hg status -C ? d1/ca $ hg update -C 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm d1/ca replace a symlink with a file $ ln -s ba d1/ca $ hg rename --force d1/ba d1/ca $ hg status -C A d1/ca d1/ba R d1/ba $ hg update -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm d1/ca #endif do not copy more than one source file to the same destination file $ mkdir d3 $ hg rename d1/* d2/* d3 moving d1/d11/a1 to d3/d11/a1 (glob) d3/b: not overwriting - d2/b collides with d1/b $ hg status -C A d3/a d1/a A d3/b d1/b A d3/ba d1/ba A d3/d11/a1 d1/d11/a1 R d1/a R d1/b R d1/ba R d1/d11/a1 $ hg update -C 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf d3 move a whole subtree with "hg rename ." $ mkdir d3 $ (cd d1; hg rename . ../d3) moving a to ../d3/d1/a moving b to ../d3/d1/b moving ba to ../d3/d1/ba moving d11/a1 to ../d3/d1/d11/a1 (glob) $ hg status -C A d3/d1/a d1/a A d3/d1/b d1/b A d3/d1/ba d1/ba A d3/d1/d11/a1 d1/d11/a1 R d1/a R d1/b R d1/ba R d1/d11/a1 $ hg update -C 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf d3 move a whole subtree with "hg rename --after ." $ mkdir d3 $ mv d1/* d3 $ (cd d1; hg rename --after . 
../d3) moving a to ../d3/a moving b to ../d3/b moving ba to ../d3/ba moving d11/a1 to ../d3/d11/a1 (glob) $ hg status -C A d3/a d1/a A d3/b d1/b A d3/ba d1/ba A d3/d11/a1 d1/d11/a1 R d1/a R d1/b R d1/ba R d1/d11/a1 $ hg update -C 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf d3 move the parent tree with "hg rename .." $ (cd d1/d11; hg rename .. ../../d3) moving ../a to ../../d3/a (glob) moving ../b to ../../d3/b (glob) moving ../ba to ../../d3/ba (glob) moving a1 to ../../d3/d11/a1 $ hg status -C A d3/a d1/a A d3/b d1/b A d3/ba d1/ba A d3/d11/a1 d1/d11/a1 R d1/a R d1/b R d1/ba R d1/d11/a1 $ hg update -C 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf d3 skip removed files $ hg remove d1/b $ hg rename d1 d3 moving d1/a to d3/a (glob) moving d1/ba to d3/ba (glob) moving d1/d11/a1 to d3/d11/a1 (glob) $ hg status -C A d3/a d1/a A d3/ba d1/ba A d3/d11/a1 d1/d11/a1 R d1/a R d1/b R d1/ba R d1/d11/a1 $ hg update -C 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf d3 transitive rename $ hg rename d1/b d1/bb $ hg rename d1/bb d1/bc $ hg status -C A d1/bc d1/b R d1/b $ hg update -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm d1/bc transitive rename --after $ hg rename d1/b d1/bb $ mv d1/bb d1/bc $ hg rename --after d1/bb d1/bc $ hg status -C A d1/bc d1/b R d1/b $ hg update -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm d1/bc $ echo "# idempotent renames (d1/b -> d1/bb followed by d1/bb -> d1/b)" # idempotent renames (d1/b -> d1/bb followed by d1/bb -> d1/b) $ hg rename d1/b d1/bb $ echo "some stuff added to d1/bb" >> d1/bb $ hg rename d1/bb d1/b $ hg status -C M d1/b $ hg update -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved overwriting with renames (issue1959) $ hg rename d1/a d1/c $ hg rename d1/b d1/a $ hg status -C M d1/a d1/b A d1/c d1/a R d1/b $ hg diff --git diff --git a/d1/a b/d1/a --- a/d1/a 
+++ b/d1/a @@ -1,1 +1,1 @@ -d1/a +d1/b diff --git a/d1/b b/d1/b deleted file mode 100644 --- a/d1/b +++ /dev/null @@ -1,1 +0,0 @@ -d1/b diff --git a/d1/a b/d1/c copy from d1/a copy to d1/c $ hg update -C 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm d1/c # The file was marked as added, so 'hg update' action was 'forget' check illegal path components $ hg rename d1/d11/a1 .hg/foo abort: path contains illegal component: .hg/foo (glob) [255] $ hg status -C $ hg rename d1/d11/a1 ../foo abort: ../foo not under root '$TESTTMP' [255] $ hg status -C $ mv d1/d11/a1 .hg/foo $ hg rename --after d1/d11/a1 .hg/foo abort: path contains illegal component: .hg/foo (glob) [255] $ hg status -C ! d1/d11/a1 $ hg update -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm .hg/foo $ hg rename d1/d11/a1 .hg abort: path contains illegal component: .hg/a1 (glob) [255] $ hg --config extensions.largefiles= rename d1/d11/a1 .hg abort: path contains illegal component: .hg/a1 (glob) [255] $ hg status -C $ hg rename d1/d11/a1 .. abort: ../a1 not under root '$TESTTMP' (glob) [255] $ hg --config extensions.largefiles= rename d1/d11/a1 .. abort: ../a1 not under root '$TESTTMP' (glob) [255] $ hg status -C $ mv d1/d11/a1 .hg $ hg rename --after d1/d11/a1 .hg abort: path contains illegal component: .hg/a1 (glob) [255] $ hg status -C ! d1/d11/a1 $ hg update -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm .hg/a1 $ (cd d1/d11; hg rename ../../d2/b ../../.hg/foo) abort: path contains illegal component: .hg/foo (glob) [255] $ hg status -C $ (cd d1/d11; hg rename ../../d2/b ../../../foo) abort: ../../../foo not under root '$TESTTMP' [255] $ hg status -C mercurial-3.7.3/tests/test-histedit-non-commute-abort.t0000644000175000017500000001052112676531525022613 0ustar mpmmpm00000000000000 $ . 
"$TESTDIR/histedit-helpers.sh" $ cat >> $HGRCPATH < [extensions] > histedit= > EOF $ initrepo () > { > hg init r > cd r > for x in a b c d e f ; do > echo $x > $x > hg add $x > hg ci -m $x > done > echo a >> e > hg ci -m 'does not commute with e' > cd .. > } $ initrepo $ cd r log before edit $ hg log --graph @ changeset: 6:bfa474341cc9 | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: does not commute with e | o changeset: 5:652413bf663e | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: f | o changeset: 4:e860deea161a | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: e | o changeset: 3:055a42cdd887 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: d | o changeset: 2:177f92b77385 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: c | o changeset: 1:d2ae7f538514 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 0:cb9a9f314b8b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a edit the history $ hg histedit 177f92b77385 --commands - 2>&1 < pick 177f92b77385 c > pick 055a42cdd887 d > pick bfa474341cc9 does not commute with e > pick e860deea161a e > pick 652413bf663e f > EOF 0 files updated, 0 files merged, 2 files removed, 0 files unresolved merging e warning: conflicts while merging e! 
(edit, then use 'hg resolve --mark') Fix up the change (pick e860deea161a) (hg histedit --continue to resume) insert unsupported advisory merge record $ hg --config extensions.fakemergerecord=$TESTDIR/fakemergerecord.py fakemergerecord -x $ hg debugmergestate * version 2 records local: 8f7551c7e4a2f2efe0bc8c741baf7f227d65d758 other: e860deea161a2f77de56603b340ebbb4536308ae unrecognized entry: x advisory record file: e (record type "F", state "u", hash 58e6b3a414a1e090dfc6029add0f3555ccba127f) local path: e (flags "") ancestor path: e (node null) other path: e (node 6b67ccefd5ce6de77e7ead4f5292843a0255329f) $ hg resolve -l U e insert unsupported mandatory merge record $ hg --config extensions.fakemergerecord=$TESTDIR/fakemergerecord.py fakemergerecord -X $ hg debugmergestate * version 2 records local: 8f7551c7e4a2f2efe0bc8c741baf7f227d65d758 other: e860deea161a2f77de56603b340ebbb4536308ae file: e (record type "F", state "u", hash 58e6b3a414a1e090dfc6029add0f3555ccba127f) local path: e (flags "") ancestor path: e (node null) other path: e (node 6b67ccefd5ce6de77e7ead4f5292843a0255329f) unrecognized entry: X mandatory record $ hg resolve -l abort: unsupported merge state records: X (see https://mercurial-scm.org/wiki/MergeStateRecords for more information) [255] $ hg resolve -ma abort: unsupported merge state records: X (see https://mercurial-scm.org/wiki/MergeStateRecords for more information) [255] abort the edit (should clear out merge state) $ hg histedit --abort 2>&1 | fixbundle 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg debugmergestate no merge state found log after abort $ hg resolve -l $ hg log --graph @ changeset: 6:bfa474341cc9 | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: does not commute with e | o changeset: 5:652413bf663e | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: f | o changeset: 4:e860deea161a | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: e | o changeset: 
3:055a42cdd887 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: d | o changeset: 2:177f92b77385 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: c | o changeset: 1:d2ae7f538514 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 0:cb9a9f314b8b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a $ cd .. mercurial-3.7.3/tests/test-encoding.t0000644000175000017500000001721712676531525017231 0ustar mpmmpm00000000000000Test character encoding $ hg init t $ cd t we need a repo with some legacy latin-1 changesets $ hg unbundle "$TESTDIR/bundles/legacy-encoding.hg" adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files (run 'hg update' to get a working copy) $ hg co 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ python << EOF > f = file('latin-1', 'w'); f.write("latin-1 e' encoded: \xe9"); f.close() > f = file('utf-8', 'w'); f.write("utf-8 e' encoded: \xc3\xa9"); f.close() > f = file('latin-1-tag', 'w'); f.write("\xe9"); f.close() > EOF should fail with encoding error $ echo "plain old ascii" > a $ hg st M a ? latin-1 ? latin-1-tag ? utf-8 $ HGENCODING=ascii hg ci -l latin-1 transaction abort! rollback completed abort: decoding near ' encoded: \xe9': 'ascii' codec can't decode byte 0xe9 in position 20: ordinal not in range(128)! (esc) [255] these should work $ echo "latin-1" > a $ HGENCODING=latin-1 hg ci -l latin-1 $ echo "utf-8" > a $ HGENCODING=utf-8 hg ci -l utf-8 $ HGENCODING=latin-1 hg tag `cat latin-1-tag` $ HGENCODING=latin-1 hg branch `cat latin-1-tag` marked working directory as branch \xe9 (esc) (branches are permanent and global, did you want a bookmark?) $ HGENCODING=latin-1 hg ci -m 'latin1 branch' $ hg -q rollback $ HGENCODING=latin-1 hg branch \xe9 (esc) $ HGENCODING=latin-1 hg ci -m 'latin1 branch' $ rm .hg/branch hg log (ascii) $ hg --encoding ascii log changeset: 5:a52c0692f24a branch: ? 
tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: latin1 branch changeset: 4:94db611b4196 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Added tag ? for changeset ca661e7520de changeset: 3:ca661e7520de tag: ? user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: utf-8 e' encoded: ? changeset: 2:650c6f3d55dd user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: latin-1 e' encoded: ? changeset: 1:0e5b7e3f9c4a user: test date: Mon Jan 12 13:46:40 1970 +0000 summary: koi8-r: ????? = u'\u0440\u0442\u0443\u0442\u044c' changeset: 0:1e78a93102a3 user: test date: Mon Jan 12 13:46:40 1970 +0000 summary: latin-1 e': ? = u'\xe9' hg log (latin-1) $ hg --encoding latin-1 log changeset: 5:a52c0692f24a branch: \xe9 (esc) tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: latin1 branch changeset: 4:94db611b4196 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Added tag \xe9 for changeset ca661e7520de (esc) changeset: 3:ca661e7520de tag: \xe9 (esc) user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: utf-8 e' encoded: \xe9 (esc) changeset: 2:650c6f3d55dd user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: latin-1 e' encoded: \xe9 (esc) changeset: 1:0e5b7e3f9c4a user: test date: Mon Jan 12 13:46:40 1970 +0000 summary: koi8-r: \xd2\xd4\xd5\xd4\xd8 = u'\\u0440\\u0442\\u0443\\u0442\\u044c' (esc) changeset: 0:1e78a93102a3 user: test date: Mon Jan 12 13:46:40 1970 +0000 summary: latin-1 e': \xe9 = u'\\xe9' (esc) hg log (utf-8) $ hg --encoding utf-8 log changeset: 5:a52c0692f24a branch: \xc3\xa9 (esc) tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: latin1 branch changeset: 4:94db611b4196 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Added tag \xc3\xa9 for changeset ca661e7520de (esc) changeset: 3:ca661e7520de tag: \xc3\xa9 (esc) user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: utf-8 e' encoded: \xc3\xa9 (esc) changeset: 2:650c6f3d55dd user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 
latin-1 e' encoded: \xc3\xa9 (esc) changeset: 1:0e5b7e3f9c4a user: test date: Mon Jan 12 13:46:40 1970 +0000 summary: koi8-r: \xc3\x92\xc3\x94\xc3\x95\xc3\x94\xc3\x98 = u'\\u0440\\u0442\\u0443\\u0442\\u044c' (esc) changeset: 0:1e78a93102a3 user: test date: Mon Jan 12 13:46:40 1970 +0000 summary: latin-1 e': \xc3\xa9 = u'\\xe9' (esc) hg tags (ascii) $ HGENCODING=ascii hg tags tip 5:a52c0692f24a ? 3:ca661e7520de hg tags (latin-1) $ HGENCODING=latin-1 hg tags tip 5:a52c0692f24a \xe9 3:ca661e7520de (esc) hg tags (utf-8) $ HGENCODING=utf-8 hg tags tip 5:a52c0692f24a \xc3\xa9 3:ca661e7520de (esc) hg tags (JSON) $ hg tags -Tjson [ { "node": "a52c0692f24ad921c0a31e1736e7635a8b23b670", "rev": 5, "tag": "tip", "type": "" }, { "node": "ca661e7520dec3f5438a63590c350bebadb04989", "rev": 3, "tag": "\xc3\xa9", (esc) "type": "" } ] hg branches (ascii) $ HGENCODING=ascii hg branches ? 5:a52c0692f24a default 4:94db611b4196 (inactive) hg branches (latin-1) $ HGENCODING=latin-1 hg branches \xe9 5:a52c0692f24a (esc) default 4:94db611b4196 (inactive) hg branches (utf-8) $ HGENCODING=utf-8 hg branches \xc3\xa9 5:a52c0692f24a (esc) default 4:94db611b4196 (inactive) $ echo '[ui]' >> .hg/hgrc $ echo 'fallbackencoding = koi8-r' >> .hg/hgrc hg log (utf-8) $ HGENCODING=utf-8 hg log changeset: 5:a52c0692f24a branch: \xc3\xa9 (esc) tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: latin1 branch changeset: 4:94db611b4196 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Added tag \xc3\xa9 for changeset ca661e7520de (esc) changeset: 3:ca661e7520de tag: \xc3\xa9 (esc) user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: utf-8 e' encoded: \xc3\xa9 (esc) changeset: 2:650c6f3d55dd user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: latin-1 e' encoded: \xc3\xa9 (esc) changeset: 1:0e5b7e3f9c4a user: test date: Mon Jan 12 13:46:40 1970 +0000 summary: koi8-r: \xd1\x80\xd1\x82\xd1\x83\xd1\x82\xd1\x8c = u'\\u0440\\u0442\\u0443\\u0442\\u044c' (esc) changeset: 0:1e78a93102a3 
user: test date: Mon Jan 12 13:46:40 1970 +0000 summary: latin-1 e': \xd0\x98 = u'\\xe9' (esc) hg log (dolphin) $ HGENCODING=dolphin hg log abort: unknown encoding: dolphin (please check your locale settings) [255] $ HGENCODING=ascii hg branch `cat latin-1-tag` abort: decoding near '\xe9': 'ascii' codec can't decode byte 0xe9 in position 0: ordinal not in range(128)! (esc) [255] $ cp latin-1-tag .hg/branch $ HGENCODING=latin-1 hg ci -m 'auto-promote legacy name' Test roundtrip encoding of lookup tables when not using UTF-8 (issue2763) $ HGENCODING=latin-1 hg up `cat latin-1-tag` 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ cd .. Test roundtrip encoding/decoding of utf8b for generated data #if hypothesis >>> from hypothesishelpers import * >>> from mercurial import encoding >>> roundtrips(st.binary(), encoding.fromutf8b, encoding.toutf8b) Round trip OK #endif mercurial-3.7.3/tests/test-diff-ignore-whitespace.t0000644000175000017500000001726112676531525021765 0ustar mpmmpm00000000000000GNU diff is the reference for all of these results. 
Prepare tests: $ echo '[alias]' >> $HGRCPATH $ echo 'ndiff = diff --nodates' >> $HGRCPATH $ hg init $ printf 'hello world\ngoodbye world\n' >foo $ hg ci -Amfoo -ufoo adding foo Test added blank lines: $ printf '\nhello world\n\ngoodbye world\n\n' >foo >>> two diffs showing three added lines <<< $ hg ndiff diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,5 @@ + hello world + goodbye world + $ hg ndiff -b diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,5 @@ + hello world + goodbye world + >>> no diffs <<< $ hg ndiff -B $ hg ndiff -Bb Test added horizontal space first on a line(): $ printf '\t hello world\ngoodbye world\n' >foo >>> four diffs showing added space first on the first line <<< $ hg ndiff diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,2 @@ -hello world + hello world goodbye world $ hg ndiff -b diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,2 @@ -hello world + hello world goodbye world $ hg ndiff -B diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,2 @@ -hello world + hello world goodbye world $ hg ndiff -Bb diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,2 @@ -hello world + hello world goodbye world Test added horizontal space last on a line: $ printf 'hello world\t \ngoodbye world\n' >foo >>> two diffs showing space appended to the first line <<< $ hg ndiff diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,2 @@ -hello world +hello world goodbye world $ hg ndiff -B diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,2 @@ -hello world +hello world goodbye world >>> no diffs <<< $ hg ndiff -b $ hg ndiff -Bb Test added horizontal space in the middle of a word: $ printf 'hello world\ngood bye world\n' >foo >>> four diffs showing space inserted into "goodbye" <<< $ hg ndiff diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,2 @@ hello world -goodbye world +good bye world $ hg ndiff -B diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,2 @@ hello world -goodbye world +good bye world $ hg ndiff -b 
diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,2 @@ hello world -goodbye world +good bye world $ hg ndiff -Bb diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,2 @@ hello world -goodbye world +good bye world Test increased horizontal whitespace amount: $ printf 'hello world\ngoodbye\t\t \tworld\n' >foo >>> two diffs showing changed whitespace amount in the last line <<< $ hg ndiff diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,2 @@ hello world -goodbye world +goodbye world $ hg ndiff -B diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,2 @@ hello world -goodbye world +goodbye world >>> no diffs <<< $ hg ndiff -b $ hg ndiff -Bb Test added blank line with horizontal whitespace: $ printf 'hello world\n \t\ngoodbye world\n' >foo >>> three diffs showing added blank line with horizontal space <<< $ hg ndiff diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,3 @@ hello world + goodbye world $ hg ndiff -B diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,3 @@ hello world + goodbye world $ hg ndiff -b diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,3 @@ hello world + goodbye world >>> no diffs <<< $ hg ndiff -Bb Test added blank line with other whitespace: $ printf 'hello world\n \t\ngoodbye world \n' >foo >>> three diffs showing added blank line with other space <<< $ hg ndiff diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,3 @@ -hello world -goodbye world +hello world + +goodbye world $ hg ndiff -B diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,3 @@ -hello world -goodbye world +hello world + +goodbye world $ hg ndiff -b diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,3 @@ hello world + goodbye world >>> no diffs <<< $ hg ndiff -Bb Test whitespace changes: $ printf 'helloworld\ngoodbye\tworld \n' >foo >>> four diffs showing changed whitespace <<< $ hg ndiff diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,2 @@ -hello world -goodbye world +helloworld +goodbye world $ hg ndiff -B diff -r 
540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,2 @@ -hello world -goodbye world +helloworld +goodbye world $ hg ndiff -b diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,2 @@ -hello world +helloworld goodbye world $ hg ndiff -Bb diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,2 @@ -hello world +helloworld goodbye world >>> no diffs <<< $ hg ndiff -w Test whitespace changes and blank lines: $ printf 'helloworld\n\n\n\ngoodbye\tworld \n' >foo >>> five diffs showing changed whitespace <<< $ hg ndiff diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,5 @@ -hello world -goodbye world +helloworld + + + +goodbye world $ hg ndiff -B diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,5 @@ -hello world -goodbye world +helloworld + + + +goodbye world $ hg ndiff -b diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,5 @@ -hello world +helloworld + + + goodbye world $ hg ndiff -Bb diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,5 @@ -hello world +helloworld + + + goodbye world $ hg ndiff -w diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,5 @@ hello world + + + goodbye world >>> no diffs <<< $ hg ndiff -wB Test \r (carriage return) as used in "DOS" line endings: $ printf 'hello world\r\n\r\ngoodbye\rworld\n' >foo $ hg ndiff diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,3 @@ -hello world -goodbye world +hello world\r (esc) +\r (esc) +goodbye\r (no-eol) (esc) world No completely blank lines to ignore: $ hg ndiff --ignore-blank-lines diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,3 @@ -hello world -goodbye world +hello world\r (esc) +\r (esc) +goodbye\r (no-eol) (esc) world Only new line noticed: $ hg ndiff --ignore-space-change diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,3 @@ hello world +\r (esc) goodbye world $ hg ndiff --ignore-all-space diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,3 @@ hello world +\r (esc) goodbye world New line not noticed when space change ignored: $ hg ndiff 
--ignore-blank-lines --ignore-all-space Do not ignore all newlines, only blank lines $ printf 'hello \nworld\ngoodbye world\n' > foo $ hg ndiff --ignore-blank-lines diff -r 540c40a65b78 foo --- a/foo +++ b/foo @@ -1,2 +1,3 @@ -hello world +hello +world goodbye world Test hunk offsets adjustments with --ignore-blank-lines $ hg revert -aC reverting foo $ printf '\nb\nx\nd\n' > a $ printf 'b\ny\nd\n' > b $ hg add a b $ hg ci -m add $ hg cat -r . a > b $ hg cat -r . b > a $ hg diff -B --nodates a > ../diffa $ cat ../diffa diff -r 0e66aa54f318 a --- a/a +++ b/a @@ -1,4 +1,4 @@ b -x +y d $ hg diff -B --nodates b > ../diffb $ cat ../diffb diff -r 0e66aa54f318 b --- a/b +++ b/b @@ -1,3 +1,3 @@ b -y +x d $ hg revert -aC reverting a reverting b $ hg import --no-commit ../diffa applying ../diffa $ hg revert -aC reverting a $ hg import --no-commit ../diffb applying ../diffb $ hg revert -aC reverting b mercurial-3.7.3/tests/test-issue1502.t0000644000175000017500000000224112676531525017072 0ustar mpmmpm00000000000000https://bz.mercurial-scm.org/1502 Initialize repository $ hg init foo $ touch foo/a && hg -R foo commit -A -m "added a" adding a $ hg clone foo foo1 updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo "bar" > foo1/a && hg -R foo1 commit -m "edit a in foo1" $ echo "hi" > foo/a && hg -R foo commit -m "edited a foo" $ hg -R foo1 pull -u pulling from $TESTTMP/foo (glob) searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) abort: not updating: not a linear update (merge or update --check to force update) [255] $ hg -R foo1 book branchy $ hg -R foo1 book * branchy 1:e3e522925eff Pull. Bookmark should not jump to new head. 
$ echo "there" >> foo/a && hg -R foo commit -m "edited a again" $ hg -R foo1 pull pulling from $TESTTMP/foo (glob) searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (run 'hg update' to get a working copy) $ hg -R foo1 book * branchy 1:e3e522925eff mercurial-3.7.3/tests/test-atomictempfile.py0000644000175000017500000000242112676531525020621 0ustar mpmmpm00000000000000import os import glob import unittest import silenttestrunner from mercurial.util import atomictempfile class testatomictempfile(unittest.TestCase): def test1_simple(self): if os.path.exists('foo'): os.remove('foo') file = atomictempfile('foo') (dir, basename) = os.path.split(file._tempname) self.assertFalse(os.path.isfile('foo')) self.assertTrue(basename in glob.glob('.foo-*')) file.write('argh\n') file.close() self.assertTrue(os.path.isfile('foo')) self.assertTrue(basename not in glob.glob('.foo-*')) # discard() removes the temp file without making the write permanent def test2_discard(self): if os.path.exists('foo'): os.remove('foo') file = atomictempfile('foo') (dir, basename) = os.path.split(file._tempname) file.write('yo\n') file.discard() self.assertFalse(os.path.isfile('foo')) self.assertTrue(basename not in os.listdir('.')) # if a programmer screws up and passes bad args to atomictempfile, they # get a plain ordinary TypeError, not infinite recursion def test3_oops(self): self.assertRaises(TypeError, atomictempfile) if __name__ == '__main__': silenttestrunner.main(__name__) mercurial-3.7.3/tests/test-permissions.t0000644000175000017500000000231012676531525020002 0ustar mpmmpm00000000000000#require unix-permissions no-root $ hg init t $ cd t $ echo foo > a $ hg add a $ hg commit -m "1" $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 1 changesets, 1 total revisions $ chmod -r .hg/store/data/a.i $ hg verify checking changesets checking manifests 
crosschecking files in changesets and manifests checking files abort: Permission denied: $TESTTMP/t/.hg/store/data/a.i [255] $ chmod +r .hg/store/data/a.i $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 1 changesets, 1 total revisions $ chmod -w .hg/store/data/a.i $ echo barber > a $ hg commit -m "2" trouble committing a! abort: Permission denied: $TESTTMP/t/.hg/store/data/a.i [255] $ chmod -w . $ hg diff --nodates diff -r 2a18120dc1c9 a --- a/a +++ b/a @@ -1,1 +1,1 @@ -foo +barber $ chmod +w . $ chmod +w .hg/store/data/a.i $ mkdir dir $ touch dir/a $ hg status M a ? dir/a $ chmod -rx dir $ hg status dir: Permission denied M a Reenable perm to allow deletion: $ chmod +rx dir $ cd .. mercurial-3.7.3/tests/test-commit-amend.t0000644000175000017500000007247412676531525020023 0ustar mpmmpm00000000000000 $ cat << EOF >> $HGRCPATH > [format] > usegeneraldelta=yes > EOF $ hg init Setup: $ echo a >> a $ hg ci -Am 'base' adding a Refuse to amend public csets: $ hg phase -r . -p $ hg ci --amend abort: cannot amend public changesets [255] $ hg phase -r . -f -d $ echo a >> a $ hg ci -Am 'base1' Nothing to amend: $ hg ci --amend -m 'base1' nothing changed [1] $ cat >> $HGRCPATH < [hooks] > pretxncommit.foo = sh -c "echo \\"pretxncommit \$HG_NODE\\"; hg id -r \$HG_NODE" > EOF Amending changeset with changes in working dir: (and check that --message does not trigger an editor) $ echo a >> a $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit --amend -m 'amend base1' pretxncommit 43f1ba15f28a50abf0aae529cf8a16bfced7b149 43f1ba15f28a tip saved backup bundle to $TESTTMP/.hg/strip-backup/489edb5b847d-f1bf3ab8-amend-backup.hg (glob) $ echo 'pretxncommit.foo = ' >> $HGRCPATH $ hg diff -c . 
diff -r ad120869acf0 -r 43f1ba15f28a a --- a/a Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,3 @@ a +a +a $ hg log changeset: 1:43f1ba15f28a tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: amend base1 changeset: 0:ad120869acf0 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: base Check proper abort for empty message $ cat > editor.sh << '__EOF__' > #!/bin/sh > echo "" > "$1" > __EOF__ $ echo b > b $ hg add b $ hg summary parent: 1:43f1ba15f28a tip amend base1 branch: default commit: 1 added, 1 unknown update: (current) phases: 2 draft $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit --amend transaction abort! rollback completed abort: empty commit message [255] $ hg summary parent: 1:43f1ba15f28a tip amend base1 branch: default commit: 1 added, 1 unknown update: (current) phases: 2 draft Add new file: $ hg ci --amend -m 'amend base1 new file' saved backup bundle to $TESTTMP/.hg/strip-backup/43f1ba15f28a-7a3b3496-amend-backup.hg (glob) Remove file that was added in amended commit: (and test logfile option) (and test that logfile option do not trigger an editor) $ hg rm b $ echo 'amend base1 remove new file' > ../logfile $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg ci --amend --logfile ../logfile saved backup bundle to $TESTTMP/.hg/strip-backup/b8e3cb2b3882-0b55739a-amend-backup.hg (glob) $ hg cat b b: no such file in rev 74609c7f506e [1] No changes, just a different message: $ hg ci -v --amend -m 'no changes, new message' amending changeset 74609c7f506e copying changeset 74609c7f506e to ad120869acf0 committing files: a committing manifest committing changelog stripping amended changeset 74609c7f506e 1 changesets found uncompressed size of bundle content: 270 (changelog) 163 (manifests) 129 a saved backup bundle to $TESTTMP/.hg/strip-backup/74609c7f506e-1bfde511-amend-backup.hg (glob) 1 changesets found uncompressed size of bundle content: 266 (changelog) 163 (manifests) 129 a adding branch adding changesets 
adding manifests adding file changes added 1 changesets with 1 changes to 1 files committed changeset 1:1cd866679df8 $ hg diff -c . diff -r ad120869acf0 -r 1cd866679df8 a --- a/a Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,3 @@ a +a +a $ hg log changeset: 1:1cd866679df8 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: no changes, new message changeset: 0:ad120869acf0 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: base Disable default date on commit so when -d isn't given, the old date is preserved: $ echo '[defaults]' >> $HGRCPATH $ echo 'commit=' >> $HGRCPATH Test -u/-d: $ cat > .hg/checkeditform.sh < env | grep HGEDITFORM > true > EOF $ HGEDITOR="sh .hg/checkeditform.sh" hg ci --amend -u foo -d '1 0' HGEDITFORM=commit.amend.normal saved backup bundle to $TESTTMP/.hg/strip-backup/1cd866679df8-5f5bcb85-amend-backup.hg (glob) $ echo a >> a $ hg ci --amend -u foo -d '1 0' saved backup bundle to $TESTTMP/.hg/strip-backup/780e6f23e03d-83b10a27-amend-backup.hg (glob) $ hg log -r . changeset: 1:5f357c7560ab tag: tip user: foo date: Thu Jan 01 00:00:01 1970 +0000 summary: no changes, new message Open editor with old commit message if a message isn't given otherwise: $ cat > editor.sh << '__EOF__' > #!/bin/sh > cat $1 > echo "another precious commit message" > "$1" > __EOF__ at first, test saving last-message.txt $ cat > .hg/hgrc << '__EOF__' > [hooks] > pretxncommit.test-saving-last-message = false > __EOF__ $ rm -f .hg/last-message.txt $ hg commit --amend -v -m "message given from command line" amending changeset 5f357c7560ab copying changeset 5f357c7560ab to ad120869acf0 committing files: a committing manifest committing changelog running hook pretxncommit.test-saving-last-message: false transaction abort! 
rollback completed abort: pretxncommit.test-saving-last-message hook exited with status 1 [255] $ cat .hg/last-message.txt message given from command line (no-eol) $ rm -f .hg/last-message.txt $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit --amend -v amending changeset 5f357c7560ab copying changeset 5f357c7560ab to ad120869acf0 no changes, new message HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. HG: -- HG: user: foo HG: branch 'default' HG: changed a committing files: a committing manifest committing changelog running hook pretxncommit.test-saving-last-message: false transaction abort! rollback completed abort: pretxncommit.test-saving-last-message hook exited with status 1 [255] $ cat .hg/last-message.txt another precious commit message $ cat > .hg/hgrc << '__EOF__' > [hooks] > pretxncommit.test-saving-last-message = > __EOF__ then, test editing custom commit message $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit --amend -v amending changeset 5f357c7560ab copying changeset 5f357c7560ab to ad120869acf0 no changes, new message HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. 
HG: -- HG: user: foo HG: branch 'default' HG: changed a committing files: a committing manifest committing changelog stripping amended changeset 5f357c7560ab 1 changesets found uncompressed size of bundle content: 258 (changelog) 163 (manifests) 131 a saved backup bundle to $TESTTMP/.hg/strip-backup/5f357c7560ab-e7c84ade-amend-backup.hg (glob) 1 changesets found uncompressed size of bundle content: 266 (changelog) 163 (manifests) 131 a adding branch adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files committed changeset 1:7ab3bf440b54 Same, but with changes in working dir (different code path): $ echo a >> a $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit --amend -v amending changeset 7ab3bf440b54 committing files: a committing manifest committing changelog copying changeset a0ea9b1a4c8c to ad120869acf0 another precious commit message HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. HG: -- HG: user: foo HG: branch 'default' HG: changed a committing files: a committing manifest committing changelog stripping intermediate changeset a0ea9b1a4c8c stripping amended changeset 7ab3bf440b54 2 changesets found uncompressed size of bundle content: 490 (changelog) 322 (manifests) 249 a saved backup bundle to $TESTTMP/.hg/strip-backup/7ab3bf440b54-8e3b5088-amend-backup.hg (glob) 1 changesets found uncompressed size of bundle content: 266 (changelog) 163 (manifests) 133 a adding branch adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files committed changeset 1:ea22a388757c $ rm editor.sh $ hg log -r . 
changeset: 1:ea22a388757c tag: tip user: foo date: Thu Jan 01 00:00:01 1970 +0000 summary: another precious commit message Moving bookmarks, preserve active bookmark: $ hg book book1 $ hg book book2 $ hg ci --amend -m 'move bookmarks' saved backup bundle to $TESTTMP/.hg/strip-backup/ea22a388757c-e51094db-amend-backup.hg (glob) $ hg book book1 1:6cec5aa930e2 * book2 1:6cec5aa930e2 $ echo a >> a $ hg ci --amend -m 'move bookmarks' saved backup bundle to $TESTTMP/.hg/strip-backup/6cec5aa930e2-e9b06de4-amend-backup.hg (glob) $ hg book book1 1:48bb6e53a15f * book2 1:48bb6e53a15f abort does not loose bookmarks $ cat > editor.sh << '__EOF__' > #!/bin/sh > echo "" > "$1" > __EOF__ $ echo a >> a $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit --amend transaction abort! rollback completed abort: empty commit message [255] $ hg book book1 1:48bb6e53a15f * book2 1:48bb6e53a15f $ hg revert -Caq $ rm editor.sh $ echo '[defaults]' >> $HGRCPATH $ echo "commit=-d '0 0'" >> $HGRCPATH Moving branches: $ hg branch foo marked working directory as branch foo (branches are permanent and global, did you want a bookmark?) $ echo a >> a $ hg ci -m 'branch foo' $ hg branch default -f marked working directory as branch default $ hg ci --amend -m 'back to default' saved backup bundle to $TESTTMP/.hg/strip-backup/8ac881fbf49d-fd962fef-amend-backup.hg (glob) $ hg branches default 2:ce12b0b57d46 Close branch: $ hg up -q 0 $ echo b >> b $ hg branch foo marked working directory as branch foo (branches are permanent and global, did you want a bookmark?) 
$ hg ci -Am 'fork' adding b $ echo b >> b $ hg ci -mb $ hg ci --amend --close-branch -m 'closing branch foo' saved backup bundle to $TESTTMP/.hg/strip-backup/c962248fa264-6701c392-amend-backup.hg (glob) Same thing, different code path: $ echo b >> b $ hg ci -m 'reopen branch' reopening closed branch head 4 $ echo b >> b $ hg ci --amend --close-branch saved backup bundle to $TESTTMP/.hg/strip-backup/027371728205-49c0c55d-amend-backup.hg (glob) $ hg branches default 2:ce12b0b57d46 Refuse to amend during a merge: $ hg up -q default $ hg merge foo 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci --amend abort: cannot amend while merging [255] $ hg ci -m 'merge' Follow copies/renames: $ hg mv b c $ hg ci -m 'b -> c' $ hg mv c d $ hg ci --amend -m 'b -> d' saved backup bundle to $TESTTMP/.hg/strip-backup/b8c6eac7f12e-adaaa8b1-amend-backup.hg (glob) $ hg st --rev '.^' --copies d A d b $ hg cp d e $ hg ci -m 'e = d' $ hg cp e f $ hg ci --amend -m 'f = d' saved backup bundle to $TESTTMP/.hg/strip-backup/7f9761d65613-d37aa788-amend-backup.hg (glob) $ hg st --rev '.^' --copies f A f d $ mv f f.orig $ hg rm -A f $ hg ci -m removef $ hg cp a f $ mv f.orig f $ hg ci --amend -m replacef saved backup bundle to $TESTTMP/.hg/strip-backup/9e8c5f7e3d95-90259f67-amend-backup.hg (glob) $ hg st --change . --copies $ hg log -r . --template "{file_copies}\n" Move added file (issue3410): $ echo g >> g $ hg ci -Am g adding g $ hg mv g h $ hg ci --amend saved backup bundle to $TESTTMP/.hg/strip-backup/24aa8eacce2b-7059e0f1-amend-backup.hg (glob) $ hg st --change . --copies h A h $ hg log -r . --template "{file_copies}\n" Can't rollback an amend: $ hg rollback no rollback information available [1] Preserve extra dict (issue3430): $ hg branch a marked working directory as branch a (branches are permanent and global, did you want a bookmark?) 
$ echo a >> a $ hg ci -ma $ hg ci --amend -m "a'" saved backup bundle to $TESTTMP/.hg/strip-backup/3837aa2a2fdb-2be01fd1-amend-backup.hg (glob) $ hg log -r . --template "{branch}\n" a $ hg ci --amend -m "a''" saved backup bundle to $TESTTMP/.hg/strip-backup/c05c06be7514-ed28c4cd-amend-backup.hg (glob) $ hg log -r . --template "{branch}\n" a Also preserve other entries in the dict that are in the old commit, first graft something so there's an additional entry: $ hg up 0 -q $ echo z > z $ hg ci -Am 'fork' adding z created new head $ hg up 11 5 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg graft 12 grafting 12:2647734878ef "fork" (tip) $ hg ci --amend -m 'graft amend' saved backup bundle to $TESTTMP/.hg/strip-backup/bd010aea3f39-eedb103b-amend-backup.hg (glob) $ hg log -r . --debug | grep extra extra: amend_source=bd010aea3f39f3fb2a2f884b9ccb0471cd77398e extra: branch=a extra: source=2647734878ef0236dda712fae9c1651cf694ea8a Preserve phase $ hg phase '.^::.' 11: draft 13: draft $ hg phase --secret --force . $ hg phase '.^::.' 11: draft 13: secret $ hg commit --amend -m 'amend for phase' -q $ hg phase '.^::.' 
11: draft 13: secret Test amend with obsolete --------------------------- Enable obsolete $ cat >> $HGRCPATH << EOF > [experimental] > evolution=createmarkers,allowunstable > EOF Amend with no files changes $ hg id -n 13 $ hg ci --amend -m 'babar' $ hg id -n 14 $ hg log -Gl 3 --style=compact @ 14[tip]:11 b650e6ee8614 1970-01-01 00:00 +0000 test | babar | | o 12:0 2647734878ef 1970-01-01 00:00 +0000 test | | fork | | o | 11 3334b7925910 1970-01-01 00:00 +0000 test | | a'' | | $ hg log -Gl 4 --hidden --style=compact @ 14[tip]:11 b650e6ee8614 1970-01-01 00:00 +0000 test | babar | | x 13:11 68ff8ff97044 1970-01-01 00:00 +0000 test |/ amend for phase | | o 12:0 2647734878ef 1970-01-01 00:00 +0000 test | | fork | | o | 11 3334b7925910 1970-01-01 00:00 +0000 test | | a'' | | Amend with files changes (note: the extra commit over 15 is a temporary junk I would be happy to get ride of) $ echo 'babar' >> a $ hg commit --amend $ hg log -Gl 6 --hidden --style=compact @ 16[tip]:11 9f9e9bccf56c 1970-01-01 00:00 +0000 test | babar | | x 15 90fef497c56f 1970-01-01 00:00 +0000 test | | temporary amend commit for b650e6ee8614 | | | x 14:11 b650e6ee8614 1970-01-01 00:00 +0000 test |/ babar | | x 13:11 68ff8ff97044 1970-01-01 00:00 +0000 test |/ amend for phase | | o 12:0 2647734878ef 1970-01-01 00:00 +0000 test | | fork | | o | 11 3334b7925910 1970-01-01 00:00 +0000 test | | a'' | | Test that amend does not make it easy to create obsolescence cycle --------------------------------------------------------------------- $ hg id -r 14 --hidden b650e6ee8614 (a) $ hg revert -ar 14 --hidden reverting a $ hg commit --amend $ hg id b99e5df575f7 (a) tip Test that rewriting leaving instability behind is allowed --------------------------------------------------------------------- $ hg up '.^' 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo 'b' >> a $ hg log --style compact -r 'children(.)' 18[tip]:11 b99e5df575f7 1970-01-01 00:00 +0000 test babar $ hg commit --amend $ 
hg log -r 'unstable()' changeset: 18:b99e5df575f7 branch: a parent: 11:3334b7925910 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: babar Amend a merge changeset (with renames and conflicts from the second parent): $ hg up -q default $ hg branch -q bar $ hg cp a aa $ hg mv z zz $ echo cc > cc $ hg add cc $ hg ci -m aazzcc $ hg up -q default $ echo a >> a $ echo dd > cc $ hg add cc $ hg ci -m aa $ hg merge -q bar warning: conflicts while merging cc! (edit, then use 'hg resolve --mark') [1] $ hg resolve -m cc (no more unresolved files) $ hg ci -m 'merge bar' $ hg log --config diff.git=1 -pr . changeset: 23:93cd4445f720 tag: tip parent: 22:30d96aeaf27b parent: 21:1aa437659d19 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: merge bar diff --git a/a b/aa copy from a copy to aa diff --git a/cc b/cc --- a/cc +++ b/cc @@ -1,1 +1,5 @@ +<<<<<<< local: 30d96aeaf27b - test: aa dd +======= +cc +>>>>>>> other: 1aa437659d19 bar - test: aazzcc diff --git a/z b/zz rename from z rename to zz $ hg debugrename aa aa renamed from a:a80d06849b333b8a3d5c445f8ba3142010dcdc9e $ hg debugrename zz zz renamed from z:69a1b67522704ec122181c0890bd16e9d3e7516a $ hg debugrename cc cc not renamed $ HGEDITOR="sh .hg/checkeditform.sh" hg ci --amend -m 'merge bar (amend message)' --edit HGEDITFORM=commit.amend.merge $ hg log --config diff.git=1 -pr . 
changeset: 24:832b50f2c271 tag: tip parent: 22:30d96aeaf27b parent: 21:1aa437659d19 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: merge bar (amend message) diff --git a/a b/aa copy from a copy to aa diff --git a/cc b/cc --- a/cc +++ b/cc @@ -1,1 +1,5 @@ +<<<<<<< local: 30d96aeaf27b - test: aa dd +======= +cc +>>>>>>> other: 1aa437659d19 bar - test: aazzcc diff --git a/z b/zz rename from z rename to zz $ hg debugrename aa aa renamed from a:a80d06849b333b8a3d5c445f8ba3142010dcdc9e $ hg debugrename zz zz renamed from z:69a1b67522704ec122181c0890bd16e9d3e7516a $ hg debugrename cc cc not renamed $ hg mv zz z $ hg ci --amend -m 'merge bar (undo rename)' $ hg log --config diff.git=1 -pr . changeset: 26:bdafc5c72f74 tag: tip parent: 22:30d96aeaf27b parent: 21:1aa437659d19 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: merge bar (undo rename) diff --git a/a b/aa copy from a copy to aa diff --git a/cc b/cc --- a/cc +++ b/cc @@ -1,1 +1,5 @@ +<<<<<<< local: 30d96aeaf27b - test: aa dd +======= +cc +>>>>>>> other: 1aa437659d19 bar - test: aazzcc $ hg debugrename z z not renamed Amend a merge changeset (with renames during the merge): $ hg up -q bar $ echo x > x $ hg add x $ hg ci -m x $ hg up -q default $ hg merge -q bar $ hg mv aa aaa $ echo aa >> aaa $ hg ci -m 'merge bar again' $ hg log --config diff.git=1 -pr . changeset: 28:32f19415b634 tag: tip parent: 26:bdafc5c72f74 parent: 27:4c94d5bc65f5 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: merge bar again diff --git a/aa b/aa deleted file mode 100644 --- a/aa +++ /dev/null @@ -1,2 +0,0 @@ -a -a diff --git a/aaa b/aaa new file mode 100644 --- /dev/null +++ b/aaa @@ -0,0 +1,3 @@ +a +a +aa diff --git a/x b/x new file mode 100644 --- /dev/null +++ b/x @@ -0,0 +1,1 @@ +x $ hg debugrename aaa aaa renamed from aa:37d9b5d994eab34eda9c16b195ace52c7b129980 $ hg mv aaa aa $ hg ci --amend -m 'merge bar again (undo rename)' $ hg log --config diff.git=1 -pr . 
changeset: 30:1e2a06b3d312 tag: tip parent: 26:bdafc5c72f74 parent: 27:4c94d5bc65f5 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: merge bar again (undo rename) diff --git a/aa b/aa --- a/aa +++ b/aa @@ -1,2 +1,3 @@ a a +aa diff --git a/x b/x new file mode 100644 --- /dev/null +++ b/x @@ -0,0 +1,1 @@ +x $ hg debugrename aa aa not renamed $ hg debugrename -r '.^' aa aa renamed from a:a80d06849b333b8a3d5c445f8ba3142010dcdc9e Amend a merge changeset (with manifest-level conflicts): $ hg up -q bar $ hg rm aa $ hg ci -m 'rm aa' $ hg up -q default $ echo aa >> aa $ hg ci -m aa $ hg merge -q bar --config ui.interactive=True << EOF > c > EOF local changed aa which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? c $ hg ci -m 'merge bar (with conflicts)' $ hg log --config diff.git=1 -pr . changeset: 33:97a298b0c59f tag: tip parent: 32:3d78ce4226b8 parent: 31:67db8847a540 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: merge bar (with conflicts) $ hg rm aa $ hg ci --amend -m 'merge bar (with conflicts, amended)' $ hg log --config diff.git=1 -pr . changeset: 35:6de0c1bde1c8 tag: tip parent: 32:3d78ce4226b8 parent: 31:67db8847a540 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: merge bar (with conflicts, amended) diff --git a/aa b/aa deleted file mode 100644 --- a/aa +++ /dev/null @@ -1,4 +0,0 @@ -a -a -aa -aa Issue 3445: amending with --close-branch a commit that created a new head should fail This shouldn't be possible: $ hg up -q default $ hg branch closewithamend marked working directory as branch closewithamend $ echo foo > foo $ hg add foo $ hg ci -m.. 
$ hg ci --amend --close-branch -m 'closing' abort: can only close branch heads [255] This silliness fails: $ hg branch silliness marked working directory as branch silliness $ echo b >> b $ hg ci --close-branch -m'open and close' abort: can only close branch heads [255] Test that amend with --secret creates new secret changeset forcibly --------------------------------------------------------------------- $ hg phase '.^::.' 35: draft 36: draft $ hg commit --amend --secret -m 'amend as secret' -q $ hg phase '.^::.' 35: draft 38: secret Test that amend with --edit invokes editor forcibly --------------------------------------------------- $ hg parents --template "{desc}\n" amend as secret $ HGEDITOR=cat hg commit --amend -m "editor should be suppressed" $ hg parents --template "{desc}\n" editor should be suppressed $ hg status --rev '.^1::.' A foo $ HGEDITOR=cat hg commit --amend -m "editor should be invoked" --edit editor should be invoked HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. 
HG: -- HG: user: test HG: branch 'silliness' HG: added foo $ hg parents --template "{desc}\n" editor should be invoked Test that "diff()" in committemplate works correctly for amending ----------------------------------------------------------------- $ cat >> .hg/hgrc < [committemplate] > changeset.commit.amend = {desc}\n > HG: M: {file_mods} > HG: A: {file_adds} > HG: R: {file_dels} > {splitlines(diff()) % 'HG: {line}\n'} > EOF $ hg parents --template "M: {file_mods}\nA: {file_adds}\nR: {file_dels}\n" M: A: foo R: $ hg status -amr $ HGEDITOR=cat hg commit --amend -e -m "expecting diff of foo" expecting diff of foo HG: M: HG: A: foo HG: R: HG: diff -r 6de0c1bde1c8 foo HG: --- /dev/null Thu Jan 01 00:00:00 1970 +0000 HG: +++ b/foo Thu Jan 01 00:00:00 1970 +0000 HG: @@ -0,0 +1,1 @@ HG: +foo $ echo y > y $ hg add y $ HGEDITOR=cat hg commit --amend -e -m "expecting diff of foo and y" expecting diff of foo and y HG: M: HG: A: foo y HG: R: HG: diff -r 6de0c1bde1c8 foo HG: --- /dev/null Thu Jan 01 00:00:00 1970 +0000 HG: +++ b/foo Thu Jan 01 00:00:00 1970 +0000 HG: @@ -0,0 +1,1 @@ HG: +foo HG: diff -r 6de0c1bde1c8 y HG: --- /dev/null Thu Jan 01 00:00:00 1970 +0000 HG: +++ b/y Thu Jan 01 00:00:00 1970 +0000 HG: @@ -0,0 +1,1 @@ HG: +y $ hg rm a $ HGEDITOR=cat hg commit --amend -e -m "expecting diff of a, foo and y" expecting diff of a, foo and y HG: M: HG: A: foo y HG: R: a HG: diff -r 6de0c1bde1c8 a HG: --- a/a Thu Jan 01 00:00:00 1970 +0000 HG: +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 HG: @@ -1,2 +0,0 @@ HG: -a HG: -a HG: diff -r 6de0c1bde1c8 foo HG: --- /dev/null Thu Jan 01 00:00:00 1970 +0000 HG: +++ b/foo Thu Jan 01 00:00:00 1970 +0000 HG: @@ -0,0 +1,1 @@ HG: +foo HG: diff -r 6de0c1bde1c8 y HG: --- /dev/null Thu Jan 01 00:00:00 1970 +0000 HG: +++ b/y Thu Jan 01 00:00:00 1970 +0000 HG: @@ -0,0 +1,1 @@ HG: +y $ hg rm x $ HGEDITOR=cat hg commit --amend -e -m "expecting diff of a, foo, x and y" expecting diff of a, foo, x and y HG: M: HG: A: foo y HG: R: a x HG: diff 
-r 6de0c1bde1c8 a HG: --- a/a Thu Jan 01 00:00:00 1970 +0000 HG: +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 HG: @@ -1,2 +0,0 @@ HG: -a HG: -a HG: diff -r 6de0c1bde1c8 foo HG: --- /dev/null Thu Jan 01 00:00:00 1970 +0000 HG: +++ b/foo Thu Jan 01 00:00:00 1970 +0000 HG: @@ -0,0 +1,1 @@ HG: +foo HG: diff -r 6de0c1bde1c8 x HG: --- a/x Thu Jan 01 00:00:00 1970 +0000 HG: +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 HG: @@ -1,1 +0,0 @@ HG: -x HG: diff -r 6de0c1bde1c8 y HG: --- /dev/null Thu Jan 01 00:00:00 1970 +0000 HG: +++ b/y Thu Jan 01 00:00:00 1970 +0000 HG: @@ -0,0 +1,1 @@ HG: +y $ echo cccc >> cc $ hg status -amr M cc $ HGEDITOR=cat hg commit --amend -e -m "cc should be excluded" -X cc cc should be excluded HG: M: HG: A: foo y HG: R: a x HG: diff -r 6de0c1bde1c8 a HG: --- a/a Thu Jan 01 00:00:00 1970 +0000 HG: +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 HG: @@ -1,2 +0,0 @@ HG: -a HG: -a HG: diff -r 6de0c1bde1c8 foo HG: --- /dev/null Thu Jan 01 00:00:00 1970 +0000 HG: +++ b/foo Thu Jan 01 00:00:00 1970 +0000 HG: @@ -0,0 +1,1 @@ HG: +foo HG: diff -r 6de0c1bde1c8 x HG: --- a/x Thu Jan 01 00:00:00 1970 +0000 HG: +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 HG: @@ -1,1 +0,0 @@ HG: -x HG: diff -r 6de0c1bde1c8 y HG: --- /dev/null Thu Jan 01 00:00:00 1970 +0000 HG: +++ b/y Thu Jan 01 00:00:00 1970 +0000 HG: @@ -0,0 +1,1 @@ HG: +y Check for issue4405 ------------------- Setup the repo with a file that gets moved in a second commit. $ hg init repo $ cd repo $ touch a0 $ hg add a0 $ hg commit -m a0 $ hg mv a0 a1 $ hg commit -m a1 $ hg up -q 0 $ hg log -G --template '{rev} {desc}' o 1 a1 | @ 0 a0 Now we branch the repro, but re-use the file contents, so we have a divergence in the file revlog topology and the changelog topology. 
$ hg revert --rev 1 --all removing a0 adding a1 $ hg ci -qm 'a1-amend' $ hg log -G --template '{rev} {desc}' @ 2 a1-amend | | o 1 a1 |/ o 0 a0 The way mercurial does amends is to create a temporary commit (rev 3) and then fold the new and old commits together into another commit (rev 4). During this process, _findlimit is called to check how far back to look for the transitive closure of file copy information, but due to the divergence of the filelog and changelog graph topologies, before _findlimit was fixed, it returned a rev which was not far enough back in this case. $ hg mv a1 a2 $ hg status --copies --rev 0 A a2 a0 R a0 $ hg ci --amend -q $ hg log -G --template '{rev} {desc}' @ 4 a1-amend | | o 1 a1 |/ o 0 a0 Before the fix, the copy information was lost. $ hg status --copies --rev 0 A a2 a0 R a0 $ cd .. Check that amend properly preserve rename from directory rename (issue-4516) If a parent of the merge renames a full directory, any files added to the old directory in the other parent will be renamed to the new directory. For some reason, the rename metadata was when amending such merge. This test ensure we do not regress. We have a dedicated repo because it needs a setup with renamed directory) $ hg init issue4516 $ cd issue4516 $ mkdir olddirname $ echo line1 > olddirname/commonfile.py $ hg add olddirname/commonfile.py $ hg ci -m first $ hg branch newdirname marked working directory as branch newdirname (branches are permanent and global, did you want a bookmark?) 
$ hg mv olddirname newdirname moving olddirname/commonfile.py to newdirname/commonfile.py (glob) $ hg ci -m rename $ hg update default 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo line1 > olddirname/newfile.py $ hg add olddirname/newfile.py $ hg ci -m log $ hg up newdirname 1 files updated, 0 files merged, 2 files removed, 0 files unresolved $ # create newdirname/newfile.py $ hg merge default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m add $ $ hg debugrename newdirname/newfile.py newdirname/newfile.py renamed from olddirname/newfile.py:690b295714aed510803d3020da9c70fca8336def (glob) $ hg status -C --change . A newdirname/newfile.py $ hg status -C --rev 1 A newdirname/newfile.py $ hg status -C --rev 2 A newdirname/commonfile.py olddirname/commonfile.py A newdirname/newfile.py olddirname/newfile.py R olddirname/commonfile.py R olddirname/newfile.py $ hg debugindex newdirname/newfile.py rev offset length delta linkrev nodeid p1 p2 0 0 88 -1 3 34a4d536c0c0 000000000000 000000000000 $ echo a >> newdirname/commonfile.py $ hg ci --amend -m bug $ hg debugrename newdirname/newfile.py newdirname/newfile.py renamed from olddirname/newfile.py:690b295714aed510803d3020da9c70fca8336def (glob) $ hg debugindex newdirname/newfile.py rev offset length delta linkrev nodeid p1 p2 0 0 88 -1 3 34a4d536c0c0 000000000000 000000000000 mercurial-3.7.3/tests/silenttestrunner.py0000644000175000017500000000122512676531525020273 0ustar mpmmpm00000000000000import unittest, sys, os def main(modulename): '''run the tests found in module, printing nothing when all tests pass''' module = sys.modules[modulename] suite = unittest.defaultTestLoader.loadTestsFromModule(module) results = unittest.TestResult() suite.run(results) if results.errors or results.failures: for tc, exc in results.errors: print 'ERROR:', tc print sys.stdout.write(exc) for tc, exc in results.failures: print 'FAIL:', tc print 
sys.stdout.write(exc) sys.exit(1) if os.environ.get('SILENT_BE_NOISY'): main = unittest.main mercurial-3.7.3/tests/test-convert-splicemap.t0000644000175000017500000001462712676531525021100 0ustar mpmmpm00000000000000 $ echo "[extensions]" >> $HGRCPATH $ echo "convert=" >> $HGRCPATH $ glog() > { > hg log -G --template '{rev}:{node|short} "{desc|firstline}"\ > files: {files}\n' "$@" > } $ hg init repo1 $ cd repo1 $ echo a > a $ hg ci -Am adda adding a $ echo b > b $ echo a >> a $ hg ci -Am addb adding b $ PARENTID1=`hg id --debug -i` $ echo c > c $ hg ci -Am addc adding c $ PARENTID2=`hg id --debug -i` $ cd .. $ glog -R repo1 @ 2:e55c719b85b6 "addc" files: c | o 1:6d4c2037ddc2 "addb" files: a b | o 0:07f494440405 "adda" files: a $ hg init repo2 $ cd repo2 $ echo b > a $ echo d > d $ hg ci -Am addaandd adding a adding d $ INVALIDID1=afd12345af $ INVALIDID2=28173x36ddd1e67bf7098d541130558ef5534a86 $ CHILDID1=`hg id --debug -i` $ echo d >> d $ hg ci -Am changed $ CHILDID2=`hg id --debug -i` $ echo e > e $ hg ci -Am adde adding e $ cd .. 
$ glog -R repo2 @ 2:a39b65753b0a "adde" files: e | o 1:e4ea00df9189 "changed" files: d | o 0:527cdedf31fb "addaandd" files: a d test invalid splicemap1 $ cat > splicemap < $CHILDID2 > EOF $ hg convert --splicemap splicemap repo2 repo1 abort: syntax error in splicemap(1): child parent1[,parent2] expected [255] test invalid splicemap2 $ cat > splicemap < $CHILDID2 $PARENTID1, $PARENTID2, $PARENTID2 > EOF $ hg convert --splicemap splicemap repo2 repo1 abort: syntax error in splicemap(1): child parent1[,parent2] expected [255] test invalid splicemap3 $ cat > splicemap < $INVALIDID1 $INVALIDID2 > EOF $ hg convert --splicemap splicemap repo2 repo1 abort: splicemap entry afd12345af is not a valid revision identifier [255] splice repo2 on repo1 $ cat > splicemap < $CHILDID1 $PARENTID1 > $CHILDID2 $PARENTID2,$CHILDID1 > > EOF $ cat splicemap 527cdedf31fbd5ea708aa14eeecf53d4676f38db 6d4c2037ddc2cb2627ac3a244ecce35283268f8e e4ea00df91897da3079a10fab658c1eddba6617b e55c719b85b60e5102fac26110ba626e7cb6b7dc,527cdedf31fbd5ea708aa14eeecf53d4676f38db $ hg clone repo1 target1 updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg convert --splicemap splicemap repo2 target1 scanning source... sorting... converting... 
2 addaandd spliced in 6d4c2037ddc2cb2627ac3a244ecce35283268f8e as parents of 527cdedf31fbd5ea708aa14eeecf53d4676f38db 1 changed spliced in e55c719b85b60e5102fac26110ba626e7cb6b7dc and 527cdedf31fbd5ea708aa14eeecf53d4676f38db as parents of e4ea00df91897da3079a10fab658c1eddba6617b 0 adde $ glog -R target1 o 5:16bc847b02aa "adde" files: e | o 4:e30e4fee3418 "changed" files: d |\ | o 3:e673348c3a3c "addaandd" files: a d | | @ | 2:e55c719b85b6 "addc" files: c |/ o 1:6d4c2037ddc2 "addb" files: a b | o 0:07f494440405 "adda" files: a Test splicemap and conversion order $ hg init ordered $ cd ordered $ echo a > a $ hg ci -Am adda adding a $ hg branch branch marked working directory as branch branch (branches are permanent and global, did you want a bookmark?) $ echo a >> a $ hg ci -Am changea $ echo a >> a $ hg ci -Am changeaagain $ hg up 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo b > b $ hg ci -Am addb adding b We want 2 to depend on 1 and 3. Since 3 is always converted after 2, the bug should be exhibited with all conversion orders. $ cat > ../splicemap < `(hg id -r 2 -i --debug)` `(hg id -r 1 -i --debug)`, `(hg id -r 3 -i --debug)` > EOF $ cd .. $ cat splicemap 7c364e7fa7d70ae525610c016317ed717b519d97 717d54d67e6c31fd75ffef2ff3042bdd98418437, 102a90ea7b4a3361e4082ed620918c261189a36a Test regular conversion $ hg convert --splicemap splicemap ordered ordered-hg1 initializing destination ordered-hg1 repository scanning source... sorting... converting... 3 adda 2 changea 1 addb 0 changeaagain spliced in 717d54d67e6c31fd75ffef2ff3042bdd98418437 and 102a90ea7b4a3361e4082ed620918c261189a36a as parents of 7c364e7fa7d70ae525610c016317ed717b519d97 $ glog -R ordered-hg1 o 3:4cb04b9afbf2 "changeaagain" files: a |\ | o 2:102a90ea7b4a "addb" files: b | | o | 1:717d54d67e6c "changea" files: a |/ o 0:07f494440405 "adda" files: a Test conversion with parent revisions already in dest, using source and destination identifiers. 
Test unknown splicemap target. $ hg convert -r1 ordered ordered-hg2 initializing destination ordered-hg2 repository scanning source... sorting... converting... 1 adda 0 changea $ hg convert -r3 ordered ordered-hg2 scanning source... sorting... converting... 0 addb $ cat > splicemap < `(hg -R ordered id -r 2 -i --debug)` \ > `(hg -R ordered-hg2 id -r 1 -i --debug)`,\ > `(hg -R ordered-hg2 id -r 2 -i --debug)` > deadbeef102a90ea7b4a3361e4082ed620918c26 deadbeef102a90ea7b4a3361e4082ed620918c27 > EOF $ hg convert --splicemap splicemap ordered ordered-hg2 scanning source... splice map revision deadbeef102a90ea7b4a3361e4082ed620918c26 is not being converted, ignoring sorting... converting... 0 changeaagain spliced in 717d54d67e6c31fd75ffef2ff3042bdd98418437 and 102a90ea7b4a3361e4082ed620918c261189a36a as parents of 7c364e7fa7d70ae525610c016317ed717b519d97 $ glog -R ordered-hg2 o 3:4cb04b9afbf2 "changeaagain" files: a |\ | o 2:102a90ea7b4a "addb" files: b | | o | 1:717d54d67e6c "changea" files: a |/ o 0:07f494440405 "adda" files: a Test empty conversion $ hg convert --splicemap splicemap ordered ordered-hg2 scanning source... splice map revision deadbeef102a90ea7b4a3361e4082ed620918c26 is not being converted, ignoring sorting... converting... Test clonebranches $ hg --config convert.hg.clonebranches=true convert \ > --splicemap splicemap ordered ordered-hg3 initializing destination ordered-hg3 repository scanning source... abort: revision 717d54d67e6c31fd75ffef2ff3042bdd98418437 not found in destination repository (lookups with clonebranches=true are not implemented) [255] Test invalid dependency $ cat > splicemap < `(hg -R ordered id -r 2 -i --debug)` \ > deadbeef102a90ea7b4a3361e4082ed620918c26,\ > `(hg -R ordered-hg2 id -r 2 -i --debug)` > EOF $ hg convert --splicemap splicemap ordered ordered-hg4 initializing destination ordered-hg4 repository scanning source... 
abort: unknown splice map parent: deadbeef102a90ea7b4a3361e4082ed620918c26 [255] mercurial-3.7.3/tests/test-convert-svn-sink.t0000644000175000017500000002077212676531525020671 0ustar mpmmpm00000000000000#require svn13 $ svnupanddisplay() > { > ( > cd $1; > svn up -q; > svn st -v | sed 's/ */ /g' | sort > limit='' > if [ $2 -gt 0 ]; then > limit="--limit=$2" > fi > svn log --xml -v $limit | python "$TESTDIR/svnxml.py" > ) > } $ cat >> $HGRCPATH < [extensions] > convert = > EOF $ hg init a Add $ echo a > a/a $ mkdir -p a/d1/d2 $ echo b > a/d1/d2/b $ hg --cwd a ci -d '0 0' -A -m 'add a file' adding a adding d1/d2/b Modify $ svn-safe-append.py a a/a $ hg --cwd a ci -d '1 0' -m 'modify a file' $ hg --cwd a tip -q 1:e0e2b8a9156b $ hg convert -d svn a assuming destination a-hg initializing svn repository 'a-hg' initializing svn working copy 'a-hg-wc' scanning source... sorting... converting... 1 add a file 0 modify a file $ svnupanddisplay a-hg-wc 2 2 1 test d1 2 1 test d1/d2 (glob) 2 1 test d1/d2/b (glob) 2 2 test . 2 2 test a revision: 2 author: test msg: modify a file M /a revision: 1 author: test msg: add a file A /a A /d1 A /d1/d2 A /d1/d2/b $ ls a a-hg-wc a: a d1 a-hg-wc: a d1 $ cmp a/a a-hg-wc/a Rename $ hg --cwd a mv a b $ hg --cwd a ci -d '2 0' -m 'rename a file' $ hg --cwd a tip -q 2:eb5169441d43 $ hg convert -d svn a assuming destination a-hg initializing svn working copy 'a-hg-wc' scanning source... sorting... converting... 0 rename a file $ svnupanddisplay a-hg-wc 1 3 1 test d1 3 1 test d1/d2 (glob) 3 1 test d1/d2/b (glob) 3 3 test . 3 3 test b revision: 3 author: test msg: rename a file D /a A /b (from /a@2) $ ls a a-hg-wc a: b d1 a-hg-wc: b d1 Copy $ hg --cwd a cp b c $ hg --cwd a ci -d '3 0' -m 'copy a file' $ hg --cwd a tip -q 3:60effef6ab48 $ hg convert -d svn a assuming destination a-hg initializing svn working copy 'a-hg-wc' scanning source... sorting... converting... 
0 copy a file $ svnupanddisplay a-hg-wc 1 4 1 test d1 4 1 test d1/d2 (glob) 4 1 test d1/d2/b (glob) 4 3 test b 4 4 test . 4 4 test c revision: 4 author: test msg: copy a file A /c (from /b@3) $ ls a a-hg-wc a: b c d1 a-hg-wc: b c d1 $ hg --cwd a rm b Remove $ hg --cwd a ci -d '4 0' -m 'remove a file' $ hg --cwd a tip -q 4:87bbe3013fb6 $ hg convert -d svn a assuming destination a-hg initializing svn working copy 'a-hg-wc' scanning source... sorting... converting... 0 remove a file $ svnupanddisplay a-hg-wc 1 5 1 test d1 5 1 test d1/d2 (glob) 5 1 test d1/d2/b (glob) 5 4 test c 5 5 test . revision: 5 author: test msg: remove a file D /b $ ls a a-hg-wc a: c d1 a-hg-wc: c d1 Executable #if execbit $ chmod +x a/c #else $ echo fake >> a/c #endif $ hg --cwd a ci -d '5 0' -m 'make a file executable' #if execbit $ hg --cwd a tip -q 5:ff42e473c340 #else $ hg --cwd a tip -q 5:817a700c8cf1 #endif $ hg convert -d svn a assuming destination a-hg initializing svn working copy 'a-hg-wc' scanning source... sorting... converting... 0 make a file executable $ svnupanddisplay a-hg-wc 1 6 1 test d1 6 1 test d1/d2 (glob) 6 1 test d1/d2/b (glob) 6 6 test . 6 6 test c revision: 6 author: test msg: make a file executable M /c #if execbit $ test -x a-hg-wc/c #endif #if symlink Symlinks $ ln -s a/missing a/link $ hg --cwd a commit -Am 'add symlink' adding link $ hg --cwd a mv link newlink $ hg --cwd a commit -m 'move symlink' $ hg convert -d svn a a-svnlink initializing svn repository 'a-svnlink' initializing svn working copy 'a-svnlink-wc' scanning source... sorting... converting... 7 add a file 6 modify a file 5 rename a file 4 copy a file 3 remove a file 2 make a file executable 1 add symlink 0 move symlink $ svnupanddisplay a-svnlink-wc 1 8 1 test d1 8 1 test d1/d2 8 1 test d1/d2/b 8 6 test c 8 8 test . 
8 8 test newlink revision: 8 author: test msg: move symlink D /link A /newlink (from /link@7) Make sure our changes don't affect the rest of the test cases $ hg --cwd a up 5 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg --cwd a --config extensions.strip= strip -r 6 saved backup bundle to $TESTTMP/a/.hg/strip-backup/bd4f7b7a7067-ed505e42-backup.hg (glob) #endif Convert with --full adds and removes files that didn't change $ touch a/f $ hg -R a ci -Aqmf $ echo "rename c d" > filemap $ hg convert -d svn a --filemap filemap --full assuming destination a-hg initializing svn working copy 'a-hg-wc' scanning source... sorting... converting... 0 f $ svnupanddisplay a-hg-wc 1 7 7 test . 7 7 test d 7 7 test f revision: 7 author: test msg: f D /c A /d D /d1 A /f $ rm -rf a a-hg a-hg-wc Executable in new directory $ hg init a $ mkdir a/d1 $ echo a > a/d1/a #if execbit $ chmod +x a/d1/a #else $ echo fake >> a/d1/a #endif $ hg --cwd a ci -d '0 0' -A -m 'add executable file in new directory' adding d1/a $ hg convert -d svn a assuming destination a-hg initializing svn repository 'a-hg' initializing svn working copy 'a-hg-wc' scanning source... sorting... converting... 0 add executable file in new directory $ svnupanddisplay a-hg-wc 1 1 1 test . 1 1 test d1 1 1 test d1/a (glob) revision: 1 author: test msg: add executable file in new directory A /d1 A /d1/a #if execbit $ test -x a-hg-wc/d1/a #endif Copy to new directory $ mkdir a/d2 $ hg --cwd a cp d1/a d2/a $ hg --cwd a ci -d '1 0' -A -m 'copy file to new directory' $ hg convert -d svn a assuming destination a-hg initializing svn working copy 'a-hg-wc' scanning source... sorting... converting... 0 copy file to new directory $ svnupanddisplay a-hg-wc 1 2 1 test d1 2 1 test d1/a (glob) 2 2 test . 
2 2 test d2 2 2 test d2/a (glob) revision: 2 author: test msg: copy file to new directory A /d2 A /d2/a (from /d1/a@1) Branchy history $ hg init b $ echo base > b/b $ hg --cwd b ci -d '0 0' -Ambase adding b $ svn-safe-append.py left-1 b/b $ echo left-1 > b/left-1 $ hg --cwd b ci -d '1 0' -Amleft-1 adding left-1 $ svn-safe-append.py left-2 b/b $ echo left-2 > b/left-2 $ hg --cwd b ci -d '2 0' -Amleft-2 adding left-2 $ hg --cwd b up 0 1 files updated, 0 files merged, 2 files removed, 0 files unresolved $ svn-safe-append.py right-1 b/b $ echo right-1 > b/right-1 $ hg --cwd b ci -d '3 0' -Amright-1 adding right-1 created new head $ svn-safe-append.py right-2 b/b $ echo right-2 > b/right-2 $ hg --cwd b ci -d '4 0' -Amright-2 adding right-2 $ hg --cwd b up -C 2 3 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg --cwd b merge merging b warning: conflicts while merging b! (edit, then use 'hg resolve --mark') 2 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ hg --cwd b revert -r 2 b $ hg --cwd b resolve -m b (no more unresolved files) $ hg --cwd b ci -d '5 0' -m 'merge' Expect 4 changes $ hg convert -d svn b assuming destination b-hg initializing svn repository 'b-hg' initializing svn working copy 'b-hg-wc' scanning source... sorting... converting... 5 base 4 left-1 3 left-2 2 right-1 1 right-2 0 merge $ svnupanddisplay b-hg-wc 0 4 2 test left-1 4 3 test b 4 3 test left-2 4 4 test . 
4 4 test right-1 4 4 test right-2 revision: 4 author: test msg: merge A /right-1 A /right-2 revision: 3 author: test msg: left-2 M /b A /left-2 revision: 2 author: test msg: left-1 M /b A /left-1 revision: 1 author: test msg: base A /b Tags are not supported, but must not break conversion $ rm -rf a a-hg a-hg-wc $ hg init a $ echo a > a/a $ hg --cwd a ci -d '0 0' -A -m 'Add file a' adding a $ hg --cwd a tag -d '1 0' -m 'Tagged as v1.0' v1.0 $ hg convert -d svn a assuming destination a-hg initializing svn repository 'a-hg' initializing svn working copy 'a-hg-wc' scanning source... sorting... converting... 1 Add file a 0 Tagged as v1.0 writing Subversion tags is not yet implemented $ svnupanddisplay a-hg-wc 2 2 1 test a 2 2 test . 2 2 test .hgtags revision: 2 author: test msg: Tagged as v1.0 A /.hgtags revision: 1 author: test msg: Add file a A /a $ rm -rf a a-hg a-hg-wc mercurial-3.7.3/tests/test-journal-exists.t0000644000175000017500000000144512676531525020426 0ustar mpmmpm00000000000000 $ hg init $ echo a > a $ hg ci -Am0 adding a $ hg -q clone . foo $ touch .hg/store/journal $ echo foo > a $ hg ci -Am0 abort: abandoned transaction found! (run 'hg recover' to clean up transaction) [255] $ hg recover rolling back interrupted transaction checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 1 changesets, 1 total revisions Check that zero-size journals are correctly aborted: #if unix-permissions no-root $ hg bundle -qa repo.hg $ chmod -w foo/.hg/store/00changelog.i $ hg -R foo unbundle repo.hg adding changesets abort: Permission denied: $TESTTMP/foo/.hg/store/.00changelog.i-* (glob) [255] $ if test -f foo/.hg/store/journal; then echo 'journal exists :-('; fi #endif mercurial-3.7.3/tests/test-largefiles-misc.t0000644000175000017500000007105312676531525020507 0ustar mpmmpm00000000000000This file contains testcases that tend to be related to special cases or less common commands affecting largefile. 
Each sections should be independent of each others. $ USERCACHE="$TESTTMP/cache"; export USERCACHE $ mkdir "${USERCACHE}" $ cat >> $HGRCPATH < [extensions] > largefiles= > purge= > rebase= > transplant= > [phases] > publish=False > [largefiles] > minsize=2 > patterns=glob:**.dat > usercache=${USERCACHE} > [hooks] > precommit=sh -c "echo \\"Invoking status precommit hook\\"; hg status" > EOF Test copies and moves from a directory other than root (issue3516) ========================================================================= $ hg init lf_cpmv $ cd lf_cpmv $ mkdir dira $ mkdir dira/dirb $ touch dira/dirb/largefile $ hg add --large dira/dirb/largefile $ hg commit -m "added" Invoking status precommit hook A dira/dirb/largefile $ cd dira $ hg cp dirb/largefile foo/largefile TODO: Ideally, this should mention the largefile, not the standin $ hg log -T '{rev}\n' --stat 'set:clean()' 0 .hglf/dira/dirb/largefile | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) $ hg ci -m "deep copy" Invoking status precommit hook A dira/foo/largefile $ find . | sort . ./dirb ./dirb/largefile ./foo ./foo/largefile $ hg mv foo/largefile baz/largefile $ hg ci -m "moved" Invoking status precommit hook A dira/baz/largefile R dira/foo/largefile $ find . | sort . ./baz ./baz/largefile ./dirb ./dirb/largefile $ cd .. $ hg mv dira dirc moving .hglf/dira/baz/largefile to .hglf/dirc/baz/largefile (glob) moving .hglf/dira/dirb/largefile to .hglf/dirc/dirb/largefile (glob) $ find * | sort dirc dirc/baz dirc/baz/largefile dirc/dirb dirc/dirb/largefile $ hg clone -q . ../fetch $ hg --config extensions.fetch= fetch ../fetch abort: uncommitted changes [255] $ hg up -qC $ cd .. 
Clone a local repository owned by another user =================================================== #if unix-permissions We have to simulate that here by setting $HOME and removing write permissions $ ORIGHOME="$HOME" $ mkdir alice $ HOME="`pwd`/alice" $ cd alice $ hg init pubrepo $ cd pubrepo $ dd if=/dev/zero bs=1k count=11k > a-large-file 2> /dev/null $ hg add --large a-large-file $ hg commit -m "Add a large file" Invoking status precommit hook A a-large-file $ cd .. $ chmod -R a-w pubrepo $ cd .. $ mkdir bob $ HOME="`pwd`/bob" $ cd bob $ hg clone --pull ../alice/pubrepo pubrepo requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch default getting changed largefiles 1 largefiles updated, 0 removed 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd .. $ chmod -R u+w alice/pubrepo $ HOME="$ORIGHOME" #endif Symlink to a large largefile should behave the same as a symlink to a normal file ===================================================================================== #if symlink $ hg init largesymlink $ cd largesymlink $ dd if=/dev/zero bs=1k count=10k of=largefile 2>/dev/null $ hg add --large largefile $ hg commit -m "commit a large file" Invoking status precommit hook A largefile $ ln -s largefile largelink $ hg add largelink $ hg commit -m "commit a large symlink" Invoking status precommit hook A largelink $ rm -f largelink $ hg up >/dev/null $ test -f largelink [1] $ test -L largelink [1] $ rm -f largelink # make next part of the test independent of the previous $ hg up -C >/dev/null $ test -f largelink $ test -L largelink $ cd .. #endif test for pattern matching on 'hg status': ============================================== to boost performance, largefiles checks whether specified patterns are related to largefiles in working directory (NOT to STANDIN) or not. 
$ hg init statusmatch $ cd statusmatch $ mkdir -p a/b/c/d $ echo normal > a/b/c/d/e.normal.txt $ hg add a/b/c/d/e.normal.txt $ echo large > a/b/c/d/e.large.txt $ hg add --large a/b/c/d/e.large.txt $ mkdir -p a/b/c/x $ echo normal > a/b/c/x/y.normal.txt $ hg add a/b/c/x/y.normal.txt $ hg commit -m 'add files' Invoking status precommit hook A a/b/c/d/e.large.txt A a/b/c/d/e.normal.txt A a/b/c/x/y.normal.txt (1) no pattern: no performance boost $ hg status -A C a/b/c/d/e.large.txt C a/b/c/d/e.normal.txt C a/b/c/x/y.normal.txt (2) pattern not related to largefiles: performance boost $ hg status -A a/b/c/x C a/b/c/x/y.normal.txt (3) pattern related to largefiles: no performance boost $ hg status -A a/b/c/d C a/b/c/d/e.large.txt C a/b/c/d/e.normal.txt (4) pattern related to STANDIN (not to largefiles): performance boost $ hg status -A .hglf/a C .hglf/a/b/c/d/e.large.txt (5) mixed case: no performance boost $ hg status -A a/b/c/x a/b/c/d C a/b/c/d/e.large.txt C a/b/c/d/e.normal.txt C a/b/c/x/y.normal.txt verify that largefiles doesn't break filesets $ hg log --rev . --exclude "set:binary()" changeset: 0:41bd42f10efa tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add files verify that large files in subrepos handled properly $ hg init subrepo $ echo "subrepo = subrepo" > .hgsub $ hg add .hgsub $ hg ci -m "add subrepo" Invoking status precommit hook A .hgsub ? 
.hgsubstate $ echo "rev 1" > subrepo/large.txt $ hg add --large subrepo/large.txt $ hg sum parent: 1:8ee150ea2e9c tip add subrepo branch: default commit: 1 subrepos update: (current) phases: 2 draft $ hg st $ hg st -S A subrepo/large.txt $ hg ci -S -m "commit top repo" committing subrepository subrepo Invoking status precommit hook A large.txt Invoking status precommit hook M .hgsubstate # No differences $ hg st -S $ hg sum parent: 2:ce4cd0c527a6 tip commit top repo branch: default commit: (clean) update: (current) phases: 3 draft $ echo "rev 2" > subrepo/large.txt $ hg st -S M subrepo/large.txt $ hg sum parent: 2:ce4cd0c527a6 tip commit top repo branch: default commit: 1 subrepos update: (current) phases: 3 draft $ hg ci -m "this commit should fail without -S" abort: uncommitted changes in subrepository 'subrepo' (use --subrepos for recursive commit) [255] Add a normal file to the subrepo, then test archiving $ echo 'normal file' > subrepo/normal.txt $ touch large.dat $ mv subrepo/large.txt subrepo/renamed-large.txt $ hg addremove -S --dry-run adding large.dat as a largefile removing subrepo/large.txt adding subrepo/normal.txt adding subrepo/renamed-large.txt $ hg status -S ! subrepo/large.txt ? large.dat ? subrepo/normal.txt ? subrepo/renamed-large.txt $ hg addremove --dry-run subrepo removing subrepo/large.txt (glob) adding subrepo/normal.txt (glob) adding subrepo/renamed-large.txt (glob) $ hg status -S ! subrepo/large.txt ? large.dat ? subrepo/normal.txt ? subrepo/renamed-large.txt $ cd .. $ hg -R statusmatch addremove --dry-run statusmatch/subrepo removing statusmatch/subrepo/large.txt (glob) adding statusmatch/subrepo/normal.txt (glob) adding statusmatch/subrepo/renamed-large.txt (glob) $ hg -R statusmatch status -S ! subrepo/large.txt ? large.dat ? subrepo/normal.txt ? 
subrepo/renamed-large.txt $ hg -R statusmatch addremove --dry-run -S adding large.dat as a largefile removing subrepo/large.txt adding subrepo/normal.txt adding subrepo/renamed-large.txt $ cd statusmatch $ mv subrepo/renamed-large.txt subrepo/large.txt $ hg addremove subrepo adding subrepo/normal.txt (glob) $ hg forget subrepo/normal.txt $ hg addremove -S adding large.dat as a largefile adding subrepo/normal.txt $ rm large.dat $ hg addremove subrepo $ hg addremove -S removing large.dat Lock in subrepo, otherwise the change isn't archived $ hg ci -S -m "add normal file to top level" committing subrepository subrepo Invoking status precommit hook M large.txt A normal.txt Invoking status precommit hook M .hgsubstate $ hg archive -S ../lf_subrepo_archive $ find ../lf_subrepo_archive | sort ../lf_subrepo_archive ../lf_subrepo_archive/.hg_archival.txt ../lf_subrepo_archive/.hgsub ../lf_subrepo_archive/.hgsubstate ../lf_subrepo_archive/a ../lf_subrepo_archive/a/b ../lf_subrepo_archive/a/b/c ../lf_subrepo_archive/a/b/c/d ../lf_subrepo_archive/a/b/c/d/e.large.txt ../lf_subrepo_archive/a/b/c/d/e.normal.txt ../lf_subrepo_archive/a/b/c/x ../lf_subrepo_archive/a/b/c/x/y.normal.txt ../lf_subrepo_archive/subrepo ../lf_subrepo_archive/subrepo/large.txt ../lf_subrepo_archive/subrepo/normal.txt $ cat ../lf_subrepo_archive/.hg_archival.txt repo: 41bd42f10efa43698cc02052ea0977771cba506d node: d56a95e6522858bc08a724c4fe2bdee066d1c30b branch: default latesttag: null latesttagdistance: 4 changessincelatesttag: 4 Test update with subrepos. $ hg update 0 getting changed largefiles 0 largefiles updated, 1 removed 0 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg status -S $ hg update tip getting changed largefiles 1 largefiles updated, 0 removed 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg status -S # modify a large file $ echo "modified" > subrepo/large.txt $ hg st -S M subrepo/large.txt # update -C should revert the change. 
$ hg update -C getting changed largefiles 1 largefiles updated, 0 removed 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg status -S $ hg forget -v subrepo/large.txt removing subrepo/large.txt (glob) Test reverting a forgotten file $ hg revert -R subrepo subrepo/large.txt $ hg status -SA subrepo/large.txt C subrepo/large.txt $ hg rm -v subrepo/large.txt removing subrepo/large.txt (glob) $ hg revert -R subrepo subrepo/large.txt $ rm subrepo/large.txt $ hg addremove -S removing subrepo/large.txt $ hg st -S R subrepo/large.txt Test archiving a revision that references a subrepo that is not yet cloned (see test-subrepo-recursion.t): $ hg clone -U . ../empty $ cd ../empty $ hg archive --subrepos -r tip ../archive.tar.gz cloning subrepo subrepo from $TESTTMP/statusmatch/subrepo $ cd .. Test addremove, forget and others ============================================== Test that addremove picks up largefiles prior to the initial commit (issue3541) $ hg init addrm2 $ cd addrm2 $ touch large.dat $ touch large2.dat $ touch normal $ hg add --large large.dat $ hg addremove -v adding large2.dat as a largefile adding normal Test that forgetting all largefiles reverts to islfilesrepo() == False (addremove will add *.dat as normal files now) $ hg forget large.dat $ hg forget large2.dat $ hg addremove -v adding large.dat adding large2.dat Test commit's addremove option prior to the first commit $ hg forget large.dat $ hg forget large2.dat $ hg add --large large.dat $ hg ci -Am "commit" adding large2.dat as a largefile Invoking status precommit hook A large.dat A large2.dat A normal $ find .hglf | sort .hglf .hglf/large.dat .hglf/large2.dat Test actions on largefiles using relative paths from subdir $ mkdir sub $ cd sub $ echo anotherlarge > anotherlarge $ hg add --large anotherlarge $ hg st A sub/anotherlarge $ hg st anotherlarge A anotherlarge $ hg commit -m anotherlarge anotherlarge Invoking status precommit hook A sub/anotherlarge $ hg log anotherlarge 
changeset: 1:9627a577c5e9 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: anotherlarge $ hg --debug log -T '{rev}: {desc}\n' ../sub/anotherlarge updated patterns: ['../.hglf/sub/../sub/anotherlarge', '../sub/anotherlarge'] 1: anotherlarge $ hg log -G anotherlarge @ changeset: 1:9627a577c5e9 | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: anotherlarge | $ hg log glob:another* changeset: 1:9627a577c5e9 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: anotherlarge $ hg --debug log -T '{rev}: {desc}\n' -G glob:another* updated patterns: ['glob:../.hglf/sub/another*', 'glob:another*'] @ 1: anotherlarge | #if no-msys $ hg --debug log -T '{rev}: {desc}\n' 'glob:../.hglf/sub/another*' # no-msys updated patterns: ['glob:../.hglf/sub/another*'] 1: anotherlarge $ hg --debug log -G -T '{rev}: {desc}\n' 'glob:../.hglf/sub/another*' # no-msys updated patterns: ['glob:../.hglf/sub/another*'] @ 1: anotherlarge | #endif $ echo more >> anotherlarge $ hg st . M anotherlarge $ hg cat anotherlarge anotherlarge $ hg revert anotherlarge $ hg st ? sub/anotherlarge.orig Test orig files go where we want them $ echo moremore >> anotherlarge $ hg revert anotherlarge -v --config 'ui.origbackuppath=.hg/origbackups' creating directory: $TESTTMP/addrm2/.hg/origbackups/.hglf/sub (glob) saving current version of ../.hglf/sub/anotherlarge as $TESTTMP/addrm2/.hg/origbackups/.hglf/sub/anotherlarge.orig (glob) reverting ../.hglf/sub/anotherlarge (glob) creating directory: $TESTTMP/addrm2/.hg/origbackups/sub (glob) found 90c622cf65cebe75c5842f9136c459333faf392e in store found 90c622cf65cebe75c5842f9136c459333faf392e in store $ ls ../.hg/origbackups/sub anotherlarge.orig $ cd .. 
Test glob logging from the root dir $ hg log glob:**another* changeset: 1:9627a577c5e9 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: anotherlarge $ hg log -G glob:**another* @ changeset: 1:9627a577c5e9 | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: anotherlarge | $ cd .. Log from outer space $ hg --debug log -R addrm2 -T '{rev}: {desc}\n' 'addrm2/sub/anotherlarge' updated patterns: ['addrm2/.hglf/sub/anotherlarge', 'addrm2/sub/anotherlarge'] 1: anotherlarge $ hg --debug log -R addrm2 -T '{rev}: {desc}\n' 'addrm2/.hglf/sub/anotherlarge' updated patterns: ['addrm2/.hglf/sub/anotherlarge'] 1: anotherlarge Check error message while exchange ========================================================= issue3651: summary/outgoing with largefiles shows "no remote repo" unexpectedly $ mkdir issue3651 $ cd issue3651 $ hg init src $ echo a > src/a $ hg -R src add --large src/a $ hg -R src commit -m '#0' Invoking status precommit hook A a check messages when no remote repository is specified: "no remote repo" route for "hg outgoing --large" is not tested here, because it can't be reproduced easily. 
$ hg init clone1 $ hg -R clone1 -q pull src $ hg -R clone1 -q update $ hg -R clone1 paths | grep default [1] $ hg -R clone1 summary --large parent: 0:fc0bd45326d3 tip #0 branch: default commit: (clean) update: (current) phases: 1 draft largefiles: (no remote repo) check messages when there is no files to upload: $ hg -q clone src clone2 $ hg -R clone2 paths | grep default default = $TESTTMP/issue3651/src (glob) $ hg -R clone2 summary --large parent: 0:fc0bd45326d3 tip #0 branch: default commit: (clean) update: (current) phases: 1 draft largefiles: (no files to upload) $ hg -R clone2 outgoing --large comparing with $TESTTMP/issue3651/src (glob) searching for changes no changes found largefiles: no files to upload [1] $ hg -R clone2 outgoing --large --graph --template "{rev}" comparing with $TESTTMP/issue3651/src (glob) searching for changes no changes found largefiles: no files to upload check messages when there are files to upload: $ echo b > clone2/b $ hg -R clone2 add --large clone2/b $ hg -R clone2 commit -m '#1' Invoking status precommit hook A b $ hg -R clone2 summary --large parent: 1:1acbe71ce432 tip #1 branch: default commit: (clean) update: (current) phases: 2 draft largefiles: 1 entities for 1 files to upload $ hg -R clone2 outgoing --large comparing with $TESTTMP/issue3651/src (glob) searching for changes changeset: 1:1acbe71ce432 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: #1 largefiles to upload (1 entities): b $ hg -R clone2 outgoing --large --graph --template "{rev}" comparing with $TESTTMP/issue3651/src (glob) searching for changes @ 1 largefiles to upload (1 entities): b $ cp clone2/b clone2/b1 $ cp clone2/b clone2/b2 $ hg -R clone2 add --large clone2/b1 clone2/b2 $ hg -R clone2 commit -m '#2: add largefiles referring same entity' Invoking status precommit hook A b1 A b2 $ hg -R clone2 summary --large parent: 2:6095d0695d70 tip #2: add largefiles referring same entity branch: default commit: (clean) update: (current) phases: 
3 draft largefiles: 1 entities for 3 files to upload $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" comparing with $TESTTMP/issue3651/src (glob) searching for changes 1:1acbe71ce432 2:6095d0695d70 largefiles to upload (1 entities): b b1 b2 $ hg -R clone2 cat -r 1 clone2/.hglf/b 89e6c98d92887913cadf06b2adb97f26cde4849b $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" --debug --config progress.debug=true comparing with $TESTTMP/issue3651/src (glob) query 1; heads searching for changes all remote heads known locally 1:1acbe71ce432 2:6095d0695d70 finding outgoing largefiles: 0/2 revision (0.00%) finding outgoing largefiles: 1/2 revision (50.00%) largefiles to upload (1 entities): b 89e6c98d92887913cadf06b2adb97f26cde4849b b1 89e6c98d92887913cadf06b2adb97f26cde4849b b2 89e6c98d92887913cadf06b2adb97f26cde4849b $ echo bbb > clone2/b $ hg -R clone2 commit -m '#3: add new largefile entity as existing file' Invoking status precommit hook M b $ echo bbbb > clone2/b $ hg -R clone2 commit -m '#4: add new largefile entity as existing file' Invoking status precommit hook M b $ cp clone2/b1 clone2/b $ hg -R clone2 commit -m '#5: refer existing largefile entity again' Invoking status precommit hook M b $ hg -R clone2 summary --large parent: 5:036794ea641c tip #5: refer existing largefile entity again branch: default commit: (clean) update: (current) phases: 6 draft largefiles: 3 entities for 3 files to upload $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" comparing with $TESTTMP/issue3651/src (glob) searching for changes 1:1acbe71ce432 2:6095d0695d70 3:7983dce246cc 4:233f12ada4ae 5:036794ea641c largefiles to upload (3 entities): b b1 b2 $ hg -R clone2 cat -r 3 clone2/.hglf/b c801c9cfe94400963fcb683246217d5db77f9a9a $ hg -R clone2 cat -r 4 clone2/.hglf/b 13f9ed0898e315bf59dc2973fec52037b6f441a2 $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" --debug --config progress.debug=true comparing with $TESTTMP/issue3651/src (glob) query 1; heads 
searching for changes all remote heads known locally 1:1acbe71ce432 2:6095d0695d70 3:7983dce246cc 4:233f12ada4ae 5:036794ea641c finding outgoing largefiles: 0/5 revision (0.00%) finding outgoing largefiles: 1/5 revision (20.00%) finding outgoing largefiles: 2/5 revision (40.00%) finding outgoing largefiles: 3/5 revision (60.00%) finding outgoing largefiles: 4/5 revision (80.00%) largefiles to upload (3 entities): b 13f9ed0898e315bf59dc2973fec52037b6f441a2 89e6c98d92887913cadf06b2adb97f26cde4849b c801c9cfe94400963fcb683246217d5db77f9a9a b1 89e6c98d92887913cadf06b2adb97f26cde4849b b2 89e6c98d92887913cadf06b2adb97f26cde4849b Pushing revision #1 causes uploading entity 89e6c98d9288, which is shared also by largefiles b1, b2 in revision #2 and b in revision #5. Then, entity 89e6c98d9288 is not treated as "outgoing entity" at "hg summary" and "hg outgoing", even though files in outgoing revision #2 and #5 refer it. $ hg -R clone2 push -r 1 -q $ hg -R clone2 summary --large parent: 5:036794ea641c tip #5: refer existing largefile entity again branch: default commit: (clean) update: (current) phases: 6 draft largefiles: 2 entities for 1 files to upload $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" comparing with $TESTTMP/issue3651/src (glob) searching for changes 2:6095d0695d70 3:7983dce246cc 4:233f12ada4ae 5:036794ea641c largefiles to upload (2 entities): b $ hg -R clone2 outgoing --large -T "{rev}:{node|short}\n" --debug --config progress.debug=true comparing with $TESTTMP/issue3651/src (glob) query 1; heads searching for changes all remote heads known locally 2:6095d0695d70 3:7983dce246cc 4:233f12ada4ae 5:036794ea641c finding outgoing largefiles: 0/4 revision (0.00%) finding outgoing largefiles: 1/4 revision (25.00%) finding outgoing largefiles: 2/4 revision (50.00%) finding outgoing largefiles: 3/4 revision (75.00%) largefiles to upload (2 entities): b 13f9ed0898e315bf59dc2973fec52037b6f441a2 c801c9cfe94400963fcb683246217d5db77f9a9a $ cd .. 
merge action 'd' for 'local renamed directory to d2/g' which has no filename ================================================================================== $ hg init merge-action $ cd merge-action $ touch l $ hg add --large l $ mkdir d1 $ touch d1/f $ hg ci -Aqm0 Invoking status precommit hook A d1/f A l $ echo > d1/f $ touch d1/g $ hg ci -Aqm1 Invoking status precommit hook M d1/f A d1/g $ hg up -qr0 $ hg mv d1 d2 moving d1/f to d2/f (glob) $ hg ci -qm2 Invoking status precommit hook A d2/f R d1/f $ hg merge merging d2/f and d1/f to d2/f 1 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cd .. Merge conflicts: ===================== $ hg init merge $ cd merge $ echo 0 > f-different $ echo 0 > f-same $ echo 0 > f-unchanged-1 $ echo 0 > f-unchanged-2 $ hg add --large * $ hg ci -m0 Invoking status precommit hook A f-different A f-same A f-unchanged-1 A f-unchanged-2 $ echo tmp1 > f-unchanged-1 $ echo tmp1 > f-unchanged-2 $ echo tmp1 > f-same $ hg ci -m1 Invoking status precommit hook M f-same M f-unchanged-1 M f-unchanged-2 $ echo 2 > f-different $ echo 0 > f-unchanged-1 $ echo 1 > f-unchanged-2 $ echo 1 > f-same $ hg ci -m2 Invoking status precommit hook M f-different M f-same M f-unchanged-1 M f-unchanged-2 $ hg up -qr0 $ echo tmp2 > f-unchanged-1 $ echo tmp2 > f-unchanged-2 $ echo tmp2 > f-same $ hg ci -m3 Invoking status precommit hook M f-same M f-unchanged-1 M f-unchanged-2 created new head $ echo 1 > f-different $ echo 1 > f-unchanged-1 $ echo 0 > f-unchanged-2 $ echo 1 > f-same $ hg ci -m4 Invoking status precommit hook M f-different M f-same M f-unchanged-1 M f-unchanged-2 $ hg merge largefile f-different has a merge conflict ancestor was 09d2af8dd22201dd8d48e5dcfcaed281ff9422c7 keep (l)ocal e5fa44f2b31c1fb553b6021e7360d07d5d91ff5e or take (o)ther 7448d8798a4380162d4b56f9b452e2f6f9e24e7a? 
l getting changed largefiles 1 largefiles updated, 0 removed 0 files updated, 4 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat f-different 1 $ cat f-same 1 $ cat f-unchanged-1 1 $ cat f-unchanged-2 1 $ cd .. Test largefile insulation (do not enabled a side effect ======================================================== Check whether "largefiles" feature is supported only in repositories enabling largefiles extension. $ mkdir individualenabling $ cd individualenabling $ hg init enabledlocally $ echo large > enabledlocally/large $ hg -R enabledlocally add --large enabledlocally/large $ hg -R enabledlocally commit -m '#0' Invoking status precommit hook A large $ hg init notenabledlocally $ echo large > notenabledlocally/large $ hg -R notenabledlocally add --large notenabledlocally/large $ hg -R notenabledlocally commit -m '#0' Invoking status precommit hook A large $ cat >> $HGRCPATH < [extensions] > # disable globally > largefiles=! > EOF $ cat >> enabledlocally/.hg/hgrc < [extensions] > # enable locally > largefiles= > EOF $ hg -R enabledlocally root $TESTTMP/individualenabling/enabledlocally (glob) $ hg -R notenabledlocally root abort: repository requires features unknown to this Mercurial: largefiles! (see https://mercurial-scm.org/wiki/MissingRequirement for more information) [255] $ hg init push-dst $ hg -R enabledlocally push push-dst pushing to push-dst abort: required features are not supported in the destination: largefiles [255] $ hg init pull-src $ hg -R pull-src pull enabledlocally pulling from enabledlocally abort: required features are not supported in the destination: largefiles [255] $ hg clone enabledlocally clone-dst abort: repository requires features unknown to this Mercurial: largefiles! 
(see https://mercurial-scm.org/wiki/MissingRequirement for more information) [255] $ test -d clone-dst [1] $ hg clone --pull enabledlocally clone-pull-dst abort: required features are not supported in the destination: largefiles [255] $ test -d clone-pull-dst [1] #if serve Test largefiles specific peer setup, when largefiles is enabled locally (issue4109) $ hg showconfig extensions | grep largefiles extensions.largefiles=! $ mkdir -p $TESTTMP/individualenabling/usercache $ hg serve -R enabledlocally -d -p $HGPORT --pid-file hg.pid $ cat hg.pid >> $DAEMON_PIDS $ hg init pull-dst $ cat > pull-dst/.hg/hgrc < [extensions] > # enable locally > largefiles= > [largefiles] > # ignore system cache to force largefiles specific wire proto access > usercache=$TESTTMP/individualenabling/usercache > EOF $ hg -R pull-dst -q pull -u http://localhost:$HGPORT $ killdaemons.py #endif Test overridden functions work correctly even for repos disabling largefiles (issue4547) $ hg showconfig extensions | grep largefiles extensions.largefiles=! 
(test updating implied by clone) $ hg init enabled-but-no-largefiles $ echo normal1 > enabled-but-no-largefiles/normal1 $ hg -R enabled-but-no-largefiles add enabled-but-no-largefiles/normal1 $ hg -R enabled-but-no-largefiles commit -m '#0@enabled-but-no-largefiles' Invoking status precommit hook A normal1 $ cat >> enabled-but-no-largefiles/.hg/hgrc < [extensions] > # enable locally > largefiles= > EOF $ hg clone -q enabled-but-no-largefiles no-largefiles $ echo normal2 > enabled-but-no-largefiles/normal2 $ hg -R enabled-but-no-largefiles add enabled-but-no-largefiles/normal2 $ hg -R enabled-but-no-largefiles commit -m '#1@enabled-but-no-largefiles' Invoking status precommit hook A normal2 $ echo normal3 > no-largefiles/normal3 $ hg -R no-largefiles add no-largefiles/normal3 $ hg -R no-largefiles commit -m '#1@no-largefiles' Invoking status precommit hook A normal3 $ hg -R no-largefiles -q pull --rebase Invoking status precommit hook A normal3 (test reverting) $ hg init subrepo-root $ cat >> subrepo-root/.hg/hgrc < [extensions] > # enable locally > largefiles= > EOF $ echo large > subrepo-root/large $ hg -R subrepo-root add --large subrepo-root/large $ hg clone -q no-largefiles subrepo-root/no-largefiles $ cat > subrepo-root/.hgsub < no-largefiles = no-largefiles > EOF $ hg -R subrepo-root add subrepo-root/.hgsub $ hg -R subrepo-root commit -m '#0' Invoking status precommit hook A .hgsub A large ? .hgsubstate $ echo dirty >> subrepo-root/large $ echo dirty >> subrepo-root/no-largefiles/normal1 $ hg -R subrepo-root status -S M large M no-largefiles/normal1 $ hg -R subrepo-root revert --all reverting subrepo-root/.hglf/large (glob) reverting subrepo no-largefiles reverting subrepo-root/no-largefiles/normal1 (glob) $ cd .. Test "pull --rebase" when rebase is enabled before largefiles (issue3861) ========================================================================= $ hg showconfig extensions | grep largefiles extensions.largefiles=! 
$ mkdir issue3861 $ cd issue3861 $ hg init src $ hg clone -q src dst $ echo a > src/a $ hg -R src commit -Aqm "#0" Invoking status precommit hook A a $ cat >> dst/.hg/hgrc < [extensions] > largefiles= > EOF $ hg -R dst pull --rebase pulling from $TESTTMP/issue3861/src (glob) requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files nothing to rebase - working directory parent is already an ancestor of destination bf5e395ced2c 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd .. mercurial-3.7.3/tests/test-convert-hg-sink.t0000644000175000017500000003171312676531525020456 0ustar mpmmpm00000000000000 $ cat >> $HGRCPATH < [extensions] > convert= > [convert] > hg.saverev=False > EOF $ hg init orig $ cd orig $ echo foo > foo $ echo bar > bar $ hg ci -qAm 'add foo and bar' $ hg rm foo $ hg ci -m 'remove foo' $ mkdir foo $ echo file > foo/file $ hg ci -qAm 'add foo/file' $ hg tag some-tag $ hg tag -l local-tag $ hg log changeset: 3:593cbf6fb2b4 tag: local-tag tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Added tag some-tag for changeset ad681a868e44 changeset: 2:ad681a868e44 tag: some-tag user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add foo/file changeset: 1:cbba8ecc03b7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: remove foo changeset: 0:327daa9251fa user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add foo and bar $ hg phase --public -r tip $ cd .. $ hg convert orig new 2>&1 | grep -v 'subversion python bindings could not be loaded' initializing destination new repository scanning source... sorting... converting... 
3 add foo and bar 2 remove foo 1 add foo/file 0 Added tag some-tag for changeset ad681a868e44 $ cd new $ hg log -G --template '{rev} {node|short} ({phase}) "{desc}"\n' o 3 593cbf6fb2b4 (public) "Added tag some-tag for changeset ad681a868e44" | o 2 ad681a868e44 (public) "add foo/file" | o 1 cbba8ecc03b7 (public) "remove foo" | o 0 327daa9251fa (public) "add foo and bar" $ hg out ../orig comparing with ../orig searching for changes no changes found [1] dirstate should be empty: $ hg debugstate $ hg parents -q $ hg up -C 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg copy bar baz put something in the dirstate: $ hg debugstate > debugstate $ grep baz debugstate a 0 -1 unset baz copy: bar -> baz add a new revision in the original repo $ cd ../orig $ echo baz > baz $ hg ci -qAm 'add baz' $ cd .. $ hg convert orig new 2>&1 | grep -v 'subversion python bindings could not be loaded' scanning source... sorting... converting... 0 add baz $ cd new $ hg out ../orig comparing with ../orig searching for changes no changes found [1] dirstate should be the same (no output below): $ hg debugstate > new-debugstate $ diff debugstate new-debugstate no copies $ hg up -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg debugrename baz baz not renamed $ cd .. test tag rewriting $ cat > filemap < exclude foo > EOF $ hg convert --filemap filemap orig new-filemap 2>&1 | grep -v 'subversion python bindings could not be loaded' initializing destination new-filemap repository scanning source... sorting... converting... 4 add foo and bar 3 remove foo 2 add foo/file 1 Added tag some-tag for changeset ad681a868e44 0 add baz $ cd new-filemap $ hg tags tip 2:3c74706b1ff8 some-tag 0:ba8636729451 $ cd .. 
Test cases for hg-hg roundtrip Helper $ glog() > { > hg log -G --template '{rev} {node|short} ({phase}) "{desc}" files: {files}\n' $* > } Create a tricky source repo $ hg init source $ cd source $ echo 0 > 0 $ hg ci -Aqm '0: add 0' $ echo a > a $ mkdir dir $ echo b > dir/b $ hg ci -qAm '1: add a and dir/b' $ echo c > dir/c $ hg ci -qAm '2: add dir/c' $ hg copy a e $ echo b >> b $ hg ci -qAm '3: copy a to e, change b' $ hg up -qr -3 $ echo a >> a $ hg ci -qAm '4: change a' $ hg merge merging a and e to e 2 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg copy b dir/d $ hg ci -qAm '5: merge 2 and 3, copy b to dir/d' $ echo a >> a $ hg ci -qAm '6: change a' $ hg mani 0 a b dir/b dir/c dir/d e $ hg phase --public -r tip $ glog @ 6 0613c8e59a3d (public) "6: change a" files: a | o 5 717e9b37cdb7 (public) "5: merge 2 and 3, copy b to dir/d" files: dir/d e |\ | o 4 86a55cb968d5 (public) "4: change a" files: a | | o | 3 0e6e235919dd (public) "3: copy a to e, change b" files: b e | | o | 2 0394b0d5e4f7 (public) "2: add dir/c" files: dir/c |/ o 1 333546584845 (public) "1: add a and dir/b" files: a dir/b | o 0 d1a24e2ebd23 (public) "0: add 0" files: 0 $ cd .. Convert excluding rev 0 and dir/ (and thus rev2): $ cat << EOF > filemap > exclude dir > EOF $ hg convert --filemap filemap source dest --config convert.hg.revs=1:: initializing destination dest repository scanning source... sorting... converting... 
5 1: add a and dir/b 4 2: add dir/c 3 3: copy a to e, change b 2 4: change a 1 5: merge 2 and 3, copy b to dir/d 0 6: change a Verify that conversion skipped rev 2: $ glog -R dest o 4 78814e84a217 (draft) "6: change a" files: a | o 3 f7cff662c5e5 (draft) "5: merge 2 and 3, copy b to dir/d" files: e |\ | o 2 ab40a95b0072 (draft) "4: change a" files: a | | o | 1 bd51f17597bf (draft) "3: copy a to e, change b" files: b e |/ o 0 a4a1dae0fe35 (draft) "1: add a and dir/b" files: 0 a Verify mapping correct in both directions: $ cat source/.hg/shamap a4a1dae0fe3514cefd9b8541b7abbc8f44f946d5 333546584845f70c4cfecb992341aaef0e708166 bd51f17597bf32268e68a560b206898c3960cda2 0e6e235919dd8e9285ba8eb5adf703af9ad99378 ab40a95b00725307e79c2fd271000aa8af9759f4 86a55cb968d51770cba2a1630d6cc637b574580a f7cff662c5e581e6f3f1a85ffdd2bcb35825f6ba 717e9b37cdb7eb9917ca8e30aa3f986e6d5b177d 78814e84a217894517c2de392b903ed05e6871a4 0613c8e59a3ddb9789072ef52f1ed13496489bb4 $ cat dest/.hg/shamap 333546584845f70c4cfecb992341aaef0e708166 a4a1dae0fe3514cefd9b8541b7abbc8f44f946d5 0394b0d5e4f761ced559fd0bbdc6afc16cb3f7d1 a4a1dae0fe3514cefd9b8541b7abbc8f44f946d5 0e6e235919dd8e9285ba8eb5adf703af9ad99378 bd51f17597bf32268e68a560b206898c3960cda2 86a55cb968d51770cba2a1630d6cc637b574580a ab40a95b00725307e79c2fd271000aa8af9759f4 717e9b37cdb7eb9917ca8e30aa3f986e6d5b177d f7cff662c5e581e6f3f1a85ffdd2bcb35825f6ba 0613c8e59a3ddb9789072ef52f1ed13496489bb4 78814e84a217894517c2de392b903ed05e6871a4 Verify meta data converted correctly: $ hg -R dest log -r 1 --debug -p --git changeset: 1:bd51f17597bf32268e68a560b206898c3960cda2 phase: draft parent: 0:a4a1dae0fe3514cefd9b8541b7abbc8f44f946d5 parent: -1:0000000000000000000000000000000000000000 manifest: 1:040c72ed9b101773c24ac314776bfc846943781f user: test date: Thu Jan 01 00:00:00 1970 +0000 files+: b e extra: branch=default description: 3: copy a to e, change b diff --git a/b b/b new file mode 100644 --- /dev/null +++ b/b @@ -0,0 +1,1 @@ +b diff --git a/a b/e copy 
from a copy to e Verify files included and excluded correctly: $ hg -R dest manifest -r tip 0 a b e Make changes in dest and convert back: $ hg -R dest up -q $ echo dest > dest/dest $ hg -R dest ci -Aqm 'change in dest' $ hg -R dest tip changeset: 5:a2e0e3cc6d1d tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: change in dest (converting merges back after using a filemap will probably cause chaos so we exclude merges.) $ hg convert dest source --config convert.hg.revs='!merge()' scanning source... sorting... converting... 0 change in dest Verify the conversion back: $ hg -R source log --debug -r tip changeset: 7:e6d364a69ff1248b2099e603b0c145504cade6f0 tag: tip phase: draft parent: 6:0613c8e59a3ddb9789072ef52f1ed13496489bb4 parent: -1:0000000000000000000000000000000000000000 manifest: 7:aa3e9542f3b76d4f1f1b2e9c7ce9dbb48b6a95ec user: test date: Thu Jan 01 00:00:00 1970 +0000 files+: dest extra: branch=default description: change in dest Files that had been excluded are still present: $ hg -R source manifest -r tip 0 a b dest dir/b dir/c dir/d e More source changes $ cd source $ echo 1 >> a $ hg ci -m '8: source first branch' created new head $ hg up -qr -2 $ echo 2 >> a $ hg ci -m '9: source second branch' $ hg merge -q --tool internal:local $ hg ci -m '10: source merge' $ echo >> a $ hg ci -m '11: source change' $ hg mani 0 a b dest dir/b dir/c dir/d e $ glog -r 6: @ 11 0c8927d1f7f4 (draft) "11: source change" files: a | o 10 9ccb7ee8d261 (draft) "10: source merge" files: a |\ | o 9 f131b1518dba (draft) "9: source second branch" files: a | | o | 8 669cf0e74b50 (draft) "8: source first branch" files: a | | | o 7 e6d364a69ff1 (draft) "change in dest" files: dest |/ o 6 0613c8e59a3d (public) "6: change a" files: a | $ cd .. $ hg convert --filemap filemap source dest --config convert.hg.revs=3: scanning source... sorting... converting... 
3 8: source first branch 2 9: source second branch 1 10: source merge 0 11: source change $ glog -R dest o 9 8432d597b263 (draft) "11: source change" files: a | o 8 632ffacdcd6f (draft) "10: source merge" files: a |\ | o 7 049cfee90ee6 (draft) "9: source second branch" files: a | | o | 6 9b6845e036e5 (draft) "8: source first branch" files: a | | | @ 5 a2e0e3cc6d1d (draft) "change in dest" files: dest |/ o 4 78814e84a217 (draft) "6: change a" files: a | o 3 f7cff662c5e5 (draft) "5: merge 2 and 3, copy b to dir/d" files: e |\ | o 2 ab40a95b0072 (draft) "4: change a" files: a | | o | 1 bd51f17597bf (draft) "3: copy a to e, change b" files: b e |/ o 0 a4a1dae0fe35 (draft) "1: add a and dir/b" files: 0 a $ cd .. Two way tests $ hg init 0 $ echo f > 0/f $ echo a > 0/a-only $ echo b > 0/b-only $ hg -R 0 ci -Aqm0 $ cat << EOF > filemap-a > exclude b-only > EOF $ cat << EOF > filemap-b > exclude a-only > EOF $ hg convert --filemap filemap-a 0 a initializing destination a repository scanning source... sorting... converting... 0 0 $ hg -R a up -q $ echo a > a/f $ hg -R a ci -ma $ hg convert --filemap filemap-b 0 b initializing destination b repository scanning source... sorting... converting... 0 0 $ hg -R b up -q $ echo b > b/f $ hg -R b ci -mb $ tail */.hg/shamap ==> 0/.hg/shamap <== 86f3f774ffb682bffb5dc3c1d3b3da637cb9a0d6 8a028c7c77f6c7bd6d63bc3f02ca9f779eabf16a dd9f218eb91fb857f2a62fe023e1d64a4e7812fe 8a028c7c77f6c7bd6d63bc3f02ca9f779eabf16a ==> a/.hg/shamap <== 8a028c7c77f6c7bd6d63bc3f02ca9f779eabf16a 86f3f774ffb682bffb5dc3c1d3b3da637cb9a0d6 ==> b/.hg/shamap <== 8a028c7c77f6c7bd6d63bc3f02ca9f779eabf16a dd9f218eb91fb857f2a62fe023e1d64a4e7812fe $ hg convert a 0 scanning source... sorting... converting... 0 a $ hg convert b 0 scanning source... sorting... converting... 
0 b $ hg -R 0 log -G o changeset: 2:637fbbbe96b6 | tag: tip | parent: 0:8a028c7c77f6 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | | o changeset: 1:ec7b9c96e692 |/ user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: a | @ changeset: 0:8a028c7c77f6 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0 $ hg convert --filemap filemap-b 0 a --config convert.hg.revs=1:: scanning source... sorting... converting... $ hg -R 0 up -r1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo f >> 0/f $ hg -R 0 ci -mx $ hg convert --filemap filemap-b 0 a --config convert.hg.revs=1:: scanning source... sorting... converting... 0 x $ hg -R a log -G -T '{rev} {desc|firstline} ({files})\n' o 2 x (f) | @ 1 a (f) | o 0 0 (a-only f) $ hg -R a mani -r tip a-only f An additional round, demonstrating that unchanged files don't get converted $ echo f >> 0/f $ echo f >> 0/a-only $ hg -R 0 ci -m "extra f+a-only change" $ hg convert --filemap filemap-b 0 a --config convert.hg.revs=1:: scanning source... sorting... converting... 0 extra f+a-only change $ hg -R a log -G -T '{rev} {desc|firstline} ({files})\n' o 3 extra f+a-only change (f) | o 2 x (f) | @ 1 a (f) | o 0 0 (a-only f) Conversion after rollback $ hg -R a rollback -f repository tip rolled back to revision 2 (undo convert) $ hg convert --filemap filemap-b 0 a --config convert.hg.revs=1:: scanning source... sorting... converting... 0 extra f+a-only change $ hg -R a log -G -T '{rev} {desc|firstline} ({files})\n' o 3 extra f+a-only change (f) | o 2 x (f) | @ 1 a (f) | o 0 0 (a-only f) Convert with --full adds and removes files that didn't change $ echo f >> 0/f $ hg -R 0 ci -m "f" $ hg convert --filemap filemap-b --full 0 a --config convert.hg.revs=1:: scanning source... sorting... converting... 
0 f $ hg -R a status --change tip M f A b-only R a-only mercurial-3.7.3/tests/test-convert-cvs.t0000644000175000017500000002320612676531525017707 0ustar mpmmpm00000000000000#require cvs $ cvscall() > { > cvs -f "$@" > } $ hgcat() > { > hg --cwd src-hg cat -r tip "$1" > } $ echo "[extensions]" >> $HGRCPATH $ echo "convert = " >> $HGRCPATH $ cat > cvshooks.py < def cvslog(ui,repo,hooktype,log): > print "%s hook: %d entries"%(hooktype,len(log)) > > def cvschangesets(ui,repo,hooktype,changesets): > print "%s hook: %d changesets"%(hooktype,len(changesets)) > EOF $ hookpath=`pwd` $ cat <> $HGRCPATH > [hooks] > cvslog = python:$hookpath/cvshooks.py:cvslog > cvschangesets = python:$hookpath/cvshooks.py:cvschangesets > EOF create cvs repository $ mkdir cvsrepo $ cd cvsrepo $ CVSROOT=`pwd` $ export CVSROOT $ CVS_OPTIONS=-f $ export CVS_OPTIONS $ cd .. $ rmdir cvsrepo $ cvscall -q -d "$CVSROOT" init create source directory $ mkdir src-temp $ cd src-temp $ echo a > a $ mkdir b $ cd b $ echo c > c $ cd .. import source directory $ cvscall -q import -m import src INITIAL start N src/a N src/b/c No conflicts created by this import $ cd .. checkout source directory $ cvscall -q checkout src U src/a U src/b/c commit a new revision changing b/c $ cd src $ sleep 1 $ echo c >> b/c $ cvscall -q commit -mci0 . | grep '<--' $TESTTMP/cvsrepo/src/b/c,v <-- *c (glob) $ cd .. convert fresh repo and also check localtimezone option NOTE: This doesn't check all time zones -- it merely determines that the configuration option is taking effect. An arbitrary (U.S.) time zone is used here. TZ=US/Hawaii is selected since it does not use DST (unlike other U.S. time zones) and is always a fixed difference from UTC. $ TZ=US/Hawaii hg convert --config convert.localtimezone=True src src-hg initializing destination src-hg repository connecting to $TESTTMP/cvsrepo scanning source... 
collecting CVS rlog 5 log entries cvslog hook: 5 entries creating changesets 3 changeset entries cvschangesets hook: 3 changesets sorting... converting... 2 Initial revision 1 ci0 0 import updating tags $ hgcat a a $ hgcat b/c c c convert fresh repo with --filemap $ echo include b/c > filemap $ hg convert --filemap filemap src src-filemap initializing destination src-filemap repository connecting to $TESTTMP/cvsrepo scanning source... collecting CVS rlog 5 log entries cvslog hook: 5 entries creating changesets 3 changeset entries cvschangesets hook: 3 changesets sorting... converting... 2 Initial revision 1 ci0 0 import filtering out empty revision repository tip rolled back to revision 1 (undo convert) updating tags $ hgcat b/c c c $ hg -R src-filemap log --template '{rev} {desc} files: {files}\n' 2 update tags files: .hgtags 1 ci0 files: b/c 0 Initial revision files: b/c convert full repository (issue1649) $ cvscall -q -d "$CVSROOT" checkout -d srcfull "." | grep -v CVSROOT U srcfull/src/a U srcfull/src/b/c $ ls srcfull CVS CVSROOT src $ hg convert srcfull srcfull-hg \ > | grep -v 'log entries' | grep -v 'hook:' \ > | grep -v '^[0-3] .*' # filter instable changeset order initializing destination srcfull-hg repository connecting to $TESTTMP/cvsrepo scanning source... collecting CVS rlog creating changesets 4 changeset entries sorting... converting... updating tags $ hg cat -r tip --cwd srcfull-hg src/a a $ hg cat -r tip --cwd srcfull-hg src/b/c c c commit new file revisions $ cd src $ echo a >> a $ echo c >> b/c $ cvscall -q commit -mci1 . | grep '<--' $TESTTMP/cvsrepo/src/a,v <-- a $TESTTMP/cvsrepo/src/b/c,v <-- *c (glob) $ cd .. convert again $ TZ=US/Hawaii hg convert --config convert.localtimezone=True src src-hg connecting to $TESTTMP/cvsrepo scanning source... collecting CVS rlog 7 log entries cvslog hook: 7 entries creating changesets 4 changeset entries cvschangesets hook: 4 changesets sorting... converting... 
0 ci1 $ hgcat a a a $ hgcat b/c c c c convert again with --filemap $ hg convert --filemap filemap src src-filemap connecting to $TESTTMP/cvsrepo scanning source... collecting CVS rlog 7 log entries cvslog hook: 7 entries creating changesets 4 changeset entries cvschangesets hook: 4 changesets sorting... converting... 0 ci1 $ hgcat b/c c c c $ hg -R src-filemap log --template '{rev} {desc} files: {files}\n' 3 ci1 files: b/c 2 update tags files: .hgtags 1 ci0 files: b/c 0 Initial revision files: b/c commit branch $ cd src $ cvs -q update -r1.1 b/c U b/c $ cvs -q tag -b branch T a T b/c $ cvs -q update -r branch > /dev/null $ sleep 1 $ echo d >> b/c $ cvs -q commit -mci2 . | grep '<--' $TESTTMP/cvsrepo/src/b/c,v <-- *c (glob) $ cd .. convert again $ TZ=US/Hawaii hg convert --config convert.localtimezone=True src src-hg connecting to $TESTTMP/cvsrepo scanning source... collecting CVS rlog 8 log entries cvslog hook: 8 entries creating changesets 5 changeset entries cvschangesets hook: 5 changesets sorting... converting... 0 ci2 $ hgcat b/c c d convert again with --filemap $ TZ=US/Hawaii hg convert --config convert.localtimezone=True --filemap filemap src src-filemap connecting to $TESTTMP/cvsrepo scanning source... collecting CVS rlog 8 log entries cvslog hook: 8 entries creating changesets 5 changeset entries cvschangesets hook: 5 changesets sorting... converting... 0 ci2 $ hgcat b/c c d $ hg -R src-filemap log --template '{rev} {desc} files: {files}\n' 4 ci2 files: b/c 3 ci1 files: b/c 2 update tags files: .hgtags 1 ci0 files: b/c 0 Initial revision files: b/c commit a new revision with funny log message $ cd src $ sleep 1 $ echo e >> a $ cvscall -q commit -m'funny > ---------------------------- > log message' . | grep '<--' |\ > sed -e 's:.*src/\(.*\),v.*:checking in src/\1,v:g' checking in src/a,v commit new file revisions with some fuzz $ sleep 1 $ echo f >> a $ cvscall -q commit -mfuzzy . 
| grep '<--' $TESTTMP/cvsrepo/src/a,v <-- a $ sleep 4 # the two changes will be split if fuzz < 4 $ echo g >> b/c $ cvscall -q commit -mfuzzy . | grep '<--' $TESTTMP/cvsrepo/src/b/c,v <-- *c (glob) $ cd .. convert again $ TZ=US/Hawaii hg convert --config convert.cvsps.fuzz=2 --config convert.localtimezone=True src src-hg connecting to $TESTTMP/cvsrepo scanning source... collecting CVS rlog 11 log entries cvslog hook: 11 entries creating changesets 8 changeset entries cvschangesets hook: 8 changesets sorting... converting... 2 funny 1 fuzzy 0 fuzzy $ hg -R src-hg log -G --template '{rev} ({branches}) {desc} date: {date|date} files: {files}\n' o 8 (branch) fuzzy date: * -1000 files: b/c (glob) | o 7 (branch) fuzzy date: * -1000 files: a (glob) | o 6 (branch) funny | ---------------------------- | log message date: * -1000 files: a (glob) o 5 (branch) ci2 date: * -1000 files: b/c (glob) o 4 () ci1 date: * -1000 files: a b/c (glob) | o 3 () update tags date: * +0000 files: .hgtags (glob) | | o 2 (INITIAL) import date: * -1000 files: (glob) | | o | 1 () ci0 date: * -1000 files: b/c (glob) |/ o 0 () Initial revision date: * -1000 files: a b/c (glob) testing debugcvsps $ cd src $ hg debugcvsps --fuzz=2 -x >/dev/null commit a new revision changing a and removing b/c $ cvscall -q update -A U a U b/c $ sleep 1 $ echo h >> a $ cvscall -Q remove -f b/c $ cvscall -q commit -mci | grep '<--' $TESTTMP/cvsrepo/src/a,v <-- a $TESTTMP/cvsrepo/src/b/c,v <-- *c (glob) update and verify the cvsps cache $ hg debugcvsps --fuzz=2 -u collecting CVS rlog 13 log entries cvslog hook: 13 entries creating changesets 11 changeset entries cvschangesets hook: 11 changesets --------------------- PatchSet 1 Date: * (glob) Author: * (glob) Branch: HEAD Tag: (none) Branchpoints: INITIAL Log: Initial revision Members: a:INITIAL->1.1 --------------------- PatchSet 2 Date: * (glob) Author: * (glob) Branch: HEAD Tag: (none) Branchpoints: INITIAL, branch Log: Initial revision Members: b/c:INITIAL->1.1 
--------------------- PatchSet 3 Date: * (glob) Author: * (glob) Branch: INITIAL Tag: start Log: import Members: a:1.1->1.1.1.1 b/c:1.1->1.1.1.1 --------------------- PatchSet 4 Date: * (glob) Author: * (glob) Branch: HEAD Tag: (none) Log: ci0 Members: b/c:1.1->1.2 --------------------- PatchSet 5 Date: * (glob) Author: * (glob) Branch: HEAD Tag: (none) Branchpoints: branch Log: ci1 Members: a:1.1->1.2 --------------------- PatchSet 6 Date: * (glob) Author: * (glob) Branch: HEAD Tag: (none) Log: ci1 Members: b/c:1.2->1.3 --------------------- PatchSet 7 Date: * (glob) Author: * (glob) Branch: branch Tag: (none) Log: ci2 Members: b/c:1.1->1.1.2.1 --------------------- PatchSet 8 Date: * (glob) Author: * (glob) Branch: branch Tag: (none) Log: funny ---------------------------- log message Members: a:1.2->1.2.2.1 --------------------- PatchSet 9 Date: * (glob) Author: * (glob) Branch: branch Tag: (none) Log: fuzzy Members: a:1.2.2.1->1.2.2.2 --------------------- PatchSet 10 Date: * (glob) Author: * (glob) Branch: branch Tag: (none) Log: fuzzy Members: b/c:1.1.2.1->1.1.2.2 --------------------- PatchSet 11 Date: * (glob) Author: * (glob) Branch: HEAD Tag: (none) Log: ci Members: a:1.2->1.3 b/c:1.3->1.4(DEAD) $ cd .. mercurial-3.7.3/tests/test-ssh-clone-r.t0000644000175000017500000001373712676531525017600 0ustar mpmmpm00000000000000This test tries to exercise the ssh functionality with a dummy script creating 'remote' repo $ hg init remote $ cd remote $ hg unbundle "$TESTDIR/bundles/remote.hg" adding changesets adding manifests adding file changes added 9 changesets with 7 changes to 4 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg up tip 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd .. clone remote via stream $ for i in 0 1 2 3 4 5 6 7 8; do > hg clone -e "python \"$TESTDIR/dummyssh\"" --uncompressed -r "$i" ssh://user@dummy/remote test-"$i" > if cd test-"$i"; then > hg verify > cd .. 
> fi > done adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 1 changesets, 1 total revisions adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 2 changesets, 2 total revisions adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 3 changesets, 3 total revisions adding changesets adding manifests adding file changes added 4 changesets with 4 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 4 changesets, 4 total revisions adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 2 changesets, 2 total revisions adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking 
files 1 files, 3 changesets, 3 total revisions adding changesets adding manifests adding file changes added 4 changesets with 5 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 4 changesets, 5 total revisions adding changesets adding manifests adding file changes added 5 changesets with 6 changes to 3 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking files 3 files, 5 changesets, 6 total revisions adding changesets adding manifests adding file changes added 5 changesets with 5 changes to 2 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 5 changesets, 5 total revisions $ cd test-8 $ hg pull ../test-7 pulling from ../test-7 searching for changes adding changesets adding manifests adding file changes added 4 changesets with 2 changes to 3 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 4 files, 9 changesets, 7 total revisions $ cd .. 
$ cd test-1 $ hg pull -e "python \"$TESTDIR/dummyssh\"" -r 4 ssh://user@dummy/remote pulling from ssh://user@dummy/remote searching for changes adding changesets adding manifests adding file changes added 1 changesets with 0 changes to 0 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 3 changesets, 2 total revisions $ hg pull -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote pulling from ssh://user@dummy/remote searching for changes adding changesets adding manifests adding file changes added 6 changesets with 5 changes to 4 files (run 'hg update' to get a working copy) $ cd .. $ cd test-2 $ hg pull -e "python \"$TESTDIR/dummyssh\"" -r 5 ssh://user@dummy/remote pulling from ssh://user@dummy/remote searching for changes adding changesets adding manifests adding file changes added 2 changesets with 0 changes to 0 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 5 changesets, 3 total revisions $ hg pull -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote pulling from ssh://user@dummy/remote searching for changes adding changesets adding manifests adding file changes added 4 changesets with 4 changes to 4 files (run 'hg update' to get a working copy) $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 4 files, 9 changesets, 7 total revisions $ cd .. 
mercurial-3.7.3/tests/test-subrepo-missing.t0000644000175000017500000000656512676531525020575 0ustar mpmmpm00000000000000 $ hg init repo $ cd repo $ hg init subrepo $ echo a > subrepo/a $ hg -R subrepo ci -Am adda adding a $ echo 'subrepo = subrepo' > .hgsub $ hg ci -Am addsubrepo adding .hgsub $ echo b > subrepo/b $ hg -R subrepo ci -Am addb adding b $ hg ci -m updatedsub ignore blanklines in .hgsubstate >>> file('.hgsubstate', 'wb').write('\n\n \t \n \n') $ hg st --subrepos M .hgsubstate $ hg revert -qC .hgsubstate abort more gracefully on .hgsubstate parsing error $ cp .hgsubstate .hgsubstate.old >>> file('.hgsubstate', 'wb').write('\ninvalid') $ hg st --subrepos --cwd $TESTTMP -R $TESTTMP/repo abort: invalid subrepository revision specifier in 'repo/.hgsubstate' line 2 [255] $ mv .hgsubstate.old .hgsubstate delete .hgsub and revert it $ rm .hgsub $ hg revert .hgsub warning: subrepo spec file '.hgsub' not found warning: subrepo spec file '.hgsub' not found warning: subrepo spec file '.hgsub' not found delete .hgsubstate and revert it $ rm .hgsubstate $ hg revert .hgsubstate delete .hgsub and update $ rm .hgsub $ hg up 0 --cwd $TESTTMP -R $TESTTMP/repo warning: subrepo spec file 'repo/.hgsub' not found warning: subrepo spec file 'repo/.hgsub' not found 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg st warning: subrepo spec file '.hgsub' not found ! .hgsub $ ls subrepo a delete .hgsubstate and update $ hg up -C warning: subrepo spec file '.hgsub' not found warning: subrepo spec file '.hgsub' not found 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm .hgsubstate $ hg up 0 remote changed .hgsubstate which local deleted use (c)hanged version or leave (d)eleted? 
c 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg st $ ls subrepo a Enable obsolete $ cat >> $HGRCPATH << EOF > [ui] > logtemplate= {rev}:{node|short} {desc|firstline} > [phases] > publish=False > [experimental] > evolution=createmarkers > EOF check that we can update parent repo with missing (amended) subrepo revision $ hg up --repository subrepo -r tip 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg ci -m "updated subrepo to tip" created new head $ cd subrepo $ hg update -r tip 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo foo > a $ hg commit --amend -m "addb (amended)" $ cd .. $ hg update --clean . revision 102a90ea7b4a in subrepo subrepo is hidden 1 files updated, 0 files merged, 0 files removed, 0 files unresolved check that --hidden is propagated to the subrepo $ hg -R subrepo up tip 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg ci -m 'commit with amended subrepo' $ echo bar > subrepo/a $ hg -R subrepo ci --amend -m "amend a (again)" $ hg --hidden cat subrepo/a foo verify will warn if locked-in subrepo revisions are hidden or missing $ hg ci -m "amended subrepo (again)" $ hg --config extensions.strip= --hidden strip -R subrepo -qr 'tip' $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 5 changesets, 5 total revisions checking subrepo links subrepo 'subrepo' is hidden in revision a66de08943b6 subrepo 'subrepo' is hidden in revision 674d05939c1e subrepo 'subrepo' not found in revision a7d05d9055a4 $ cd .. 
mercurial-3.7.3/tests/test-merge9.t0000644000175000017500000000330412676531525016623 0ustar mpmmpm00000000000000test that we don't interrupt the merge session if a file-level merge failed $ hg init repo $ cd repo $ echo foo > foo $ echo a > bar $ hg ci -Am 'add foo' adding bar adding foo $ hg mv foo baz $ echo b >> bar $ echo quux > quux1 $ hg ci -Am 'mv foo baz' adding quux1 $ hg up -qC 0 $ echo >> foo $ echo c >> bar $ echo quux > quux2 $ hg ci -Am 'change foo' adding quux2 created new head test with the rename on the remote side $ HGMERGE=false hg merge merging bar merging foo and baz to baz merging bar failed! 1 files updated, 1 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ hg resolve -l U bar R baz test with the rename on the local side $ hg up -C 1 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ HGMERGE=false hg merge merging bar merging baz and foo to baz merging bar failed! 1 files updated, 1 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] show unresolved $ hg resolve -l U bar R baz unmark baz $ hg resolve -u baz show $ hg resolve -l U bar U baz $ hg st M bar M baz M quux2 ? bar.orig re-resolve baz $ hg resolve baz merging baz and foo to baz after resolve $ hg resolve -l U bar R baz resolve all warning $ hg resolve abort: no files or directories specified (use --all to re-merge all unresolved files) [255] resolve all $ hg resolve -a merging bar warning: conflicts while merging bar! (edit, then use 'hg resolve --mark') [1] after $ hg resolve -l U bar R baz $ cd .. 
mercurial-3.7.3/tests/test-archive.t0000644000175000017500000002512312676531525017057 0ustar mpmmpm00000000000000#require serve $ hg init test $ cd test $ echo foo>foo $ hg commit -Am 1 -d '1 0' adding foo $ echo bar>bar $ hg commit -Am 2 -d '2 0' adding bar $ mkdir baz $ echo bletch>baz/bletch $ hg commit -Am 3 -d '1000000000 0' adding baz/bletch $ hg init subrepo $ touch subrepo/sub $ hg -q -R subrepo ci -Am "init subrepo" $ echo "subrepo = subrepo" > .hgsub $ hg add .hgsub $ hg ci -m "add subrepo" $ echo "[web]" >> .hg/hgrc $ echo "name = test-archive" >> .hg/hgrc $ echo "archivesubrepos = True" >> .hg/hgrc $ cp .hg/hgrc .hg/hgrc-base > test_archtype() { > echo "allow_archive = $1" >> .hg/hgrc > hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log > cat hg.pid >> $DAEMON_PIDS > echo % $1 allowed should give 200 > get-with-headers.py localhost:$HGPORT "archive/tip.$2" | head -n 1 > echo % $3 and $4 disallowed should both give 403 > get-with-headers.py localhost:$HGPORT "archive/tip.$3" | head -n 1 > get-with-headers.py localhost:$HGPORT "archive/tip.$4" | head -n 1 > killdaemons.py > cat errors.log > cp .hg/hgrc-base .hg/hgrc > } check http return codes $ test_archtype gz tar.gz tar.bz2 zip % gz allowed should give 200 200 Script output follows % tar.bz2 and zip disallowed should both give 403 403 Archive type not allowed: bz2 403 Archive type not allowed: zip $ test_archtype bz2 tar.bz2 zip tar.gz % bz2 allowed should give 200 200 Script output follows % zip and tar.gz disallowed should both give 403 403 Archive type not allowed: zip 403 Archive type not allowed: gz $ test_archtype zip zip tar.gz tar.bz2 % zip allowed should give 200 200 Script output follows % tar.gz and tar.bz2 disallowed should both give 403 403 Archive type not allowed: gz 403 Archive type not allowed: bz2 $ echo "allow_archive = gz bz2 zip" >> .hg/hgrc $ hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log $ cat hg.pid >> $DAEMON_PIDS invalid arch type should give 404 $ 
get-with-headers.py localhost:$HGPORT "archive/tip.invalid" | head -n 1 404 Unsupported archive type: None $ TIP=`hg id -v | cut -f1 -d' '` $ QTIP=`hg id -q` $ cat > getarchive.py < import os, sys, urllib2 > try: > # Set stdout to binary mode for win32 platforms > import msvcrt > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY) > except ImportError: > pass > if len(sys.argv) <= 3: > node, archive = sys.argv[1:] > requeststr = 'cmd=archive;node=%s;type=%s' % (node, archive) > else: > node, archive, file = sys.argv[1:] > requeststr = 'cmd=archive;node=%s;type=%s;file=%s' % (node, archive, file) > try: > f = urllib2.urlopen('http://127.0.0.1:%s/?%s' > % (os.environ['HGPORT'], requeststr)) > sys.stdout.write(f.read()) > except urllib2.HTTPError, e: > sys.stderr.write(str(e) + '\n') > EOF $ python getarchive.py "$TIP" gz | gunzip | tar tf - 2>/dev/null test-archive-1701ef1f1510/.hg_archival.txt test-archive-1701ef1f1510/.hgsub test-archive-1701ef1f1510/.hgsubstate test-archive-1701ef1f1510/bar test-archive-1701ef1f1510/baz/bletch test-archive-1701ef1f1510/foo test-archive-1701ef1f1510/subrepo/sub $ python getarchive.py "$TIP" bz2 | bunzip2 | tar tf - 2>/dev/null test-archive-1701ef1f1510/.hg_archival.txt test-archive-1701ef1f1510/.hgsub test-archive-1701ef1f1510/.hgsubstate test-archive-1701ef1f1510/bar test-archive-1701ef1f1510/baz/bletch test-archive-1701ef1f1510/foo test-archive-1701ef1f1510/subrepo/sub $ python getarchive.py "$TIP" zip > archive.zip $ unzip -t archive.zip Archive: archive.zip testing: test-archive-1701ef1f1510/.hg_archival.txt OK testing: test-archive-1701ef1f1510/.hgsub OK testing: test-archive-1701ef1f1510/.hgsubstate OK testing: test-archive-1701ef1f1510/bar OK testing: test-archive-1701ef1f1510/baz/bletch OK testing: test-archive-1701ef1f1510/foo OK testing: test-archive-1701ef1f1510/subrepo/sub OK No errors detected in compressed data of archive.zip. 
test that we can download single directories and files $ python getarchive.py "$TIP" gz baz | gunzip | tar tf - 2>/dev/null test-archive-1701ef1f1510/baz/bletch $ python getarchive.py "$TIP" gz foo | gunzip | tar tf - 2>/dev/null test-archive-1701ef1f1510/foo test that we detect file patterns that match no files $ python getarchive.py "$TIP" gz foobar HTTP Error 404: file(s) not found: foobar test that we reject unsafe patterns $ python getarchive.py "$TIP" gz relre:baz HTTP Error 404: file(s) not found: relre:baz $ killdaemons.py $ hg archive -t tar test.tar $ tar tf test.tar test/.hg_archival.txt test/.hgsub test/.hgsubstate test/bar test/baz/bletch test/foo $ hg archive --debug -t tbz2 -X baz test.tar.bz2 --config progress.debug=true archiving: 0/4 files (0.00%) archiving: .hgsub 1/4 files (25.00%) archiving: .hgsubstate 2/4 files (50.00%) archiving: bar 3/4 files (75.00%) archiving: foo 4/4 files (100.00%) $ bunzip2 -dc test.tar.bz2 | tar tf - 2>/dev/null test/.hg_archival.txt test/.hgsub test/.hgsubstate test/bar test/foo $ hg archive -t tgz -p %b-%h test-%h.tar.gz $ gzip -dc test-$QTIP.tar.gz | tar tf - 2>/dev/null test-1701ef1f1510/.hg_archival.txt test-1701ef1f1510/.hgsub test-1701ef1f1510/.hgsubstate test-1701ef1f1510/bar test-1701ef1f1510/baz/bletch test-1701ef1f1510/foo $ hg archive autodetected_test.tar $ tar tf autodetected_test.tar autodetected_test/.hg_archival.txt autodetected_test/.hgsub autodetected_test/.hgsubstate autodetected_test/bar autodetected_test/baz/bletch autodetected_test/foo The '-t' should override autodetection $ hg archive -t tar autodetect_override_test.zip $ tar tf autodetect_override_test.zip autodetect_override_test.zip/.hg_archival.txt autodetect_override_test.zip/.hgsub autodetect_override_test.zip/.hgsubstate autodetect_override_test.zip/bar autodetect_override_test.zip/baz/bletch autodetect_override_test.zip/foo $ for ext in tar tar.gz tgz tar.bz2 tbz2 zip; do > hg archive auto_test.$ext > if [ -d auto_test.$ext ]; then > 
echo "extension $ext was not autodetected." > fi > done $ cat > md5comp.py < try: > from hashlib import md5 > except ImportError: > from md5 import md5 > import sys > f1, f2 = sys.argv[1:3] > h1 = md5(file(f1, 'rb').read()).hexdigest() > h2 = md5(file(f2, 'rb').read()).hexdigest() > print h1 == h2 or "md5 differ: " + repr((h1, h2)) > EOF archive name is stored in the archive, so create similar archives and rename them afterwards. $ hg archive -t tgz tip.tar.gz $ mv tip.tar.gz tip1.tar.gz $ sleep 1 $ hg archive -t tgz tip.tar.gz $ mv tip.tar.gz tip2.tar.gz $ python md5comp.py tip1.tar.gz tip2.tar.gz True $ hg archive -t zip -p /illegal test.zip abort: archive prefix contains illegal components [255] $ hg archive -t zip -p very/../bad test.zip $ hg archive --config ui.archivemeta=false -t zip -r 2 test.zip $ unzip -t test.zip Archive: test.zip testing: test/bar OK testing: test/baz/bletch OK testing: test/foo OK No errors detected in compressed data of test.zip. $ hg archive -t tar - | tar tf - 2>/dev/null test-1701ef1f1510/.hg_archival.txt test-1701ef1f1510/.hgsub test-1701ef1f1510/.hgsubstate test-1701ef1f1510/bar test-1701ef1f1510/baz/bletch test-1701ef1f1510/foo $ hg archive -r 0 -t tar rev-%r.tar $ [ -f rev-0.tar ] test .hg_archival.txt $ hg archive ../test-tags $ cat ../test-tags/.hg_archival.txt repo: daa7f7c60e0a224faa4ff77ca41b2760562af264 node: 1701ef1f151069b8747038e93b5186bb43a47504 branch: default latesttag: null latesttagdistance: 4 changessincelatesttag: 4 $ hg tag -r 2 mytag $ hg tag -r 2 anothertag $ hg archive -r 2 ../test-lasttag $ cat ../test-lasttag/.hg_archival.txt repo: daa7f7c60e0a224faa4ff77ca41b2760562af264 node: 2c0277f05ed49d1c8328fb9ba92fba7a5ebcb33e branch: default tag: anothertag tag: mytag $ hg archive -t bogus test.bogus abort: unknown archive type 'bogus' [255] enable progress extension: $ cp $HGRCPATH $HGRCPATH.no-progress $ cat >> $HGRCPATH < [extensions] > progress = > [progress] > assume-tty = 1 > format = topic bar number > 
delay = 0 > refresh = 0 > width = 60 > EOF $ hg archive ../with-progress \r (no-eol) (esc) archiving [ ] 0/6\r (no-eol) (esc) archiving [======> ] 1/6\r (no-eol) (esc) archiving [=============> ] 2/6\r (no-eol) (esc) archiving [====================> ] 3/6\r (no-eol) (esc) archiving [===========================> ] 4/6\r (no-eol) (esc) archiving [==================================> ] 5/6\r (no-eol) (esc) archiving [==========================================>] 6/6\r (no-eol) (esc) \r (no-eol) (esc) cleanup after progress extension test: $ cp $HGRCPATH.no-progress $HGRCPATH server errors $ cat errors.log empty repo $ hg init ../empty $ cd ../empty $ hg archive ../test-empty abort: no working directory: please specify a revision [255] old file -- date clamped to 1980 $ touch -t 197501010000 old $ hg add old $ hg commit -m old $ hg archive ../old.zip $ unzip -l ../old.zip Archive: ../old.zip \s*Length.* (re) *-----* (glob) *172*80*00:00*old/.hg_archival.txt (glob) *0*80*00:00*old/old (glob) *-----* (glob) \s*172\s+2 files (re) show an error when a provided pattern matches no files $ hg archive -I file_that_does_not_exist.foo ../empty.zip abort: no files match the archive pattern [255] $ hg archive -X * ../empty.zip abort: no files match the archive pattern [255] $ cd .. issue3600: check whether "hg archive" can create archive files which are extracted with expected timestamp, even though TZ is not configured as GMT. $ mkdir issue3600 $ cd issue3600 $ hg init repo $ echo a > repo/a $ hg -R repo add repo/a $ hg -R repo commit -m '#0' -d '456789012 21600' $ cat > show_mtime.py < import sys, os > print int(os.stat(sys.argv[1]).st_mtime) > EOF $ hg -R repo archive --prefix tar-extracted archive.tar $ (TZ=UTC-3; export TZ; tar xf archive.tar) $ python show_mtime.py tar-extracted/a 456789012 $ hg -R repo archive --prefix zip-extracted archive.zip $ (TZ=UTC-3; export TZ; unzip -q archive.zip) $ python show_mtime.py zip-extracted/a 456789012 $ cd .. 
mercurial-3.7.3/tests/test-merge8.t0000644000175000017500000000125412676531525016624 0ustar mpmmpm00000000000000Test for changeset ba7c74081861 (update dirstate correctly for non-branchmerge updates) $ hg init a $ cd a $ echo a > a $ hg add a $ hg commit -m a $ cd .. $ hg clone a b updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd a $ hg mv a b $ hg commit -m move $ echo b >> b $ hg commit -m b $ cd ../b $ hg pull ../a pulling from ../a searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files (run 'hg update' to get a working copy) $ hg update 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ cd .. mercurial-3.7.3/tests/test-eol-clone.t0000644000175000017500000000254512676531525017316 0ustar mpmmpm00000000000000Testing cloning with the EOL extension $ cat >> $HGRCPATH < [extensions] > eol = > > [eol] > native = CRLF > EOF setup repository $ hg init repo $ cd repo $ cat > .hgeol < [patterns] > **.txt = native > EOF $ printf "first\r\nsecond\r\nthird\r\n" > a.txt $ hg commit --addremove -m 'checkin' adding .hgeol adding a.txt Clone $ cd .. $ hg clone repo repo-2 updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd repo-2 $ cat a.txt first\r (esc) second\r (esc) third\r (esc) $ hg cat a.txt first second third $ hg remove .hgeol $ hg commit -m 'remove eol' $ hg push --quiet $ cd .. Test clone of repo with .hgeol in working dir, but no .hgeol in tip $ hg clone repo repo-3 updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd repo-3 $ cat a.txt first second third Test clone of revision with .hgeol $ cd .. 
$ hg clone -r 0 repo repo-4 adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd repo-4 $ cat .hgeol [patterns] **.txt = native $ cat a.txt first\r (esc) second\r (esc) third\r (esc) $ cd .. mercurial-3.7.3/tests/test-push-r.t0000644000175000017500000000725112676531525016656 0ustar mpmmpm00000000000000 $ hg init test $ cd test $ hg unbundle "$TESTDIR/bundles/remote.hg" adding changesets adding manifests adding file changes added 9 changesets with 7 changes to 4 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg up tip 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd .. $ for i in 0 1 2 3 4 5 6 7 8; do > echo > mkdir test-"$i" > hg --cwd test-"$i" init > hg -R test push -r "$i" test-"$i" > cd test-"$i" > hg verify > cd .. > done pushing to test-0 searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 1 changesets, 1 total revisions pushing to test-1 searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 2 changesets, 2 total revisions pushing to test-2 searching for changes adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 1 files checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 3 changesets, 3 total revisions pushing to test-3 searching for changes adding changesets adding manifests adding file changes added 4 changesets with 4 changes to 1 files checking changesets checking manifests crosschecking files in changesets and 
manifests checking files 1 files, 4 changesets, 4 total revisions pushing to test-4 searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 2 changesets, 2 total revisions pushing to test-5 searching for changes adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 1 files checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 3 changesets, 3 total revisions pushing to test-6 searching for changes adding changesets adding manifests adding file changes added 4 changesets with 5 changes to 2 files checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 4 changesets, 5 total revisions pushing to test-7 searching for changes adding changesets adding manifests adding file changes added 5 changesets with 6 changes to 3 files checking changesets checking manifests crosschecking files in changesets and manifests checking files 3 files, 5 changesets, 6 total revisions pushing to test-8 searching for changes adding changesets adding manifests adding file changes added 5 changesets with 5 changes to 2 files checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 5 changesets, 5 total revisions $ cd test-8 $ hg pull ../test-7 pulling from ../test-7 searching for changes adding changesets adding manifests adding file changes added 4 changesets with 2 changes to 3 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 4 files, 9 changesets, 7 total revisions $ cd .. 
mercurial-3.7.3/tests/test-hgk.t0000644000175000017500000000207112676531525016204 0ustar mpmmpm00000000000000Minimal hgk check $ echo "[extensions]" >> $HGRCPATH $ echo "hgk=" >> $HGRCPATH $ hg init repo $ cd repo $ echo a > a $ hg ci -Am adda adding a $ hg debug-cat-file commit 0 tree a0c8bcbbb45c parent 000000000000 author test 0 0 revision 0 branch default phase draft adda $ echo b > b $ hg ci -Am addb adding b $ hg log -T '{node}\n' 102a90ea7b4a3361e4082ed620918c261189a36a 07f4944404050f47db2e5c5071e0e84e7a27bba9 $ hg debug-diff-tree 07f494440405 102a90ea7b4a :000000 100664 000000000000 1e88685f5dde N b b $ hg debug-diff-tree 07f494440405 102a90ea7b4a --patch diff --git a/b b/b new file mode 100644 --- /dev/null +++ b/b @@ -0,0 +1,1 @@ +b Ensure that diff-tree output isn't affected by diffopts $ hg --config diff.noprefix=True debug-diff-tree 07f494440405 102a90ea7b4a :000000 100664 000000000000 1e88685f5dde N b b $ hg --config diff.noprefix=True debug-diff-tree --patch 07f494440405 102a90ea7b4a diff --git a/b b/b new file mode 100644 --- /dev/null +++ b/b @@ -0,0 +1,1 @@ +b $ cd .. 
mercurial-3.7.3/tests/test-hybridencode.py0000644000175000017500000005121312676531525020261 0ustar mpmmpm00000000000000from mercurial import store def show(s): # show test input print "A = '%s'" % s.encode("string_escape") # show the result of the C implementation, if available h = store._pathencode(s) print "B = '%s'" % h.encode("string_escape") # compare it with reference implementation in Python r = store._hybridencode(s, True) if h != r: print "R = '%s'" % r.encode("string_escape") print show("data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&'()+,-.;=[]^`{}") print "uppercase char X is encoded as _x" show("data/ABCDEFGHIJKLMNOPQRSTUVWXYZ") print "underbar is doubled" show("data/_") print "tilde is character-encoded" show("data/~") print "characters in ASCII code range 1..31" show('data/\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f' '\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f') print "characters in ASCII code range 126..255" show('data/\x7e\x7f' '\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f' '\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f') show('data/\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad\xae\xaf' '\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf') show('data/\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf' '\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf') show('data/\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef' '\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff') print "Windows reserved characters" show('data/less <, greater >, colon :, double-quote ", backslash \\' ', pipe |, question-mark ?, asterisk *') print "encoding directories ending in .hg, .i or .d with '.hg' suffix" show('data/x.h.i/x.hg/x.i/x.d/foo') show('data/a.hg/a.i/a.d/foo') show('data/au.hg/au.i/au.d/foo') show('data/aux.hg/aux.i/aux.d/foo') show('data/auxy.hg/auxy.i/auxy.d/foo') print "but these are not encoded on 
*filenames*" show('data/foo/x.hg') show('data/foo/x.i') show('data/foo/x.d') show('data/foo/a.hg') show('data/foo/a.i') show('data/foo/a.d') show('data/foo/au.hg') show('data/foo/au.i') show('data/foo/au.d') show('data/foo/aux.hg') show('data/foo/aux.i') show('data/foo/aux.d') show('data/foo/auxy.hg') show('data/foo/auxy.i') show('data/foo/auxy.d') print "plain .hg, .i and .d directories have the leading dot encoded" show('data/.hg/.i/.d/foo') show('data/aux.bla/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c.i') show('data/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/' 'TENTH/ELEVENTH/LOREMIPSUM.TXT.i') show('data/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/' 'wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules' '.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider.i') show('data/AUX.THE-QUICK-BROWN-FOX-JU:MPS-OVER-THE-LAZY-DOG-THE-QUICK-' 'BROWN-FOX-JUMPS-OVER-THE-LAZY-DOG.TXT.i') show('data/Project Planning/Resources/AnotherLongDirectoryName/' 'Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt') show('data/Project.Planning/Resources/AnotherLongDirectoryName/' 'Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt') show('data/foo.../foo / /a./_. 
/__/.x../ bla/.FOO/something.i') show('data/c/co/com/com0/com1/com2/com3/com4/com5/com6/com7/com8/com9') show('data/C/CO/COM/COM0/COM1/COM2/COM3/COM4/COM5/COM6/COM7/COM8/COM9') show('data/c.x/co.x/com.x/com0.x/com1.x/com2.x/com3.x/com4.x/com5.x' '/com6.x/com7.x/com8.x/com9.x') show('data/x.c/x.co/x.com0/x.com1/x.com2/x.com3/x.com4/x.com5' '/x.com6/x.com7/x.com8/x.com9') show('data/cx/cox/comx/com0x/com1x/com2x/com3x/com4x/com5x' '/com6x/com7x/com8x/com9x') show('data/xc/xco/xcom0/xcom1/xcom2/xcom3/xcom4/xcom5' '/xcom6/xcom7/xcom8/xcom9') show('data/l/lp/lpt/lpt0/lpt1/lpt2/lpt3/lpt4/lpt5/lpt6/lpt7/lpt8/lpt9') show('data/L/LP/LPT/LPT0/LPT1/LPT2/LPT3/LPT4/LPT5/LPT6/LPT7/LPT8/LPT9') show('data/l.x/lp.x/lpt.x/lpt0.x/lpt1.x/lpt2.x/lpt3.x/lpt4.x/lpt5.x' '/lpt6.x/lpt7.x/lpt8.x/lpt9.x') show('data/x.l/x.lp/x.lpt/x.lpt0/x.lpt1/x.lpt2/x.lpt3/x.lpt4/x.lpt5' '/x.lpt6/x.lpt7/x.lpt8/x.lpt9') show('data/lx/lpx/lptx/lpt0x/lpt1x/lpt2x/lpt3x/lpt4x/lpt5x' '/lpt6x/lpt7x/lpt8x/lpt9x') show('data/xl/xlp/xlpt/xlpt0/xlpt1/xlpt2/xlpt3/xlpt4/xlpt5' '/xlpt6/xlpt7/xlpt8/xlpt9') show('data/con/p/pr/prn/a/au/aux/n/nu/nul') show('data/CON/P/PR/PRN/A/AU/AUX/N/NU/NUL') show('data/con.x/p.x/pr.x/prn.x/a.x/au.x/aux.x/n.x/nu.x/nul.x') show('data/x.con/x.p/x.pr/x.prn/x.a/x.au/x.aux/x.n/x.nu/x.nul') show('data/conx/px/prx/prnx/ax/aux/auxx/nx/nux/nulx') show('data/xcon/xp/xpr/xprn/xa/xau/xaux/xn/xnu/xnul') show('data/a./au./aux./auxy./aux.') show('data/c./co./con./cony./con.') show('data/p./pr./prn./prny./prn.') show('data/n./nu./nul./nuly./nul.') show('data/l./lp./lpt./lpt1./lpt1y./lpt1.') show('data/lpt9./lpt9y./lpt9.') show('data/com./com1./com1y./com1.') show('data/com9./com9y./com9.') show('data/a /au /aux /auxy /aux ') print "largest unhashed path" show('data/123456789-123456789-123456789-123456789-123456789-' 'unhashed--xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') print "shortest hashed path" show('data/123456789-123456789-123456789-123456789-123456789-' 
'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') print "changing one char in part that's hashed away produces a different hash" show('data/123456789-123456789-123456789-123456789-123456789-' 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxy-' '123456789-123456') print "uppercase hitting length limit due to encoding" show('data/A23456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') show('data/Z23456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') print "compare with lowercase not hitting limit" show('data/a23456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') show('data/z23456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') print "not hitting limit with any of these" show("data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&'()+,-.;=" "[]^`{}xxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-" "123456789-12345") print "underbar hitting length limit due to encoding" show('data/_23456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') print "tilde hitting length limit due to encoding" show('data/~23456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') print "Windows reserved characters hitting length limit" show('data/<23456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') show('data/>23456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') show('data/:23456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') 
show('data/"23456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') show('data/\\23456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') show('data/|23456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') show('data/?23456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') show('data/*23456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') print "initial space hitting length limit" show('data/ 23456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') print "initial dot hitting length limit" show('data/.23456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') print "trailing space in filename hitting length limit" show('data/123456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-1234 ') print "trailing dot in filename hitting length limit" show('data/123456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-1234.') print "initial space in directory hitting length limit" show('data/ x/456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') print "initial dot in directory hitting length limit" show('data/.x/456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') print "trailing space in directory hitting length limit" show('data/x /456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') print "trailing 
dot in directory hitting length limit" show('data/x./456789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') print "with directories that need direncoding, hitting length limit" show('data/x.i/56789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') show('data/x.d/56789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') show('data/x.hg/5789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') print "Windows reserved filenames, hitting length limit" show('data/con/56789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') show('data/prn/56789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') show('data/aux/56789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') show('data/nul/56789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') show('data/com1/6789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') show('data/com9/6789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') show('data/lpt1/6789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') show('data/lpt9/6789-123456789-123456789-123456789-123456789-' 'xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') print "non-reserved names, just not hitting limit" show('data/123456789-123456789-123456789-123456789-123456789-' '/com/com0/lpt/lpt0/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12345') print "hashed path with 
largest untruncated 1st dir" show('data/12345678/-123456789-123456789-123456789-123456789-' 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') print "hashed path with smallest truncated 1st dir" show('data/123456789/123456789-123456789-123456789-123456789-' 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') print "hashed path with largest untruncated two dirs" show('data/12345678/12345678/9-123456789-123456789-123456789-' 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') print "hashed path with smallest truncated two dirs" show('data/123456789/123456789/123456789-123456789-123456789-' 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') print "hashed path with largest untruncated three dirs" show('data/12345678/12345678/12345678/89-123456789-123456789-' 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') print "hashed path with smallest truncated three dirs" show('data/123456789/123456789/123456789/123456789-123456789-' 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') print "hashed path with largest untruncated four dirs" show('data/12345678/12345678/12345678/12345678/789-123456789-' 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') print "hashed path with smallest truncated four dirs" show('data/123456789/123456789/123456789/123456789/123456789-' 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') print "hashed path with largest untruncated five dirs" show('data/12345678/12345678/12345678/12345678/12345678/6789-' 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') print "hashed path with smallest truncated five dirs" show('data/123456789/123456789/123456789/123456789/123456789/' 'hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') print "hashed path with largest untruncated six dirs" show('data/12345678/12345678/12345678/12345678/12345678/12345' 
'678/ed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') print "hashed path with smallest truncated six dirs" show('data/123456789/123456789/123456789/123456789/123456789/' '123456789/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') print "hashed path with largest untruncated seven dirs" show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/xxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') print "hashed path with smallest truncated seven dirs" show('data/123456789/123456789/123456789/123456789/123456789/' '123456789/123456789/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') print "hashed path with largest untruncated eight dirs" print "(directory 8 is dropped because it hits _maxshortdirslen)" show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/12345678/xxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') print "hashed path with smallest truncated eight dirs" print "(directory 8 is dropped because it hits _maxshortdirslen)" show('data/123456789/123456789/123456789/123456789/123456789/' '123456789/123456789/123456789/xxxxxxxxx-xxxxxxxxx-' '123456789-123456') print "hashed path with largest non-dropped directory 8" print "(just not hitting the _maxshortdirslen boundary)" show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') print "...adding one truncated char to dir 1..7 won't drop dir 8" show('data/12345678x/12345678/12345678/12345678/12345678/12345' '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') show('data/12345678/12345678x/12345678/12345678/12345678/12345' '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') show('data/12345678/12345678/12345678x/12345678/12345678/12345' '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') show('data/12345678/12345678/12345678/12345678x/12345678/12345' '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' 
'123456789-123456') show('data/12345678/12345678/12345678/12345678/12345678x/12345' '678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') show('data/12345678/12345678/12345678/12345678/12345678/12345' '678x/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678x/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') print "hashed path with shortest dropped directory 8" print "(just hitting the _maxshortdirslen boundary)" show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/123456/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') print "hashed path that drops dir 8 due to dot or space at end is" print "encoded, and thus causing to hit _maxshortdirslen" show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/1234./-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/1234 /-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') print "... with dir 8 short enough for encoding" show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/12./xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/12 /xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-123456') print '''Extensions are replicated on hashed paths. Note that we only get to encode files that end in .i or .d inside the store. 
Encoded filenames are thus bound in length.''' show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12.345.i') show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12.345.d') show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12.3456.i') show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12.34567.i') show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12.345678.i') show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12.3456789.i') show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12.3456789-.i') show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12.3456789-1.i') show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12.3456789-12.i') show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12.3456789-123.i') show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12.3456789-1234.i') show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12.3456789-12345.i') show('data/12345678/12345678/12345678/12345678/12345678/12345' '678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-' '123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWX' 'YZ-abcdefghjiklmnopqrstuvwxyz-ABCDEFGHIJKLMNOPRSTU' 
'VWXYZ-1234567890-xxxxxxxxx-xxxxxxxxx-xxxxxxxx-xxxx' 'xxxxx-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwww' 'wwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww.i') print "paths outside data/ can be encoded" show('metadata/dir/00manifest.i') show('metadata/12345678/12345678/12345678/12345678/12345678/' '12345678/12345678/12345678/12345678/12345678/12345678/' '12345678/12345678/00manifest.i') mercurial-3.7.3/tests/test-manifest.py0000644000175000017500000004036212676531525017433 0ustar mpmmpm00000000000000import binascii import unittest import itertools import silenttestrunner from mercurial import manifest as manifestmod from mercurial import match as matchmod EMTPY_MANIFEST = '' EMTPY_MANIFEST_V2 = '\0\n' HASH_1 = '1' * 40 BIN_HASH_1 = binascii.unhexlify(HASH_1) HASH_2 = 'f' * 40 BIN_HASH_2 = binascii.unhexlify(HASH_2) HASH_3 = '1234567890abcdef0987654321deadbeef0fcafe' BIN_HASH_3 = binascii.unhexlify(HASH_3) A_SHORT_MANIFEST = ( 'bar/baz/qux.py\0%(hash2)s%(flag2)s\n' 'foo\0%(hash1)s%(flag1)s\n' ) % {'hash1': HASH_1, 'flag1': '', 'hash2': HASH_2, 'flag2': 'l', } # Same data as A_SHORT_MANIFEST A_SHORT_MANIFEST_V2 = ( '\0\n' '\x00bar/baz/qux.py\0%(flag2)s\n%(hash2)s\n' '\x00foo\0%(flag1)s\n%(hash1)s\n' ) % {'hash1': BIN_HASH_1, 'flag1': '', 'hash2': BIN_HASH_2, 'flag2': 'l', } # Same data as A_SHORT_MANIFEST A_METADATA_MANIFEST = ( '\0foo\0bar\n' '\x00bar/baz/qux.py\0%(flag2)s\0foo\0bar\n%(hash2)s\n' # flag and metadata '\x00foo\0%(flag1)s\0foo\n%(hash1)s\n' # no flag, but metadata ) % {'hash1': BIN_HASH_1, 'flag1': '', 'hash2': BIN_HASH_2, 'flag2': 'l', } A_STEM_COMPRESSED_MANIFEST = ( '\0\n' '\x00bar/baz/qux.py\0%(flag2)s\n%(hash2)s\n' '\x04qux/foo.py\0%(flag1)s\n%(hash1)s\n' # simple case of 4 stem chars '\x0az.py\0%(flag1)s\n%(hash1)s\n' # tricky newline = 10 stem characters '\x00%(verylongdir)sx/x\0\n%(hash1)s\n' '\xffx/y\0\n%(hash2)s\n' # more than 255 stem chars ) % {'hash1': BIN_HASH_1, 'flag1': '', 'hash2': BIN_HASH_2, 'flag2': 'l', 'verylongdir': 255 * 'x', } 
A_DEEPER_MANIFEST = ( 'a/b/c/bar.py\0%(hash3)s%(flag1)s\n' 'a/b/c/bar.txt\0%(hash1)s%(flag1)s\n' 'a/b/c/foo.py\0%(hash3)s%(flag1)s\n' 'a/b/c/foo.txt\0%(hash2)s%(flag2)s\n' 'a/b/d/baz.py\0%(hash3)s%(flag1)s\n' 'a/b/d/qux.py\0%(hash1)s%(flag2)s\n' 'a/b/d/ten.txt\0%(hash3)s%(flag2)s\n' 'a/b/dog.py\0%(hash3)s%(flag1)s\n' 'a/b/fish.py\0%(hash2)s%(flag1)s\n' 'a/c/london.py\0%(hash3)s%(flag2)s\n' 'a/c/paper.txt\0%(hash2)s%(flag2)s\n' 'a/c/paris.py\0%(hash2)s%(flag1)s\n' 'a/d/apple.py\0%(hash3)s%(flag1)s\n' 'a/d/pizza.py\0%(hash3)s%(flag2)s\n' 'a/green.py\0%(hash1)s%(flag2)s\n' 'a/purple.py\0%(hash2)s%(flag1)s\n' 'app.py\0%(hash3)s%(flag1)s\n' 'readme.txt\0%(hash2)s%(flag1)s\n' ) % {'hash1': HASH_1, 'flag1': '', 'hash2': HASH_2, 'flag2': 'l', 'hash3': HASH_3, } HUGE_MANIFEST_ENTRIES = 200001 A_HUGE_MANIFEST = ''.join(sorted( 'file%d\0%s%s\n' % (i, h, f) for i, h, f in itertools.izip(xrange(200001), itertools.cycle((HASH_1, HASH_2)), itertools.cycle(('', 'x', 'l'))))) class basemanifesttests(object): def parsemanifest(self, text): raise NotImplementedError('parsemanifest not implemented by test case') def assertIn(self, thing, container, msg=None): # assertIn new in 2.7, use it if available, otherwise polyfill sup = getattr(unittest.TestCase, 'assertIn', False) if sup: return sup(self, thing, container, msg=msg) if not msg: msg = 'Expected %r in %r' % (thing, container) self.assert_(thing in container, msg) def testEmptyManifest(self): m = self.parsemanifest(EMTPY_MANIFEST) self.assertEqual(0, len(m)) self.assertEqual([], list(m)) def testEmptyManifestv2(self): m = self.parsemanifest(EMTPY_MANIFEST_V2) self.assertEqual(0, len(m)) self.assertEqual([], list(m)) def testManifest(self): m = self.parsemanifest(A_SHORT_MANIFEST) self.assertEqual(['bar/baz/qux.py', 'foo'], list(m)) self.assertEqual(BIN_HASH_2, m['bar/baz/qux.py']) self.assertEqual('l', m.flags('bar/baz/qux.py')) self.assertEqual(BIN_HASH_1, m['foo']) self.assertEqual('', m.flags('foo')) self.assertRaises(KeyError, 
lambda : m['wat']) def testParseManifestV2(self): m1 = self.parsemanifest(A_SHORT_MANIFEST) m2 = self.parsemanifest(A_SHORT_MANIFEST_V2) # Should have same content as A_SHORT_MANIFEST self.assertEqual(m1.text(), m2.text()) def testParseManifestMetadata(self): # Metadata is for future-proofing and should be accepted but ignored m = self.parsemanifest(A_METADATA_MANIFEST) self.assertEqual(A_SHORT_MANIFEST, m.text()) def testParseManifestStemCompression(self): m = self.parsemanifest(A_STEM_COMPRESSED_MANIFEST) self.assertIn('bar/baz/qux.py', m) self.assertIn('bar/qux/foo.py', m) self.assertIn('bar/qux/foz.py', m) self.assertIn(256 * 'x' + '/x', m) self.assertIn(256 * 'x' + '/y', m) self.assertEqual(A_STEM_COMPRESSED_MANIFEST, m.text(usemanifestv2=True)) def testTextV2(self): m1 = self.parsemanifest(A_SHORT_MANIFEST) v2text = m1.text(usemanifestv2=True) self.assertEqual(A_SHORT_MANIFEST_V2, v2text) def testSetItem(self): want = BIN_HASH_1 m = self.parsemanifest(EMTPY_MANIFEST) m['a'] = want self.assertIn('a', m) self.assertEqual(want, m['a']) self.assertEqual('a\0' + HASH_1 + '\n', m.text()) m = self.parsemanifest(A_SHORT_MANIFEST) m['a'] = want self.assertEqual(want, m['a']) self.assertEqual('a\0' + HASH_1 + '\n' + A_SHORT_MANIFEST, m.text()) def testSetFlag(self): want = 'x' m = self.parsemanifest(EMTPY_MANIFEST) # first add a file; a file-less flag makes no sense m['a'] = BIN_HASH_1 m.setflag('a', want) self.assertEqual(want, m.flags('a')) self.assertEqual('a\0' + HASH_1 + want + '\n', m.text()) m = self.parsemanifest(A_SHORT_MANIFEST) # first add a file; a file-less flag makes no sense m['a'] = BIN_HASH_1 m.setflag('a', want) self.assertEqual(want, m.flags('a')) self.assertEqual('a\0' + HASH_1 + want + '\n' + A_SHORT_MANIFEST, m.text()) def testCopy(self): m = self.parsemanifest(A_SHORT_MANIFEST) m['a'] = BIN_HASH_1 m2 = m.copy() del m del m2 # make sure we don't double free() anything def testCompaction(self): unhex = binascii.unhexlify h1, h2 = unhex(HASH_1), 
unhex(HASH_2) m = self.parsemanifest(A_SHORT_MANIFEST) m['alpha'] = h1 m['beta'] = h2 del m['foo'] want = 'alpha\0%s\nbar/baz/qux.py\0%sl\nbeta\0%s\n' % ( HASH_1, HASH_2, HASH_2) self.assertEqual(want, m.text()) self.assertEqual(3, len(m)) self.assertEqual(['alpha', 'bar/baz/qux.py', 'beta'], list(m)) self.assertEqual(h1, m['alpha']) self.assertEqual(h2, m['bar/baz/qux.py']) self.assertEqual(h2, m['beta']) self.assertEqual('', m.flags('alpha')) self.assertEqual('l', m.flags('bar/baz/qux.py')) self.assertEqual('', m.flags('beta')) self.assertRaises(KeyError, lambda : m['foo']) def testSetGetNodeSuffix(self): clean = self.parsemanifest(A_SHORT_MANIFEST) m = self.parsemanifest(A_SHORT_MANIFEST) h = m['foo'] f = m.flags('foo') want = h + 'a' # Merge code wants to set 21-byte fake hashes at times m['foo'] = want self.assertEqual(want, m['foo']) self.assertEqual([('bar/baz/qux.py', BIN_HASH_2), ('foo', BIN_HASH_1 + 'a')], list(m.iteritems())) # Sometimes it even tries a 22-byte fake hash, but we can # return 21 and it'll work out m['foo'] = want + '+' self.assertEqual(want, m['foo']) # make sure the suffix survives a copy match = matchmod.match('', '', ['re:foo']) m2 = m.matches(match) self.assertEqual(want, m2['foo']) self.assertEqual(1, len(m2)) m2 = m.copy() self.assertEqual(want, m2['foo']) # suffix with iteration self.assertEqual([('bar/baz/qux.py', BIN_HASH_2), ('foo', want)], list(m.iteritems())) # shows up in diff self.assertEqual({'foo': ((want, f), (h, ''))}, m.diff(clean)) self.assertEqual({'foo': ((h, ''), (want, f))}, clean.diff(m)) def testMatchException(self): m = self.parsemanifest(A_SHORT_MANIFEST) match = matchmod.match('', '', ['re:.*']) def filt(path): if path == 'foo': assert False return True match.matchfn = filt self.assertRaises(AssertionError, m.matches, match) def testRemoveItem(self): m = self.parsemanifest(A_SHORT_MANIFEST) del m['foo'] self.assertRaises(KeyError, lambda : m['foo']) self.assertEqual(1, len(m)) self.assertEqual(1, len(list(m))) 
# now restore and make sure everything works right m['foo'] = 'a' * 20 self.assertEqual(2, len(m)) self.assertEqual(2, len(list(m))) def testManifestDiff(self): MISSING = (None, '') addl = 'z-only-in-left\0' + HASH_1 + '\n' addr = 'z-only-in-right\0' + HASH_2 + 'x\n' left = self.parsemanifest( A_SHORT_MANIFEST.replace(HASH_1, HASH_3 + 'x') + addl) right = self.parsemanifest(A_SHORT_MANIFEST + addr) want = { 'foo': ((BIN_HASH_3, 'x'), (BIN_HASH_1, '')), 'z-only-in-left': ((BIN_HASH_1, ''), MISSING), 'z-only-in-right': (MISSING, (BIN_HASH_2, 'x')), } self.assertEqual(want, left.diff(right)) want = { 'bar/baz/qux.py': (MISSING, (BIN_HASH_2, 'l')), 'foo': (MISSING, (BIN_HASH_3, 'x')), 'z-only-in-left': (MISSING, (BIN_HASH_1, '')), } self.assertEqual(want, self.parsemanifest(EMTPY_MANIFEST).diff(left)) want = { 'bar/baz/qux.py': ((BIN_HASH_2, 'l'), MISSING), 'foo': ((BIN_HASH_3, 'x'), MISSING), 'z-only-in-left': ((BIN_HASH_1, ''), MISSING), } self.assertEqual(want, left.diff(self.parsemanifest(EMTPY_MANIFEST))) copy = right.copy() del copy['z-only-in-right'] del right['foo'] want = { 'foo': (MISSING, (BIN_HASH_1, '')), 'z-only-in-right': ((BIN_HASH_2, 'x'), MISSING), } self.assertEqual(want, right.diff(copy)) short = self.parsemanifest(A_SHORT_MANIFEST) pruned = short.copy() del pruned['foo'] want = { 'foo': ((BIN_HASH_1, ''), MISSING), } self.assertEqual(want, short.diff(pruned)) want = { 'foo': (MISSING, (BIN_HASH_1, '')), } self.assertEqual(want, pruned.diff(short)) want = { 'bar/baz/qux.py': None, 'foo': (MISSING, (BIN_HASH_1, '')), } self.assertEqual(want, pruned.diff(short, True)) def testReversedLines(self): backwards = ''.join( l + '\n' for l in reversed(A_SHORT_MANIFEST.split('\n')) if l) try: self.parsemanifest(backwards) self.fail('Should have raised ValueError') except ValueError as v: self.assertIn('Manifest lines not in sorted order.', str(v)) def testNoTerminalNewline(self): try: self.parsemanifest(A_SHORT_MANIFEST + 'wat') self.fail('Should have raised 
ValueError') except ValueError as v: self.assertIn('Manifest did not end in a newline.', str(v)) def testNoNewLineAtAll(self): try: self.parsemanifest('wat') self.fail('Should have raised ValueError') except ValueError as v: self.assertIn('Manifest did not end in a newline.', str(v)) def testHugeManifest(self): m = self.parsemanifest(A_HUGE_MANIFEST) self.assertEqual(HUGE_MANIFEST_ENTRIES, len(m)) self.assertEqual(len(m), len(list(m))) def testMatchesMetadata(self): '''Tests matches() for a few specific files to make sure that both the set of files as well as their flags and nodeids are correct in the resulting manifest.''' m = self.parsemanifest(A_HUGE_MANIFEST) match = matchmod.match('/', '', ['file1', 'file200', 'file300'], exact=True) m2 = m.matches(match) w = ('file1\0%sx\n' 'file200\0%sl\n' 'file300\0%s\n') % (HASH_2, HASH_1, HASH_1) self.assertEqual(w, m2.text()) def testMatchesNonexistentFile(self): '''Tests matches() for a small set of specific files, including one nonexistent file to make sure in only matches against existing files. ''' m = self.parsemanifest(A_DEEPER_MANIFEST) match = matchmod.match('/', '', ['a/b/c/bar.txt', 'a/b/d/qux.py', 'readme.txt', 'nonexistent'], exact=True) m2 = m.matches(match) self.assertEqual( ['a/b/c/bar.txt', 'a/b/d/qux.py', 'readme.txt'], m2.keys()) def testMatchesNonexistentDirectory(self): '''Tests matches() for a relpath match on a directory that doesn't actually exist.''' m = self.parsemanifest(A_DEEPER_MANIFEST) match = matchmod.match('/', '', ['a/f'], default='relpath') m2 = m.matches(match) self.assertEqual([], m2.keys()) def testMatchesExactLarge(self): '''Tests matches() for files matching a large list of exact files. 
''' m = self.parsemanifest(A_HUGE_MANIFEST) flist = m.keys()[80:300] match = matchmod.match('/', '', flist, exact=True) m2 = m.matches(match) self.assertEqual(flist, m2.keys()) def testMatchesFull(self): '''Tests matches() for what should be a full match.''' m = self.parsemanifest(A_DEEPER_MANIFEST) match = matchmod.match('/', '', ['']) m2 = m.matches(match) self.assertEqual(m.keys(), m2.keys()) def testMatchesDirectory(self): '''Tests matches() on a relpath match on a directory, which should match against all files within said directory.''' m = self.parsemanifest(A_DEEPER_MANIFEST) match = matchmod.match('/', '', ['a/b'], default='relpath') m2 = m.matches(match) self.assertEqual([ 'a/b/c/bar.py', 'a/b/c/bar.txt', 'a/b/c/foo.py', 'a/b/c/foo.txt', 'a/b/d/baz.py', 'a/b/d/qux.py', 'a/b/d/ten.txt', 'a/b/dog.py', 'a/b/fish.py'], m2.keys()) def testMatchesExactPath(self): '''Tests matches() on an exact match on a directory, which should result in an empty manifest because you can't perform an exact match against a directory.''' m = self.parsemanifest(A_DEEPER_MANIFEST) match = matchmod.match('/', '', ['a/b'], exact=True) m2 = m.matches(match) self.assertEqual([], m2.keys()) def testMatchesCwd(self): '''Tests matches() on a relpath match with the current directory ('.') when not in the root directory.''' m = self.parsemanifest(A_DEEPER_MANIFEST) match = matchmod.match('/', 'a/b', ['.'], default='relpath') m2 = m.matches(match) self.assertEqual([ 'a/b/c/bar.py', 'a/b/c/bar.txt', 'a/b/c/foo.py', 'a/b/c/foo.txt', 'a/b/d/baz.py', 'a/b/d/qux.py', 'a/b/d/ten.txt', 'a/b/dog.py', 'a/b/fish.py'], m2.keys()) def testMatchesWithPattern(self): '''Tests matches() for files matching a pattern that reside deeper than the specified directory.''' m = self.parsemanifest(A_DEEPER_MANIFEST) match = matchmod.match('/', '', ['a/b/*/*.txt']) m2 = m.matches(match) self.assertEqual( ['a/b/c/bar.txt', 'a/b/c/foo.txt', 'a/b/d/ten.txt'], m2.keys()) class testmanifestdict(unittest.TestCase, 
basemanifesttests): def parsemanifest(self, text): return manifestmod.manifestdict(text) class testtreemanifest(unittest.TestCase, basemanifesttests): def parsemanifest(self, text): return manifestmod.treemanifest('', text) if __name__ == '__main__': silenttestrunner.main(__name__) mercurial-3.7.3/tests/test-mq-safety.t0000644000175000017500000001146012676531525017343 0ustar mpmmpm00000000000000 $ echo '[extensions]' >> $HGRCPATH $ echo 'hgext.mq =' >> $HGRCPATH $ hg init repo $ cd repo $ echo foo > foo $ hg ci -qAm 'add a file' $ hg qinit $ hg qnew foo $ echo foo >> foo $ hg qrefresh -m 'append foo' $ hg qnew bar $ echo bar >> foo $ hg qrefresh -m 'append bar' Try to operate on public mq changeset $ hg qpop popping bar now at: foo $ hg phase --public qbase $ echo babar >> foo $ hg qref abort: cannot qrefresh public revision (see "hg help phases" for details) [255] $ hg revert -a reverting foo $ hg qpop abort: popping would remove a public revision (see "hg help phases" for details) [255] $ hg qfold bar abort: cannot qrefresh public revision (see "hg help phases" for details) [255] $ hg revert -a reverting foo restore state for remaining test $ hg qpush applying bar now at: bar try to commit on top of a patch $ echo quux >> foo $ hg ci -m 'append quux' abort: cannot commit over an applied mq patch [255] cheat a bit... 
$ mv .hg/patches .hg/patches2 $ hg ci -m 'append quux' $ mv .hg/patches2 .hg/patches qpop/qrefresh on the wrong revision $ hg qpop abort: popping would remove a revision not managed by this patch queue [255] $ hg qpop -n patches using patch queue: $TESTTMP/repo/.hg/patches (glob) abort: popping would remove a revision not managed by this patch queue [255] $ hg qrefresh abort: working directory revision is not qtip [255] $ hg up -C qtip 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg qpop abort: popping would remove a revision not managed by this patch queue [255] $ hg qrefresh abort: cannot qrefresh a revision with children [255] $ hg tip --template '{rev} {desc}\n' 3 append quux qpush warning branchheads $ cd .. $ hg init branchy $ cd branchy $ echo q > q $ hg add q $ hg qnew -f qp $ hg qpop popping qp patch queue now empty $ echo a > a $ hg ci -Ama adding a $ hg up null 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg branch b marked working directory as branch b (branches are permanent and global, did you want a bookmark?) $ echo c > c $ hg ci -Amc adding c $ hg merge default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -mmerge $ hg up default 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg log changeset: 2:65309210bf4e branch: b tag: tip parent: 1:707adb4c8ae1 parent: 0:cb9a9f314b8b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: merge changeset: 1:707adb4c8ae1 branch: b parent: -1:000000000000 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: c changeset: 0:cb9a9f314b8b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a $ hg qpush applying qp now at: qp Testing applied patches, push and --force $ cd .. 
$ hg init forcepush $ cd forcepush $ echo a > a $ hg ci -Am adda adding a $ echo a >> a $ hg ci -m changea $ hg up 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg branch branch marked working directory as branch branch (branches are permanent and global, did you want a bookmark?) $ echo b > b $ hg ci -Am addb adding b $ hg up 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg --cwd .. clone -r 0 forcepush forcepush2 adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo a >> a $ hg qnew patch Pushing applied patch with --rev without --force $ hg push -r . ../forcepush2 pushing to ../forcepush2 abort: source has mq patches applied [255] Pushing applied patch with branchhash, without --force $ hg push ../forcepush2#default pushing to ../forcepush2 abort: source has mq patches applied [255] Pushing revs excluding applied patch $ hg push --new-branch -r 'branch(branch)' -r 2 ../forcepush2 pushing to ../forcepush2 searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files Pushing applied patch with --force $ hg phase --force --secret 'mq()' $ hg push --force -r default ../forcepush2 pushing to ../forcepush2 searching for changes no changes found (ignored 1 secret changesets) [1] $ hg phase --draft 'mq()' $ hg push --force -r default ../forcepush2 pushing to ../forcepush2 searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) $ cd .. 
mercurial-3.7.3/tests/test-wireproto.t0000644000175000017500000002135012676531525017466 0ustar mpmmpm00000000000000#require killdaemons Test wire protocol argument passing Setup repo: $ hg init repo Local: $ hg debugwireargs repo eins zwei --three drei --four vier eins zwei drei vier None $ hg debugwireargs repo eins zwei --four vier eins zwei None vier None $ hg debugwireargs repo eins zwei eins zwei None None None $ hg debugwireargs repo eins zwei --five fuenf eins zwei None None fuenf HTTP: $ hg serve -R repo -p $HGPORT -d --pid-file=hg1.pid -E error.log -A access.log $ cat hg1.pid >> $DAEMON_PIDS $ hg debugwireargs http://localhost:$HGPORT/ un deux trois quatre un deux trois quatre None $ hg debugwireargs http://localhost:$HGPORT/ \ un deux trois\ qu\ \ atre un deux trois qu atre None $ hg debugwireargs http://localhost:$HGPORT/ eins zwei --four vier eins zwei None vier None $ hg debugwireargs http://localhost:$HGPORT/ eins zwei eins zwei None None None $ hg debugwireargs http://localhost:$HGPORT/ eins zwei --five fuenf eins zwei None None None $ hg debugwireargs http://localhost:$HGPORT/ un deux trois 
onethousandcharactersxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx un deux trois 
onethousandcharactersxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx None $ cat error.log $ cat access.log * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) * - - [*] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:four=quatre&one=un&three=trois&two=deux (glob) * - - [*] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:four=quatre&one=un&three=trois&two=deux (glob) * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) * - - [*] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:four=qu++atre&one=+un&three=trois+&two=deux (glob) * - - [*] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:four=qu++atre&one=+un&three=trois+&two=deux (glob) * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) * - - [*] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:four=vier&one=eins&two=zwei (glob) * - - [*] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:four=vier&one=eins&two=zwei (glob) * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) * - - [*] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:one=eins&two=zwei (glob) * - - [*] "GET 
/?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:one=eins&two=zwei (glob) * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) * - - [*] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:one=eins&two=zwei (glob) * - - [*] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:one=eins&two=zwei (glob) * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) * - - [*] "GET /?cmd=debugwireargs HTTP/1.1" 200 - x-hgarg-1:four=onethousandcharactersxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx&one x-hgarg-2:=un&three=trois&two=deux (glob) * - - [*] "GET /?cmd=debugwireargs HTTP/1.1" 200 - 
x-hgarg-1:four=onethousandcharactersxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx&one x-hgarg-2:=un&three=trois&two=deux (glob) HTTP without the httpheader capability: $ HGRCPATH="`pwd`/repo/.hgrc" $ export HGRCPATH $ CAP=httpheader $ . 
"$TESTDIR/notcapable" $ hg serve -R repo -p $HGPORT2 -d --pid-file=hg2.pid -E error2.log -A access2.log $ cat hg2.pid >> $DAEMON_PIDS $ hg debugwireargs http://localhost:$HGPORT2/ un deux trois quatre un deux trois quatre None $ hg debugwireargs http://localhost:$HGPORT2/ eins zwei --four vier eins zwei None vier None $ hg debugwireargs http://localhost:$HGPORT2/ eins zwei eins zwei None None None $ hg debugwireargs http://localhost:$HGPORT2/ eins zwei --five fuenf eins zwei None None None $ cat error2.log $ cat access2.log * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) * - - [*] "GET /?cmd=debugwireargs&four=quatre&one=un&three=trois&two=deux HTTP/1.1" 200 - (glob) * - - [*] "GET /?cmd=debugwireargs&four=quatre&one=un&three=trois&two=deux HTTP/1.1" 200 - (glob) * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) * - - [*] "GET /?cmd=debugwireargs&four=vier&one=eins&two=zwei HTTP/1.1" 200 - (glob) * - - [*] "GET /?cmd=debugwireargs&four=vier&one=eins&two=zwei HTTP/1.1" 200 - (glob) * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) * - - [*] "GET /?cmd=debugwireargs&one=eins&two=zwei HTTP/1.1" 200 - (glob) * - - [*] "GET /?cmd=debugwireargs&one=eins&two=zwei HTTP/1.1" 200 - (glob) * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) * - - [*] "GET /?cmd=debugwireargs&one=eins&two=zwei HTTP/1.1" 200 - (glob) * - - [*] "GET /?cmd=debugwireargs&one=eins&two=zwei HTTP/1.1" 200 - (glob) SSH (try to exercise the ssh functionality with a dummy script): $ hg debugwireargs --ssh "python $TESTDIR/dummyssh" ssh://user@dummy/repo uno due tre quattro uno due tre quattro None $ hg debugwireargs --ssh "python $TESTDIR/dummyssh" ssh://user@dummy/repo eins zwei --four vier eins zwei None vier None $ hg debugwireargs --ssh "python $TESTDIR/dummyssh" ssh://user@dummy/repo eins zwei eins zwei None None None $ hg debugwireargs --ssh "python $TESTDIR/dummyssh" ssh://user@dummy/repo eins zwei --five fuenf eins zwei None None None Explicitly kill daemons to let 
the test exit on Windows $ killdaemons.py mercurial-3.7.3/tests/test-hgweb-symrev.t0000644000175000017500000017723312676531525020067 0ustar mpmmpm00000000000000#require serve Test symbolic revision usage in links produced by hgweb pages. There are multiple issues related to this: - issue2296 - issue2826 - issue3594 - issue3634 Set up the repo $ hg init test $ cd test $ echo 0 > foo $ mkdir dir $ echo 0 > dir/bar $ hg ci -Am 'first' adding dir/bar adding foo $ echo 1 >> foo $ hg ci -m 'second' $ echo 2 >> foo $ hg ci -m 'third' $ hg bookmark -r1 xyzzy $ hg log -G --template '{rev}:{node|short} {tags} {bookmarks}\n' @ 2:9d8c40cba617 tip | o 1:a7c1559b7bba xyzzy | o 0:43c799df6e75 $ hg serve --config web.allow_archive=zip -n test -p $HGPORT -d --pid-file=hg.pid -E errors.log $ cat hg.pid >> $DAEMON_PIDS $ REVLINKS='href=[^>]+(rev=|/)(43c799df6e75|0|a7c1559b7bba|1|xyzzy|9d8c40cba617|2|tip|default)' (De)referencing symbolic revisions (paper) $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'shortlog?style=paper' | egrep $REVLINKS
                      • graph
                      • changeset
                      • browse
                      • zip less more | rev 2: (0) tip third second first less more | rev 2: (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'graph?style=paper' | egrep $REVLINKS
                      • log
                      • changeset
                      • browse
                      • less more | rev 2: (0) tip less more | rev 2: (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'file?style=paper' | egrep $REVLINKS
                      • log
                      • graph
                      • changeset
                      • zip directory / @ 2:9d8c40cba617 [up] $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'branches?style=paper' | egrep $REVLINKS $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'tags?style=paper' | egrep $REVLINKS $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'bookmarks?style=paper' | egrep $REVLINKS $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'shortlog?style=paper&rev=all()' | egrep $REVLINKS third second first $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'rev/xyzzy?style=paper' | egrep $REVLINKS
                      • log
                      • graph
                      • raw
                      • browse
                      • zip changeset 1:a7c1559b7bba 43c799df6e75 9d8c40cba617 foo $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'shortlog/xyzzy?style=paper' | egrep $REVLINKS
                      • graph
                      • changeset
                      • browse
                      • zip less more | rev 1: (0) tip second first less more | rev 1: (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'graph/xyzzy?style=paper' | egrep $REVLINKS
                      • log
                      • changeset
                      • browse
                      • less more | rev 1: (0) tip less more | rev 1: (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'file/xyzzy?style=paper' | egrep $REVLINKS
                      • log
                      • graph
                      • changeset
                      • zip directory / @ 1:a7c1559b7bba [up] $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'file/xyzzy/foo?style=paper' | egrep $REVLINKS
                      • log
                      • graph
                      • changeset
                      • browse
                      • latest
                      • diff
                      • comparison
                      • annotate
                      • file log
                      • raw
                      • view foo @ 1:a7c1559b7bba 43c799df6e75 9d8c40cba617 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'log/xyzzy/foo?style=paper' | egrep $REVLINKS href="/atom-log/tip/foo" title="Atom feed for test:foo" /> href="/rss-log/tip/foo" title="RSS feed for test:foo" />
                      • log
                      • graph
                      • changeset
                      • browse
                      • file
                      • diff
                      • comparison
                      • annotate
                      • raw
                      • log foo @ 1:a7c1559b7bba less more | (0) tip
                        second first less more | (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'annotate/xyzzy/foo?style=paper' | egrep $REVLINKS
                      • log
                      • graph
                      • changeset
                      • browse
                      • file
                      • latest
                      • diff
                      • comparison
                      • file log
                      • raw
                      • annotate foo @ 1:a7c1559b7bba 43c799df6e75 9d8c40cba617 log
                      • graph
                      • changeset
                      • browse
                      • file
                      • latest
                      • comparison
                      • annotate
                      • file log
                      • raw
                      • diff foo @ 1:a7c1559b7bba 43c799df6e75 9d8c40cba617 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'comparison/xyzzy/foo?style=paper' | egrep $REVLINKS
                      • log
                      • graph
                      • changeset
                      • browse
                      • file
                      • latest
                      • diff
                      • annotate
                      • file log
                      • raw
                      • comparison foo @ 1:a7c1559b7bba 43c799df6e75 9d8c40cba617 (De)referencing symbolic revisions (coal) $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'shortlog?style=coal' | egrep $REVLINKS
                      • graph
                      • changeset
                      • browse
                      • zip less more | rev 2: (0) tip third second first less more | rev 2: (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'graph?style=coal' | egrep $REVLINKS
                      • log
                      • changeset
                      • browse
                      • less more | rev 2: (0) tip less more | rev 2: (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'file?style=coal' | egrep $REVLINKS
                      • log
                      • graph
                      • changeset
                      • zip directory / @ 2:9d8c40cba617 [up] $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'branches?style=coal' | egrep $REVLINKS $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'tags?style=coal' | egrep $REVLINKS $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'bookmarks?style=coal' | egrep $REVLINKS $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'shortlog?style=coal&rev=all()' | egrep $REVLINKS third second first $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'rev/xyzzy?style=coal' | egrep $REVLINKS
                      • log
                      • graph
                      • raw
                      • browse
                      • zip changeset 1:a7c1559b7bba 43c799df6e75 9d8c40cba617 foo $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'shortlog/xyzzy?style=coal' | egrep $REVLINKS
                      • graph
                      • changeset
                      • browse
                      • zip less more | rev 1: (0) tip second first less more | rev 1: (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'graph/xyzzy?style=coal' | egrep $REVLINKS
                      • log
                      • changeset
                      • browse
                      • less more | rev 1: (0) tip less more | rev 1: (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'file/xyzzy?style=coal' | egrep $REVLINKS
                      • log
                      • graph
                      • changeset
                      • zip directory / @ 1:a7c1559b7bba [up] $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'file/xyzzy/foo?style=coal' | egrep $REVLINKS
                      • log
                      • graph
                      • changeset
                      • browse
                      • latest
                      • diff
                      • comparison
                      • annotate
                      • file log
                      • raw
                      • view foo @ 1:a7c1559b7bba 43c799df6e75 9d8c40cba617 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'log/xyzzy/foo?style=coal' | egrep $REVLINKS href="/atom-log/tip/foo" title="Atom feed for test:foo" /> href="/rss-log/tip/foo" title="RSS feed for test:foo" />
                      • log
                      • graph
                      • changeset
                      • browse
                      • file
                      • diff
                      • comparison
                      • annotate
                      • raw
                      • log foo @ 1:a7c1559b7bba less more | (0) tip
                        second first less more | (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'annotate/xyzzy/foo?style=coal' | egrep $REVLINKS
                      • log
                      • graph
                      • changeset
                      • browse
                      • file
                      • latest
                      • diff
                      • comparison
                      • file log
                      • raw
                      • annotate foo @ 1:a7c1559b7bba 43c799df6e75 9d8c40cba617 log
                      • graph
                      • changeset
                      • browse
                      • file
                      • latest
                      • comparison
                      • annotate
                      • file log
                      • raw
                      • diff foo @ 1:a7c1559b7bba 43c799df6e75 9d8c40cba617 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'comparison/xyzzy/foo?style=coal' | egrep $REVLINKS
                      • log
                      • graph
                      • changeset
                      • browse
                      • file
                      • latest
                      • diff
                      • annotate
                      • file log
                      • raw
                      • comparison foo @ 1:a7c1559b7bba 43c799df6e75 9d8c40cba617 (De)referencing symbolic revisions (gitweb) $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'summary?style=gitweb' | egrep $REVLINKS files | zip | changeset | files changeset | files changeset | files xyzzy changeset | changelog | files default changeset | changelog | files $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'shortlog?style=gitweb' | egrep $REVLINKS changelog | graph | files | zip |
                        (0) tip
                        changeset | files changeset | files changeset | files (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'log?style=gitweb' | egrep $REVLINKS shortlog | graph | files | zip | (0) tip
                        Thu, 01 Jan 1970 00:00:00 +0000third default tip changeset
                        Thu, 01 Jan 1970 00:00:00 +0000second xyzzy changeset
                        Thu, 01 Jan 1970 00:00:00 +0000first changeset
                        (0) tip
                        $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'graph?style=gitweb' | egrep $REVLINKS shortlog | changelog | files | less more | (0) tip
                        less more | (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'tags?style=gitweb' | egrep $REVLINKS tip changeset | changelog | files $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'bookmarks?style=gitweb' | egrep $REVLINKS xyzzy changeset | changelog | files $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'branches?style=gitweb' | egrep $REVLINKS default changeset | changelog | files $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'file?style=gitweb' | egrep $REVLINKS changeset | zip | [up] dir files foo file | revisions | annotate $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'shortlog?style=gitweb&rev=all()' | egrep $REVLINKS files | zip Thu, 01 Jan 1970 00:00:00 +0000third default tip changeset
                        Thu, 01 Jan 1970 00:00:00 +0000second xyzzy changeset
                        Thu, 01 Jan 1970 00:00:00 +0000first changeset
                        $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'rev/xyzzy?style=gitweb' | egrep $REVLINKS shortlog | changelog | graph | files | raw | zip | second xyzzy a7c1559b7bba 43c799df6e75 9d8c40cba617 foo file | annotate | diff | comparison | revisions $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'shortlog/xyzzy?style=gitweb' | egrep $REVLINKS changelog | graph | files | zip |
                        (0) tip
                        changeset | files changeset | files (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'log/xyzzy?style=gitweb' | egrep $REVLINKS shortlog | graph | files | zip | (0) tip
                        Thu, 01 Jan 1970 00:00:00 +0000second xyzzy changeset
                        Thu, 01 Jan 1970 00:00:00 +0000first changeset
                        (0) tip
                        $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'graph/xyzzy?style=gitweb' | egrep $REVLINKS shortlog | changelog | files | less more | (0) tip
                        less more | (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'file/xyzzy?style=gitweb' | egrep $REVLINKS changeset | zip | [up] dir files foo file | revisions | annotate $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'file/xyzzy/foo?style=gitweb' | egrep $REVLINKS files | changeset | latest | revisions | annotate | diff | comparison | raw | a7c1559b7bba 9d8c40cba617 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'log/xyzzy/foo?style=gitweb' | egrep $REVLINKS file | annotate | diff | comparison | rss | (0) tip file | diff | annotate file | diff | annotate (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'annotate/xyzzy/foo?style=gitweb' | egrep $REVLINKS files | changeset | file | latest | revisions | diff | comparison | raw | a7c1559b7bba 9d8c40cba617 files | changeset | file | latest | revisions | annotate | comparison | raw | a7c1559b7bba 9d8c40cba617 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'comparison/xyzzy/foo?style=gitweb' | egrep $REVLINKS files | changeset | file | latest | revisions | annotate | diff | raw | a7c1559b7bba 9d8c40cba617 (De)referencing symbolic revisions (monoblue) $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'summary?style=monoblue' | egrep $REVLINKS
                      • zip
                      • changeset | files changeset | files changeset | files xyzzy changeset | changelog | files default changeset | changelog | files $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'shortlog?style=monoblue' | egrep $REVLINKS
                      • graph
                      • files
                      • zip
                      • changeset | files changeset | files changeset | files (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'log?style=monoblue' | egrep $REVLINKS
                      • graph
                      • files
                      • zip
                      • third default tip

                        second xyzzy

                        first

                        (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'graph?style=monoblue' | egrep $REVLINKS
                      • files
                      • less more | (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'tags?style=monoblue' | egrep $REVLINKS tip changeset | changelog | files $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'bookmarks?style=monoblue' | egrep $REVLINKS xyzzy changeset | changelog | files $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'branches?style=monoblue' | egrep $REVLINKS default changeset | changelog | files $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'file?style=monoblue' | egrep $REVLINKS
                      • graph
                      • changeset
                      • zip
                      • [up] dir files foo file | revisions | annotate $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'shortlog?style=monoblue&rev=all()' | egrep $REVLINKS
                      • zip
                      • third default tip

                        second xyzzy

                        first

                        $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'rev/xyzzy?style=monoblue' | egrep $REVLINKS
                      • graph
                      • files
                      • raw
                      • zip
                      • second xyzzy

                        a7c1559b7bba
                        43c799df6e75
                        9d8c40cba617
                        foo file | annotate | diff | comparison | revisions $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'shortlog/xyzzy?style=monoblue' | egrep $REVLINKS
                      • graph
                      • files
                      • zip
                      • changeset | files changeset | files (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'log/xyzzy?style=monoblue' | egrep $REVLINKS
                      • graph
                      • files
                      • zip
                      • second xyzzy

                        first

                        (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'graph/xyzzy?style=monoblue' | egrep $REVLINKS
                      • files
                      • less more | (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'file/xyzzy?style=monoblue' | egrep $REVLINKS
                      • graph
                      • changeset
                      • zip
                      • [up] dir files foo file | revisions | annotate $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'file/xyzzy/foo?style=monoblue' | egrep $REVLINKS
                      • graph
                      • files
                      • latest
                      • revisions
                      • annotate
                      • diff
                      • comparison
                      • raw
                      • a7c1559b7bba
                        9d8c40cba617 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'log/xyzzy/foo?style=monoblue' | egrep $REVLINKS
                      • graph
                      • files
                      • file
                      • annotate
                      • diff
                      • comparison
                      • rss
                      • file | diff | annotate file | diff | annotate (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'annotate/xyzzy/foo?style=monoblue' | egrep $REVLINKS
                      • graph
                      • files
                      • file
                      • latest
                      • revisions
                      • diff
                      • comparison
                      • raw
                      • a7c1559b7bba
                        9d8c40cba617 graph
                      • files
                      • file
                      • latest
                      • revisions
                      • annotate
                      • comparison
                      • raw
                      • a7c1559b7bba
                        43c799df6e75
                        9d8c40cba617
                        $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'comparison/xyzzy/foo?style=monoblue' | egrep $REVLINKS
                      • graph
                      • files
                      • file
                      • latest
                      • revisions
                      • annotate
                      • diff
                      • raw
                      • a7c1559b7bba
                        43c799df6e75
                        9d8c40cba617
                        (De)referencing symbolic revisions (spartan) $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'shortlog?style=spartan' | egrep $REVLINKS changelog graph files zip navigate: (0) tip third second first navigate: (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'log?style=spartan' | egrep $REVLINKS shortlog graph files zip navigate: (0) tip 9d8c40cba617 files: foo a7c1559b7bba files: foo 43c799df6e75 files: dir/bar foo navigate: (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'graph?style=spartan' | egrep $REVLINKS changelog shortlog files navigate: (0) tip navigate: (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'tags?style=spartan' | egrep $REVLINKS tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'branches?style=spartan' | egrep $REVLINKS default $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'file?style=spartan' | egrep $REVLINKS changelog shortlog graph changeset zip

                        Mercurial / files for changeset 9d8c40cba617: /

                        [up] dir/ foo $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'shortlog?style=spartan&rev=all()' | egrep $REVLINKS zip 9d8c40cba617 a7c1559b7bba files: foo a7c1559b7bba 43c799df6e75 9d8c40cba617 files: foo 43c799df6e75 a7c1559b7bba files: dir/bar foo $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'rev/xyzzy?style=spartan' | egrep $REVLINKS changelog shortlog graph files raw zip a7c1559b7bba 43c799df6e75 9d8c40cba617 foo $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'shortlog/xyzzy?style=spartan' | egrep $REVLINKS changelog graph files zip navigate: (0) tip second first navigate: (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'log/xyzzy?style=spartan' | egrep $REVLINKS shortlog graph files zip navigate: (0) tip a7c1559b7bba files: foo 43c799df6e75 files: dir/bar foo navigate: (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'graph/xyzzy?style=spartan' | egrep $REVLINKS changelog shortlog files navigate: (0) tip navigate: (0) tip $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'file/xyzzy?style=spartan' | egrep $REVLINKS changelog shortlog graph changeset zip

                        Mercurial / files for changeset a7c1559b7bba: /

                        [up] dir/ foo $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'file/xyzzy/foo?style=spartan' | egrep $REVLINKS changelog shortlog graph changeset files revisions annotate raw a7c1559b7bba 9d8c40cba617 $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'log/xyzzy/foo?style=spartan' | egrep $REVLINKS href="/atom-log/tip/foo" title="Atom feed for test:foo"> href="/rss-log/tip/foo" title="RSS feed for test:foo"> file annotate rss atom

                        navigate: (0) tip

                        second a7c1559b7bba (diff) (annotate) first 43c799df6e75 (diff) (annotate) $ "$TESTDIR/get-with-headers.py" 127.0.0.1:$HGPORT 'annotate/xyzzy/foo?style=spartan' | egrep $REVLINKS changelog shortlog graph changeset files file revisions raw a7c1559b7bba 9d8c40cba617 changelog shortlog graph changeset file revisions annotate raw a7c1559b7bba 43c799df6e75 9d8c40cba617 Done $ cat errors.log $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS $ cd .. mercurial-3.7.3/tests/test-merge2.t0000644000175000017500000000215412676531525016616 0ustar mpmmpm00000000000000 $ hg init t $ cd t $ echo This is file a1 > a $ hg add a $ hg commit -m "commit #0" $ echo This is file b1 > b $ hg add b $ hg commit -m "commit #1" $ rm b $ hg update 0 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo This is file b2 > b $ hg add b $ hg commit -m "commit #2" created new head $ cd ..; rm -r t $ mkdir t $ cd t $ hg init $ echo This is file a1 > a $ hg add a $ hg commit -m "commit #0" $ echo This is file b1 > b $ hg add b $ hg commit -m "commit #1" $ rm b $ hg update 0 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo This is file b2 > b $ hg commit -A -m "commit #2" adding b created new head $ cd ..; rm -r t $ hg init t $ cd t $ echo This is file a1 > a $ hg add a $ hg commit -m "commit #0" $ echo This is file b1 > b $ hg add b $ hg commit -m "commit #1" $ rm b $ hg remove b $ hg update 0 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo This is file b2 > b $ hg commit -A -m "commit #2" adding b created new head $ cd .. 
mercurial-3.7.3/tests/test-basic.t0000644000175000017500000000256312676531525016522 0ustar mpmmpm00000000000000Create a repository: $ hg config defaults.backout=-d "0 0" defaults.commit=-d "0 0" defaults.shelve=--date "0 0" defaults.tag=-d "0 0" devel.all-warnings=true largefiles.usercache=$TESTTMP/.cache/largefiles (glob) ui.slash=True ui.interactive=False ui.mergemarkers=detailed ui.promptecho=True $ hg init t $ cd t Make a changeset: $ echo a > a $ hg add a $ hg commit -m test This command is ancient: $ hg history changeset: 0:acb14030fe0a tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: test Verify that updating to revision 0 via commands.update() works properly $ cat < update_to_rev0.py > from mercurial import ui, hg, commands > myui = ui.ui() > repo = hg.repository(myui, path='.') > commands.update(myui, repo, rev=0) > EOF $ hg up null 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ python ./update_to_rev0.py 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg identify -n 0 Poke around at hashes: $ hg manifest --debug b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 644 a $ hg cat a a Verify should succeed: $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 1 changesets, 1 total revisions At the end... $ cd .. mercurial-3.7.3/tests/test-convert-cvs-detectmerge.t0000644000175000017500000001231512676531525022174 0ustar mpmmpm00000000000000#require cvs Test config convert.cvsps.mergefrom config setting. (Should test similar mergeto feature, but I don't understand it yet.) Requires builtin cvsps. 
$ CVSROOT=`pwd`/cvsrepo $ export CVSROOT $ cvscall() > { > cvs -f "$@" > } output of 'cvs ci' varies unpredictably, so just discard it XXX copied from test-convert-cvs-synthetic $ cvsci() > { > sleep 1 > cvs -f ci "$@" > /dev/null > } XXX copied from test-convert-cvs-synthetic $ cat <> $HGRCPATH > [extensions] > convert = > [convert] > cvsps.cache = 0 > cvsps.mergefrom = \[MERGE from (\S+)\] > EOF create cvs repository with one project $ cvscall -q -d "$CVSROOT" init $ mkdir cvsrepo/proj populate cvs repository $ cvscall -Q co proj $ cd proj $ touch file1 $ cvscall -Q add file1 $ cvsci -m"add file1 on trunk" cvs commit: Examining . create two release branches $ cvscall -q tag -b v1_0 T file1 $ cvscall -q tag -b v1_1 T file1 modify file1 on branch v1_0 $ cvscall -Q update -rv1_0 $ sleep 1 $ echo "change" >> file1 $ cvsci -m"add text" cvs commit: Examining . make unrelated change on v1_1 $ cvscall -Q update -rv1_1 $ touch unrelated $ cvscall -Q add unrelated $ cvsci -m"unrelated change" cvs commit: Examining . merge file1 to v1_1 $ cvscall -Q update -jv1_0 RCS file: $TESTTMP/cvsrepo/proj/file1,v retrieving revision 1.1 retrieving revision 1.1.2.1 Merging differences between 1.1 and 1.1.2.1 into file1 $ cvsci -m"add text [MERGE from v1_0]" cvs commit: Examining . merge change to trunk $ cvscall -Q update -A $ cvscall -Q update -jv1_1 RCS file: $TESTTMP/cvsrepo/proj/file1,v retrieving revision 1.1 retrieving revision 1.1.4.1 Merging differences between 1.1 and 1.1.4.1 into file1 $ cvsci -m"add text [MERGE from v1_1]" cvs commit: Examining . 
non-merged change on trunk $ echo "foo" > file2 $ cvscall -Q add file2 $ cvsci -m"add file2 on trunk" file2 this will create rev 1.3 change on trunk to backport $ echo "backport me" >> file1 $ cvsci -m"add other text" file1 $ cvscall log file1 RCS file: $TESTTMP/cvsrepo/proj/file1,v Working file: file1 head: 1.3 branch: locks: strict access list: symbolic names: v1_1: 1.1.0.4 v1_0: 1.1.0.2 keyword substitution: kv total revisions: 5; selected revisions: 5 description: ---------------------------- revision 1.3 date: * (glob) add other text ---------------------------- revision 1.2 date: * (glob) add text [MERGE from v1_1] ---------------------------- revision 1.1 date: * (glob) branches: 1.1.2; 1.1.4; add file1 on trunk ---------------------------- revision 1.1.4.1 date: * (glob) add text [MERGE from v1_0] ---------------------------- revision 1.1.2.1 date: * (glob) add text ============================================================================= XXX how many ways are there to spell "trunk" with CVS? backport trunk change to v1_1 $ cvscall -Q update -rv1_1 $ cvscall -Q update -j1.2 -j1.3 file1 RCS file: $TESTTMP/cvsrepo/proj/file1,v retrieving revision 1.2 retrieving revision 1.3 Merging differences between 1.2 and 1.3 into file1 $ cvsci -m"add other text [MERGE from HEAD]" file1 fix bug on v1_1, merge to trunk with error $ cvscall -Q update -rv1_1 $ echo "merge forward" >> file1 $ cvscall -Q tag unmerged $ cvsci -m"fix file1" cvs commit: Examining . $ cvscall -Q update -A $ cvscall -Q update -junmerged -jv1_1 RCS file: $TESTTMP/cvsrepo/proj/file1,v retrieving revision 1.1.4.2 retrieving revision 1.1.4.3 Merging differences between 1.1.4.2 and 1.1.4.3 into file1 note the typo in the commit log message $ cvsci -m"fix file1 [MERGE from v1-1]" cvs commit: Examining . $ cvs -Q tag -d unmerged convert to hg $ cd .. $ hg convert proj proj.hg initializing destination proj.hg repository connecting to $TESTTMP/cvsrepo scanning source... 
collecting CVS rlog 12 log entries creating changesets warning: CVS commit message references non-existent branch 'v1-1': fix file1 [MERGE from v1-1] 10 changeset entries sorting... converting... 9 add file1 on trunk 8 unrelated change 7 add text 6 add text [MERGE from v1_0] 5 add text [MERGE from v1_1] 4 add file2 on trunk 3 add other text 2 add other text [MERGE from HEAD] 1 fix file1 0 fix file1 [MERGE from v1-1] complete log $ template="{rev}: '{branches}' {desc}\n" $ hg -R proj.hg log --template="$template" 9: '' fix file1 [MERGE from v1-1] 8: 'v1_1' fix file1 7: 'v1_1' add other text [MERGE from HEAD] 6: '' add other text 5: '' add file2 on trunk 4: '' add text [MERGE from v1_1] 3: 'v1_1' add text [MERGE from v1_0] 2: 'v1_0' add text 1: 'v1_1' unrelated change 0: '' add file1 on trunk graphical log $ hg -R proj.hg log -G --template="$template" o 9: '' fix file1 [MERGE from v1-1] | | o 8: 'v1_1' fix file1 | | | o 7: 'v1_1' add other text [MERGE from HEAD] |/| o | 6: '' add other text | | o | 5: '' add file2 on trunk | | o | 4: '' add text [MERGE from v1_1] |\| | o 3: 'v1_1' add text [MERGE from v1_0] | |\ +---o 2: 'v1_0' add text | | | o 1: 'v1_1' unrelated change |/ o 0: '' add file1 on trunk mercurial-3.7.3/tests/test-strict.t0000644000175000017500000000302112676531525016737 0ustar mpmmpm00000000000000 $ hg init $ echo a > a $ hg ci -Ama adding a $ hg an a 0: a $ hg --config ui.strict=False an a 0: a $ echo "[ui]" >> $HGRCPATH $ echo "strict=True" >> $HGRCPATH $ hg an a hg: unknown command 'an' Mercurial Distributed SCM basic commands: add add the specified files on the next commit annotate show changeset information by line for each file clone make a copy of an existing repository commit commit the specified files or all outstanding changes diff diff repository (or selected files) export dump the header and diffs for one or more changesets forget forget the specified files on the next commit init create a new repository in the given directory log show 
revision history of entire repository or files merge merge another revision into working directory pull pull changes from the specified source push push changes to the specified destination remove remove the specified files on the next commit serve start stand-alone webserver status show changed files in the working directory summary summarize working directory state update update working directory (or switch revisions) (use "hg help" for the full list of commands or "hg -v" for details) [255] $ hg annotate a 0: a should succeed - up is an alias, not an abbreviation $ hg up 0 files updated, 0 files merged, 0 files removed, 0 files unresolved mercurial-3.7.3/tests/test-diff-binary-file.t0000644000175000017500000000565412676531525020554 0ustar mpmmpm00000000000000 $ hg init a $ cd a $ cp "$TESTDIR/binfile.bin" . $ hg add binfile.bin $ hg ci -m 'add binfile.bin' $ echo >> binfile.bin $ hg ci -m 'change binfile.bin' $ hg revert -r 0 binfile.bin $ hg ci -m 'revert binfile.bin' $ hg cp binfile.bin nonbinfile $ echo text > nonbinfile $ hg ci -m 'make non-binary copy of binary file' $ hg diff --nodates -r 0 -r 1 diff -r 48b371597640 -r acea2ab458c8 binfile.bin Binary file binfile.bin has changed $ hg diff --nodates -r 0 -r 2 $ hg diff --git -r 0 -r 1 diff --git a/binfile.bin b/binfile.bin index 37ba3d1c6f17137d9c5f5776fa040caf5fe73ff9..58dc31a9e2f40f74ff3b45903f7d620b8e5b7356 GIT binary patch literal 594 zc$@)J0W$NUkd zX$nnYLt$-$V!?uy+1V%`z&Eh=ah|duER<4|QWhju3gb^nF*8iYobxWG-qqXl=2~5M z*IoDB)sG^CfNuoBmqLTVU^<;@nwHP!1wrWd`{(mHo6VNXWtyh{alzqmsH*yYzpvLT zLdYlXB*ODN003Z&P17_@)3Pi=i0wb04W$NUkd zX$nnYLt$-$V!?uy+1V%`z&Eh=ah|duER<4|QWhju3gb^nF*8iYobxWG-qqXl=2~5M z*IoDB)sG^CfNuoBmqLTVU^<;@nwHP!1wrWd`{(mHo6VNXWtyh{alzqmsH*yYzpvLT zLdYlXB*ODN003Z&P17_@)3Pi=i0wb04 b $ hg ci -Am "b" adding b $ echo a > a $ hg ci -Am "first a" adding a $ hg tag -r 1 a-tag $ hg bookmark -r 1 a-bookmark $ hg rm a $ hg ci -m "del a" $ hg branch a-branch marked working directory as branch a-branch (branches 
are permanent and global, did you want a bookmark?) $ echo b > a $ hg ci -Am "second a" adding a $ hg rm a $ hg ci -m "del2 a" $ hg mv b c $ hg ci -m "mv b" $ echo c >> c $ hg ci -m "change c" $ hg log -p changeset: 7:46c1a66bd8fc branch: a-branch tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: change c diff -r c9637d3cc8ef -r 46c1a66bd8fc c --- a/c Thu Jan 01 00:00:00 1970 +0000 +++ b/c Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,2 @@ b +c changeset: 6:c9637d3cc8ef branch: a-branch user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: mv b diff -r 958bd88be4eb -r c9637d3cc8ef b --- a/b Thu Jan 01 00:00:00 1970 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -b diff -r 958bd88be4eb -r c9637d3cc8ef c --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/c Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +b changeset: 5:958bd88be4eb branch: a-branch user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: del2 a diff -r 3f41bc784e7e -r 958bd88be4eb a --- a/a Thu Jan 01 00:00:00 1970 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -b changeset: 4:3f41bc784e7e branch: a-branch user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: second a diff -r 292258f86fdf -r 3f41bc784e7e a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +b changeset: 3:292258f86fdf user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: del a diff -r 94c9dd5ca9b4 -r 292258f86fdf a --- a/a Thu Jan 01 00:00:00 1970 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -a changeset: 2:94c9dd5ca9b4 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Added tag a-tag for changeset 5ed941583260 diff -r 5ed941583260 -r 94c9dd5ca9b4 .hgtags --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/.hgtags Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +5ed941583260248620985524192fdc382ef57c36 a-tag changeset: 1:5ed941583260 bookmark: a-bookmark tag: a-tag user: test date: Thu Jan 01 
00:00:00 1970 +0000 summary: first a diff -r 6563da9dcf87 -r 5ed941583260 a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +a changeset: 0:6563da9dcf87 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: b diff -r 000000000000 -r 6563da9dcf87 b --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/b Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +b $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -E errors.log $ cat hg.pid >> $DAEMON_PIDS tip - two revisions $ (get-with-headers.py localhost:$HGPORT 'log/tip/a') 200 Script output follows test: a history

                        log a @ 4:3f41bc784e7e a-branch

                        age author description
                        Thu, 01 Jan 1970 00:00:00 +0000 test second a a-branch
                        Thu, 01 Jan 1970 00:00:00 +0000 test first a a-tag a-bookmark
                        second version - two revisions $ (get-with-headers.py localhost:$HGPORT 'log/4/a') 200 Script output follows test: a history

                        log a @ 4:3f41bc784e7e a-branch

                        age author description
                        Thu, 01 Jan 1970 00:00:00 +0000 test second a a-branch
                        Thu, 01 Jan 1970 00:00:00 +0000 test first a a-tag a-bookmark
                        first deleted - one revision $ (get-with-headers.py localhost:$HGPORT 'log/3/a') 200 Script output follows test: a history

                        log a @ 1:5ed941583260 a-tag a-bookmark

                        age author description
                        Thu, 01 Jan 1970 00:00:00 +0000 test first a a-tag a-bookmark
                        first version - one revision $ (get-with-headers.py localhost:$HGPORT 'log/1/a') 200 Script output follows test: a history

                        log a @ 1:5ed941583260 a-tag a-bookmark

                        age author description
                        Thu, 01 Jan 1970 00:00:00 +0000 test first a a-tag a-bookmark
                        before addition - error $ (get-with-headers.py localhost:$HGPORT 'log/0/a') 404 Not Found test: error

                        error

                        An error occurred while processing your request:

                        a@6563da9dcf87: not found in manifest

                        [1] should show base link, use spartan because it shows it $ (get-with-headers.py localhost:$HGPORT 'log/tip/c?style=spartan') 200 Script output follows test: c history

                        Mercurial / c revision history

                        navigate: (0) tip

                        Thu, 01 Jan 1970 00:00:00 +0000: change c
                        revision 1: 46c1a66bd8fc (diff) (annotate)
                        author: test
                        date: Thu, 01 Jan 1970 00:00:00 +0000
                        Thu, 01 Jan 1970 00:00:00 +0000: mv b
                        revision 0: c9637d3cc8ef (diff) (annotate)
                        base: b@1e88685f5dde
                        author: test
                        date: Thu, 01 Jan 1970 00:00:00 +0000
                        rss log $ (get-with-headers.py localhost:$HGPORT 'rss-log/tip/a') 200 Script output follows http://*:$HGPORT/ (glob) en-us test: a history a revision history second a http://*:$HGPORT/log3f41bc784e7e/a (glob) test Thu, 01 Jan 1970 00:00:00 +0000 first a http://*:$HGPORT/log5ed941583260/a (glob) test Thu, 01 Jan 1970 00:00:00 +0000 atom log $ (get-with-headers.py localhost:$HGPORT 'atom-log/tip/a') 200 Script output follows http://*:$HGPORT/atom-log/tip/a (glob) (glob) test: a history 1970-01-01T00:00:00+00:00 [a-branch] second a http://*:$HGPORT/#changeset-3f41bc784e7e73035c6d47112c6cc7efb673adf8 (glob) (glob) test test 1970-01-01T00:00:00+00:00 1970-01-01T00:00:00+00:00
                        changeset 3f41bc784e7e
                        branch a-branch
                        bookmark
                        tag
                        user test
                        description second a
                        files
                        first a http://*:$HGPORT/#changeset-5ed941583260248620985524192fdc382ef57c36 (glob) (glob) test test 1970-01-01T00:00:00+00:00 1970-01-01T00:00:00+00:00
                        changeset 5ed941583260
                        branch
                        bookmark a-bookmark
                        tag a-tag
                        user test
                        description first a
                        files
                        errors $ cat errors.log $ cd .. mercurial-3.7.3/tests/test-bundle2-exchange.t0000644000175000017500000012007012676531525020546 0ustar mpmmpm00000000000000Test exchange of common information using bundle2 $ getmainid() { > hg -R main log --template '{node}\n' --rev "$1" > } enable obsolescence $ cp $HGRCPATH $TESTTMP/hgrc.orig $ cat > $TESTTMP/bundle2-pushkey-hook.sh << EOF > echo pushkey: lock state after \"\$HG_NAMESPACE\" > hg debuglock > EOF $ cat >> $HGRCPATH << EOF > [experimental] > evolution=createmarkers,exchange > bundle2-exp=True > bundle2-output-capture=True > [ui] > ssh=python "$TESTDIR/dummyssh" > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline} > [web] > push_ssl = false > allow_push = * > [phases] > publish=False > [hooks] > pretxnclose.tip = hg log -r tip -T "pre-close-tip:{node|short} {phase} {bookmarks}\n" > txnclose.tip = hg log -r tip -T "postclose-tip:{node|short} {phase} {bookmarks}\n" > txnclose.env = sh -c "HG_LOCAL= printenv.py txnclose" > pushkey= sh "$TESTTMP/bundle2-pushkey-hook.sh" > EOF The extension requires a repo (currently unused) $ hg init main $ cd main $ touch a $ hg add a $ hg commit -m 'a' pre-close-tip:3903775176ed draft postclose-tip:3903775176ed draft txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob) $ hg unbundle $TESTDIR/bundles/rebase.hg adding changesets adding manifests adding file changes added 8 changesets with 7 changes to 7 files (+3 heads) pre-close-tip:02de42196ebe draft postclose-tip:02de42196ebe draft txnclose hook: HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_NODE_LAST=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_PHASES_MOVED=1 HG_SOURCE=unbundle HG_TXNID=TXN:* HG_TXNNAME=unbundle (glob) bundle:*/tests/bundles/rebase.hg HG_URL=bundle:*/tests/bundles/rebase.hg (glob) (run 'hg heads' to see heads, 'hg merge' to merge) $ cd .. 
Real world exchange ===================== Add more obsolescence information $ hg -R main debugobsolete -d '0 0' 1111111111111111111111111111111111111111 `getmainid 9520eea781bc` pre-close-tip:02de42196ebe draft postclose-tip:02de42196ebe draft txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob) $ hg -R main debugobsolete -d '0 0' 2222222222222222222222222222222222222222 `getmainid 24b6387c8c8c` pre-close-tip:02de42196ebe draft postclose-tip:02de42196ebe draft txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob) clone --pull $ hg -R main phase --public cd010b8cd998 pre-close-tip:02de42196ebe draft postclose-tip:02de42196ebe draft txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob) $ hg clone main other --pull --rev 9520eea781bc adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files 1 new obsolescence markers pre-close-tip:9520eea781bc draft postclose-tip:9520eea781bc draft txnclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=cd010b8cd998f3981a5a8115f94f8da4ab506089 HG_NODE_LAST=9520eea781bcca16c1e15acc0ba14335a0e8e5ba HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob) file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob) updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R other log -G @ 1:9520eea781bc draft Nicolas Dumazet E | o 0:cd010b8cd998 public Nicolas Dumazet A $ hg -R other debugobsolete 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} pull $ hg -R main phase --public 9520eea781bc pre-close-tip:02de42196ebe draft postclose-tip:02de42196ebe draft txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob) $ hg -R other pull -r 24b6387c8c8c pulling from $TESTTMP/main (glob) searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 
changes to 1 files (+1 heads) 1 new obsolescence markers pre-close-tip:24b6387c8c8c draft postclose-tip:24b6387c8c8c draft txnclose hook: HG_NEW_OBSMARKERS=1 HG_NODE=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_NODE_LAST=24b6387c8c8cae37178880f3fa95ded3cb1cf785 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob) file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg -R other log -G o 2:24b6387c8c8c draft Nicolas Dumazet F | | @ 1:9520eea781bc draft Nicolas Dumazet E |/ o 0:cd010b8cd998 public Nicolas Dumazet A $ hg -R other debugobsolete 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} pull empty (with phase movement) $ hg -R main phase --public 24b6387c8c8c pre-close-tip:02de42196ebe draft postclose-tip:02de42196ebe draft txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob) $ hg -R other pull -r 24b6387c8c8c pulling from $TESTTMP/main (glob) no changes found pre-close-tip:24b6387c8c8c public postclose-tip:24b6387c8c8c public txnclose hook: HG_NEW_OBSMARKERS=0 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob) file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob) $ hg -R other log -G o 2:24b6387c8c8c public Nicolas Dumazet F | | @ 1:9520eea781bc draft Nicolas Dumazet E |/ o 0:cd010b8cd998 public Nicolas Dumazet A $ hg -R other debugobsolete 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} pull empty $ hg -R other pull -r 24b6387c8c8c pulling from $TESTTMP/main (glob) no changes found pre-close-tip:24b6387c8c8c public 
postclose-tip:24b6387c8c8c public txnclose hook: HG_NEW_OBSMARKERS=0 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob) file:/*/$TESTTMP/main HG_URL=file:$TESTTMP/main (glob) $ hg -R other log -G o 2:24b6387c8c8c public Nicolas Dumazet F | | @ 1:9520eea781bc draft Nicolas Dumazet E |/ o 0:cd010b8cd998 public Nicolas Dumazet A $ hg -R other debugobsolete 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} add extra data to test their exchange during push $ hg -R main bookmark --rev eea13746799a book_eea1 pre-close-tip:02de42196ebe draft postclose-tip:02de42196ebe draft txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob) $ hg -R main debugobsolete -d '0 0' 3333333333333333333333333333333333333333 `getmainid eea13746799a` pre-close-tip:02de42196ebe draft postclose-tip:02de42196ebe draft txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob) $ hg -R main bookmark --rev 02de42196ebe book_02de pre-close-tip:02de42196ebe draft book_02de postclose-tip:02de42196ebe draft book_02de txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob) $ hg -R main debugobsolete -d '0 0' 4444444444444444444444444444444444444444 `getmainid 02de42196ebe` pre-close-tip:02de42196ebe draft book_02de postclose-tip:02de42196ebe draft book_02de txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob) $ hg -R main bookmark --rev 42ccdea3bb16 book_42cc pre-close-tip:02de42196ebe draft book_02de postclose-tip:02de42196ebe draft book_02de txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob) $ hg -R main debugobsolete -d '0 0' 5555555555555555555555555555555555555555 `getmainid 42ccdea3bb16` pre-close-tip:02de42196ebe draft book_02de postclose-tip:02de42196ebe draft 
book_02de txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob) $ hg -R main bookmark --rev 5fddd98957c8 book_5fdd pre-close-tip:02de42196ebe draft book_02de postclose-tip:02de42196ebe draft book_02de txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob) $ hg -R main debugobsolete -d '0 0' 6666666666666666666666666666666666666666 `getmainid 5fddd98957c8` pre-close-tip:02de42196ebe draft book_02de postclose-tip:02de42196ebe draft book_02de txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob) $ hg -R main bookmark --rev 32af7686d403 book_32af pre-close-tip:02de42196ebe draft book_02de postclose-tip:02de42196ebe draft book_02de txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob) $ hg -R main debugobsolete -d '0 0' 7777777777777777777777777777777777777777 `getmainid 32af7686d403` pre-close-tip:02de42196ebe draft book_02de postclose-tip:02de42196ebe draft book_02de txnclose hook: HG_NEW_OBSMARKERS=1 HG_TXNID=TXN:* HG_TXNNAME=debugobsolete (glob) $ hg -R other bookmark --rev cd010b8cd998 book_eea1 pre-close-tip:24b6387c8c8c public postclose-tip:24b6387c8c8c public txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob) $ hg -R other bookmark --rev cd010b8cd998 book_02de pre-close-tip:24b6387c8c8c public postclose-tip:24b6387c8c8c public txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob) $ hg -R other bookmark --rev cd010b8cd998 book_42cc pre-close-tip:24b6387c8c8c public postclose-tip:24b6387c8c8c public txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob) $ hg -R other bookmark --rev cd010b8cd998 book_5fdd pre-close-tip:24b6387c8c8c public postclose-tip:24b6387c8c8c public txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob) $ hg -R other bookmark --rev cd010b8cd998 book_32af pre-close-tip:24b6387c8c8c public postclose-tip:24b6387c8c8c public txnclose hook: 
HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob) $ hg -R main phase --public eea13746799a pre-close-tip:02de42196ebe draft book_02de postclose-tip:02de42196ebe draft book_02de txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob) push $ hg -R main push other --rev eea13746799a --bookmark book_eea1 pushing to other searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 0 changes to 0 files (-1 heads) remote: 1 new obsolescence markers remote: pre-close-tip:eea13746799a public book_eea1 remote: pushkey: lock state after "phases" remote: lock: free remote: wlock: free remote: pushkey: lock state after "bookmarks" remote: lock: free remote: wlock: free remote: postclose-tip:eea13746799a public book_eea1 remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_NODE_LAST=eea13746799a9e0bfd88f29d3c2e9dc9389f524f HG_PHASES_MOVED=1 HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=push (glob) updating bookmark book_eea1 pre-close-tip:02de42196ebe draft book_02de postclose-tip:02de42196ebe draft book_02de txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob) file:/*/$TESTTMP/other HG_URL=file:$TESTTMP/other (glob) $ hg -R other log -G o 3:eea13746799a public Nicolas Dumazet book_eea1 G |\ | o 2:24b6387c8c8c public Nicolas Dumazet F | | @ | 1:9520eea781bc public Nicolas Dumazet E |/ o 0:cd010b8cd998 public Nicolas Dumazet book_02de book_32af book_42cc book_5fdd A $ hg -R other debugobsolete 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 
+0000) {'user': 'test'} pull over ssh $ hg -R other pull ssh://user@dummy/main -r 02de42196ebe --bookmark book_02de pulling from ssh://user@dummy/main searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) 1 new obsolescence markers updating bookmark book_02de pre-close-tip:02de42196ebe draft book_02de postclose-tip:02de42196ebe draft book_02de txnclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_NODE_LAST=02de42196ebee42ef284b6780a87cdc96e8eaab6 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob) ssh://user@dummy/main HG_URL=ssh://user@dummy/main (run 'hg heads' to see heads, 'hg merge' to merge) $ hg -R other debugobsolete 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} pull over http $ hg -R main serve -p $HGPORT -d --pid-file=main.pid -E main-error.log $ cat main.pid >> $DAEMON_PIDS $ hg -R other pull http://localhost:$HGPORT/ -r 42ccdea3bb16 --bookmark book_42cc pulling from http://localhost:$HGPORT/ searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) 1 new obsolescence markers updating bookmark book_42cc pre-close-tip:42ccdea3bb16 draft book_42cc postclose-tip:42ccdea3bb16 draft book_42cc txnclose hook: HG_BOOKMARK_MOVED=1 HG_NEW_OBSMARKERS=1 HG_NODE=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 HG_NODE_LAST=42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 
HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_TXNNAME=pull (glob) http://localhost:$HGPORT/ HG_URL=http://localhost:$HGPORT/ (run 'hg heads .' to see heads, 'hg merge' to merge) $ cat main-error.log $ hg -R other debugobsolete 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} push over ssh $ hg -R main push ssh://user@dummy/other -r 5fddd98957c8 --bookmark book_5fdd pushing to ssh://user@dummy/other searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: 1 new obsolescence markers remote: pre-close-tip:5fddd98957c8 draft book_5fdd remote: pushkey: lock state after "bookmarks" remote: lock: free remote: wlock: free remote: postclose-tip:5fddd98957c8 draft book_5fdd remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_NODE_LAST=5fddd98957c8a54a4d436dfe1da9d87f21a1b97b HG_SOURCE=serve HG_TXNID=TXN:* HG_TXNNAME=serve HG_URL=remote:ssh:127.0.0.1 (glob) updating bookmark book_5fdd pre-close-tip:02de42196ebe draft book_02de postclose-tip:02de42196ebe draft book_02de txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob) ssh://user@dummy/other HG_URL=ssh://user@dummy/other $ hg -R other log -G o 6:5fddd98957c8 draft Nicolas Dumazet book_5fdd C | o 
5:42ccdea3bb16 draft Nicolas Dumazet book_42cc B | | o 4:02de42196ebe draft Nicolas Dumazet book_02de H | | | | o 3:eea13746799a public Nicolas Dumazet book_eea1 G | |/| | o | 2:24b6387c8c8c public Nicolas Dumazet F |/ / | @ 1:9520eea781bc public Nicolas Dumazet E |/ o 0:cd010b8cd998 public Nicolas Dumazet book_32af A $ hg -R other debugobsolete 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} push over http $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log $ cat other.pid >> $DAEMON_PIDS $ hg -R main phase --public 32af7686d403 pre-close-tip:02de42196ebe draft book_02de postclose-tip:02de42196ebe draft book_02de txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=phase (glob) $ hg -R main push http://localhost:$HGPORT2/ -r 32af7686d403 --bookmark book_32af pushing to http://localhost:$HGPORT2/ searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: 1 new obsolescence markers remote: pre-close-tip:32af7686d403 public book_32af remote: pushkey: lock state after "phases" remote: lock: free remote: wlock: free remote: pushkey: lock state after "bookmarks" remote: lock: free remote: wlock: 
free remote: postclose-tip:32af7686d403 public book_32af remote: txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_NEW_OBSMARKERS=1 HG_NODE=32af7686d403cf45b5d95f2d70cebea587ac806a HG_NODE_LAST=32af7686d403cf45b5d95f2d70cebea587ac806a HG_PHASES_MOVED=1 HG_SOURCE=serve HG_TXNID=TXN:* HG_TXNNAME=serve HG_URL=remote:http:127.0.0.1: (glob) updating bookmark book_32af pre-close-tip:02de42196ebe draft book_02de postclose-tip:02de42196ebe draft book_02de txnclose hook: HG_SOURCE=push-response HG_TXNID=TXN:* HG_TXNNAME=push-response (glob) http://localhost:$HGPORT2/ HG_URL=http://localhost:$HGPORT2/ $ cat other-error.log Check final content. $ hg -R other log -G o 7:32af7686d403 public Nicolas Dumazet book_32af D | o 6:5fddd98957c8 public Nicolas Dumazet book_5fdd C | o 5:42ccdea3bb16 public Nicolas Dumazet book_42cc B | | o 4:02de42196ebe draft Nicolas Dumazet book_02de H | | | | o 3:eea13746799a public Nicolas Dumazet book_eea1 G | |/| | o | 2:24b6387c8c8c public Nicolas Dumazet F |/ / | @ 1:9520eea781bc public Nicolas Dumazet E |/ o 0:cd010b8cd998 public Nicolas Dumazet A $ hg -R other debugobsolete 1111111111111111111111111111111111111111 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} 2222222222222222222222222222222222222222 24b6387c8c8cae37178880f3fa95ded3cb1cf785 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} 3333333333333333333333333333333333333333 eea13746799a9e0bfd88f29d3c2e9dc9389f524f 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} 4444444444444444444444444444444444444444 02de42196ebee42ef284b6780a87cdc96e8eaab6 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} 5555555555555555555555555555555555555555 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} 6666666666666666666666666666666666666666 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} 7777777777777777777777777777777777777777 32af7686d403cf45b5d95f2d70cebea587ac806a 
0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} (check that no 'pending' files remain) $ ls -1 other/.hg/bookmarks* other/.hg/bookmarks $ ls -1 other/.hg/store/phaseroots* other/.hg/store/phaseroots $ ls -1 other/.hg/store/00changelog.i* other/.hg/store/00changelog.i Error Handling ============== Check that errors are properly returned to the client during push. Setting up $ cat > failpush.py << EOF > """A small extension that makes push fails when using bundle2 > > used to test error handling in bundle2 > """ > > from mercurial import error > from mercurial import bundle2 > from mercurial import exchange > from mercurial import extensions > > def _pushbundle2failpart(pushop, bundler): > reason = pushop.ui.config('failpush', 'reason', None) > part = None > if reason == 'abort': > bundler.newpart('test:abort') > if reason == 'unknown': > bundler.newpart('test:unknown') > if reason == 'race': > # 20 Bytes of crap > bundler.newpart('check:heads', data='01234567890123456789') > > @bundle2.parthandler("test:abort") > def handleabort(op, part): > raise error.Abort('Abandon ship!', hint="don't panic") > > def uisetup(ui): > exchange.b2partsgenmapping['failpart'] = _pushbundle2failpart > exchange.b2partsgenorder.insert(0, 'failpart') > > EOF $ cd main $ hg up tip 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo 'I' > I $ hg add I $ hg ci -m 'I' pre-close-tip:e7ec4e813ba6 draft postclose-tip:e7ec4e813ba6 draft txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob) $ hg id e7ec4e813ba6 tip $ cd .. $ cat << EOF >> $HGRCPATH > [extensions] > failpush=$TESTTMP/failpush.py > EOF $ killdaemons.py $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log $ cat other.pid >> $DAEMON_PIDS Doing the actual push: Abort error $ cat << EOF >> $HGRCPATH > [failpush] > reason = abort > EOF $ hg -R main push other -r e7ec4e813ba6 pushing to other searching for changes abort: Abandon ship! 
(don't panic) [255] $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6 pushing to ssh://user@dummy/other searching for changes remote: Abandon ship! abort: push failed on remote (don't panic) [255] $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6 pushing to http://localhost:$HGPORT2/ searching for changes remote: Abandon ship! abort: push failed on remote (don't panic) [255] Doing the actual push: unknown mandatory parts $ cat << EOF >> $HGRCPATH > [failpush] > reason = unknown > EOF $ hg -R main push other -r e7ec4e813ba6 pushing to other searching for changes abort: missing support for test:unknown [255] $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6 pushing to ssh://user@dummy/other searching for changes abort: missing support for test:unknown [255] $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6 pushing to http://localhost:$HGPORT2/ searching for changes abort: missing support for test:unknown [255] Doing the actual push: race $ cat << EOF >> $HGRCPATH > [failpush] > reason = race > EOF $ hg -R main push other -r e7ec4e813ba6 pushing to other searching for changes abort: push failed: 'repository changed while pushing - please try again' [255] $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6 pushing to ssh://user@dummy/other searching for changes abort: push failed: 'repository changed while pushing - please try again' [255] $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6 pushing to http://localhost:$HGPORT2/ searching for changes abort: push failed: 'repository changed while pushing - please try again' [255] Doing the actual push: hook abort $ cat << EOF >> $HGRCPATH > [failpush] > reason = > [hooks] > pretxnclose.failpush = sh -c "echo 'You shall not pass!'; false" > txnabort.failpush = sh -c "echo 'Cleaning up the mess...'" > EOF $ killdaemons.py $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log $ cat other.pid >> $DAEMON_PIDS $ hg -R main push other -r e7ec4e813ba6 pushing 
to other searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: pre-close-tip:e7ec4e813ba6 draft remote: You shall not pass! remote: transaction abort! remote: Cleaning up the mess... remote: rollback completed abort: pretxnclose.failpush hook exited with status 1 [255] $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6 pushing to ssh://user@dummy/other searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: pre-close-tip:e7ec4e813ba6 draft remote: You shall not pass! remote: transaction abort! remote: Cleaning up the mess... remote: rollback completed remote: pretxnclose.failpush hook exited with status 1 abort: push failed on remote [255] $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6 pushing to http://localhost:$HGPORT2/ searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: pre-close-tip:e7ec4e813ba6 draft remote: You shall not pass! remote: transaction abort! remote: Cleaning up the mess... 
remote: rollback completed remote: pretxnclose.failpush hook exited with status 1 abort: push failed on remote [255] (check that no 'pending' files remain) $ ls -1 other/.hg/bookmarks* other/.hg/bookmarks $ ls -1 other/.hg/store/phaseroots* other/.hg/store/phaseroots $ ls -1 other/.hg/store/00changelog.i* other/.hg/store/00changelog.i Check error from hook during the unbundling process itself $ cat << EOF >> $HGRCPATH > pretxnchangegroup = sh -c "echo 'Fail early!'; false" > EOF $ killdaemons.py # reload http config $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log $ cat other.pid >> $DAEMON_PIDS $ hg -R main push other -r e7ec4e813ba6 pushing to other searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: Fail early! remote: transaction abort! remote: Cleaning up the mess... remote: rollback completed abort: pretxnchangegroup hook exited with status 1 [255] $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6 pushing to ssh://user@dummy/other searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: Fail early! remote: transaction abort! remote: Cleaning up the mess... remote: rollback completed remote: pretxnchangegroup hook exited with status 1 abort: push failed on remote [255] $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6 pushing to http://localhost:$HGPORT2/ searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: Fail early! remote: transaction abort! remote: Cleaning up the mess... remote: rollback completed remote: pretxnchangegroup hook exited with status 1 abort: push failed on remote [255] Check output capture control. 
(should be still forced for http, disabled for local and ssh) $ cat >> $HGRCPATH << EOF > [experimental] > bundle2-output-capture=False > EOF $ hg -R main push other -r e7ec4e813ba6 pushing to other searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files Fail early! transaction abort! Cleaning up the mess... rollback completed abort: pretxnchangegroup hook exited with status 1 [255] $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6 pushing to ssh://user@dummy/other searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: Fail early! remote: transaction abort! remote: Cleaning up the mess... remote: rollback completed remote: pretxnchangegroup hook exited with status 1 abort: push failed on remote [255] $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6 pushing to http://localhost:$HGPORT2/ searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: Fail early! remote: transaction abort! remote: Cleaning up the mess... 
remote: rollback completed remote: pretxnchangegroup hook exited with status 1 abort: push failed on remote [255] Check abort from mandatory pushkey $ cat > mandatorypart.py << EOF > from mercurial import exchange > from mercurial import pushkey > from mercurial import node > from mercurial import error > @exchange.b2partsgenerator('failingpuskey') > def addfailingpushey(pushop, bundler): > enc = pushkey.encode > part = bundler.newpart('pushkey') > part.addparam('namespace', enc('phases')) > part.addparam('key', enc(pushop.repo['cd010b8cd998'].hex())) > part.addparam('old', enc(str(0))) # successful update > part.addparam('new', enc(str(0))) > def fail(pushop, exc): > raise error.Abort('Correct phase push failed (because hooks)') > pushop.pkfailcb[part.id] = fail > EOF $ cat >> $HGRCPATH << EOF > [hooks] > pretxnchangegroup= > pretxnclose.failpush= > prepushkey.failpush = sh -c "echo 'do not push the key !'; false" > [extensions] > mandatorypart=$TESTTMP/mandatorypart.py > EOF $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log $ cat other.pid >> $DAEMON_PIDS (Failure from a hook) $ hg -R main push other -r e7ec4e813ba6 pushing to other searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files do not push the key ! pushkey-abort: prepushkey.failpush hook exited with status 1 transaction abort! Cleaning up the mess... rollback completed abort: Correct phase push failed (because hooks) [255] $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6 pushing to ssh://user@dummy/other searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: do not push the key ! remote: pushkey-abort: prepushkey.failpush hook exited with status 1 remote: transaction abort! remote: Cleaning up the mess... 
remote: rollback completed abort: Correct phase push failed (because hooks) [255] $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6 pushing to http://localhost:$HGPORT2/ searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: do not push the key ! remote: pushkey-abort: prepushkey.failpush hook exited with status 1 remote: transaction abort! remote: Cleaning up the mess... remote: rollback completed abort: Correct phase push failed (because hooks) [255] (Failure from a the pushkey) $ cat > mandatorypart.py << EOF > from mercurial import exchange > from mercurial import pushkey > from mercurial import node > from mercurial import error > @exchange.b2partsgenerator('failingpuskey') > def addfailingpushey(pushop, bundler): > enc = pushkey.encode > part = bundler.newpart('pushkey') > part.addparam('namespace', enc('phases')) > part.addparam('key', enc(pushop.repo['cd010b8cd998'].hex())) > part.addparam('old', enc(str(4))) # will fail > part.addparam('new', enc(str(3))) > def fail(pushop, exc): > raise error.Abort('Clown phase push failed') > pushop.pkfailcb[part.id] = fail > EOF $ cat >> $HGRCPATH << EOF > [hooks] > prepushkey.failpush = > EOF $ "$TESTDIR/killdaemons.py" $DAEMON_PIDS # reload http config $ hg -R other serve -p $HGPORT2 -d --pid-file=other.pid -E other-error.log $ cat other.pid >> $DAEMON_PIDS $ hg -R main push other -r e7ec4e813ba6 pushing to other searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files transaction abort! Cleaning up the mess... 
rollback completed pushkey: lock state after "phases" lock: free wlock: free abort: Clown phase push failed [255] $ hg -R main push ssh://user@dummy/other -r e7ec4e813ba6 pushing to ssh://user@dummy/other searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: transaction abort! remote: Cleaning up the mess... remote: rollback completed remote: pushkey: lock state after "phases" remote: lock: free remote: wlock: free abort: Clown phase push failed [255] $ hg -R main push http://localhost:$HGPORT2/ -r e7ec4e813ba6 pushing to http://localhost:$HGPORT2/ searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: transaction abort! remote: Cleaning up the mess... remote: rollback completed remote: pushkey: lock state after "phases" remote: lock: free remote: wlock: free abort: Clown phase push failed [255] Test lazily acquiring the lock during unbundle $ cp $TESTTMP/hgrc.orig $HGRCPATH $ cat >> $HGRCPATH < [ui] > ssh=python "$TESTDIR/dummyssh" > EOF $ cat >> $TESTTMP/locktester.py < import os > from mercurial import extensions, bundle2, util > def checklock(orig, repo, *args, **kwargs): > if repo.svfs.lexists("lock"): > raise util.Abort("Lock should not be taken") > return orig(repo, *args, **kwargs) > def extsetup(ui): > extensions.wrapfunction(bundle2, 'processbundle', checklock) > EOF $ hg init lazylock $ cat >> lazylock/.hg/hgrc < [extensions] > locktester=$TESTTMP/locktester.py > EOF $ hg clone -q ssh://user@dummy/lazylock lazylockclient $ cd lazylockclient $ touch a && hg ci -Aqm a $ hg push pushing to ssh://user@dummy/lazylock searching for changes remote: Lock should not be taken abort: push failed on remote [255] $ cat >> ../lazylock/.hg/hgrc < [experimental] > bundle2lazylocking=True > EOF $ hg push pushing to ssh://user@dummy/lazylock searching for 
changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files $ cd .. Servers can disable bundle1 for clone/pull operations $ killdaemons.py $ hg init bundle2onlyserver $ cd bundle2onlyserver $ cat > .hg/hgrc << EOF > [server] > bundle1.pull = false > EOF $ touch foo $ hg -q commit -A -m initial $ hg serve -p $HGPORT -d --pid-file=hg.pid $ cat hg.pid >> $DAEMON_PIDS $ hg --config experimental.bundle2-exp=false clone http://localhost:$HGPORT/ not-bundle2 requesting all changes abort: remote error: incompatible Mercurial client; bundle2 required (see https://www.mercurial-scm.org/wiki/IncompatibleClient) [255] $ killdaemons.py $ cd .. bundle1 can still pull non-generaldelta repos when generaldelta bundle1 disabled $ hg --config format.usegeneraldelta=false init notgdserver $ cd notgdserver $ cat > .hg/hgrc << EOF > [server] > bundle1gd.pull = false > EOF $ touch foo $ hg -q commit -A -m initial $ hg serve -p $HGPORT -d --pid-file=hg.pid $ cat hg.pid >> $DAEMON_PIDS $ hg --config experimental.bundle2-exp=false clone http://localhost:$HGPORT/ not-bundle2-1 requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ killdaemons.py $ cd ../bundle2onlyserver bundle1 pull can be disabled for generaldelta repos only $ cat > .hg/hgrc << EOF > [server] > bundle1gd.pull = false > EOF $ hg serve -p $HGPORT -d --pid-file=hg.pid $ cat hg.pid >> $DAEMON_PIDS $ hg --config experimental.bundle2-exp=false clone http://localhost:$HGPORT/ not-bundle2 requesting all changes abort: remote error: incompatible Mercurial client; bundle2 required (see https://www.mercurial-scm.org/wiki/IncompatibleClient) [255] $ killdaemons.py Verify the global server.bundle1 option works $ cat > .hg/hgrc << EOF > [server] > bundle1 = false > EOF $ hg serve -p $HGPORT -d 
--pid-file=hg.pid $ cat hg.pid >> $DAEMON_PIDS $ hg --config experimental.bundle2-exp=false clone http://localhost:$HGPORT not-bundle2 requesting all changes abort: remote error: incompatible Mercurial client; bundle2 required (see https://www.mercurial-scm.org/wiki/IncompatibleClient) [255] $ killdaemons.py $ cat > .hg/hgrc << EOF > [server] > bundle1gd = false > EOF $ hg serve -p $HGPORT -d --pid-file=hg.pid $ cat hg.pid >> $DAEMON_PIDS $ hg --config experimental.bundle2-exp=false clone http://localhost:$HGPORT/ not-bundle2 requesting all changes abort: remote error: incompatible Mercurial client; bundle2 required (see https://www.mercurial-scm.org/wiki/IncompatibleClient) [255] $ killdaemons.py $ cd ../notgdserver $ cat > .hg/hgrc << EOF > [server] > bundle1gd = false > EOF $ hg serve -p $HGPORT -d --pid-file=hg.pid $ cat hg.pid >> $DAEMON_PIDS $ hg --config experimental.bundle2-exp=false clone http://localhost:$HGPORT/ not-bundle2-2 requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ killdaemons.py $ cd ../bundle2onlyserver Verify bundle1 pushes can be disabled $ cat > .hg/hgrc << EOF > [server] > bundle1.push = false > [web] > allow_push = * > push_ssl = false > EOF $ hg serve -p $HGPORT -d --pid-file=hg.pid -E error.log $ cat hg.pid >> $DAEMON_PIDS $ cd .. 
$ hg clone http://localhost:$HGPORT bundle2-only requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd bundle2-only $ echo commit > foo $ hg commit -m commit $ hg --config experimental.bundle2-exp=false push pushing to http://localhost:$HGPORT/ searching for changes abort: remote error: incompatible Mercurial client; bundle2 required (see https://www.mercurial-scm.org/wiki/IncompatibleClient) [255] $ hg push pushing to http://localhost:$HGPORT/ searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files mercurial-3.7.3/tests/test-bdiff.py.out0000644000175000017500000000064512676531525017505 0ustar mpmmpm00000000000000*** 'a\nc\n\n\n\n' 'a\nb\n\n\n' *** 'a\nb\nc\n' 'a\nc\n' *** '' '' *** 'a\nb\nc' 'a\nb\nc' *** 'a\nb\nc\nd\n' 'a\nd\n' *** 'a\nb\nc\nd\n' 'a\nc\ne\n' *** 'a\nb\nc\n' 'a\nc\n' *** 'a\n' 'c\na\nb\n' *** 'a\n' '' *** 'a\n' 'b\nc\n' *** 'a\n' 'c\na\n' *** '' 'adjfkjdjksdhfksj' *** '' 'ab' *** '' 'abc' *** 'a' 'a' *** 'ab' 'ab' *** 'abc' 'abc' *** 'a\n' 'a\n' *** 'a\nb' 'a\nb' 6 6 'y\n\n' 6 6 'y\n\n' 9 9 'y\n\n' done done mercurial-3.7.3/tests/test-convert-svn-startrev.t0000644000175000017500000000331012676531525021564 0ustar mpmmpm00000000000000#require svn svn-bindings $ cat >> $HGRCPATH < [extensions] > convert = > EOF $ convert() > { > startrev=$1 > repopath=A-r$startrev-hg > hg convert --config convert.svn.startrev=$startrev \ > --config convert.svn.trunk=branches/branch1 \ > --config convert.svn.branches=" " \ > --config convert.svn.tags= \ > --datesort svn-repo $repopath > hg -R $repopath log -G \ > --template '{rev} {desc|firstline} files: {files}\n' > echo > } $ svnadmin create svn-repo $ svnadmin load -q svn-repo < "$TESTDIR/svn/startrev.svndump" Convert before branching point $ convert 3 
initializing destination A-r3-hg repository scanning source... sorting... converting... 3 removeb 2 changeaa 1 branch, changeaaa 0 addc,changeaaaa o 3 addc,changeaaaa files: a c | o 2 branch, changeaaa files: a | o 1 changeaa files: a | o 0 removeb files: a Convert before branching point $ convert 4 initializing destination A-r4-hg repository scanning source... sorting... converting... 2 changeaa 1 branch, changeaaa 0 addc,changeaaaa o 2 addc,changeaaaa files: a c | o 1 branch, changeaaa files: a | o 0 changeaa files: a Convert at branching point $ convert 5 initializing destination A-r5-hg repository scanning source... sorting... converting... 1 branch, changeaaa 0 addc,changeaaaa o 1 addc,changeaaaa files: a c | o 0 branch, changeaaa files: a Convert last revision only $ convert 6 initializing destination A-r6-hg repository scanning source... sorting... converting... 0 addc,changeaaaa o 0 addc,changeaaaa files: a c mercurial-3.7.3/tests/f0000755000175000017500000001365712676531525014460 0ustar mpmmpm00000000000000#!/usr/bin/env python """ Utility for inspecting files in various ways. This tool is like the collection of tools found in a unix environment but are cross platform and stable and suitable for our needs in the test suite. 
This can be used instead of tools like: [ dd find head hexdump ls md5sum readlink sha1sum stat tail test readlink.py md5sum.py """ import sys, os, errno, re, glob, optparse def visit(opts, filenames, outfile): """Process filenames in the way specified in opts, writing output to outfile.""" for f in sorted(filenames): isstdin = f == '-' if not isstdin and not os.path.lexists(f): outfile.write('%s: file not found\n' % f) continue quiet = opts.quiet and not opts.recurse or isstdin isdir = os.path.isdir(f) islink = os.path.islink(f) isfile = os.path.isfile(f) and not islink dirfiles = None content = None facts = [] if isfile: if opts.type: facts.append('file') if opts.hexdump or opts.dump or opts.md5: content = file(f, 'rb').read() elif islink: if opts.type: facts.append('link') content = os.readlink(f) elif isstdin: content = sys.stdin.read() if opts.size: facts.append('size=%s' % len(content)) elif isdir: if opts.recurse or opts.type: dirfiles = glob.glob(f + '/*') facts.append('directory with %s files' % len(dirfiles)) elif opts.type: facts.append('type unknown') if not isstdin: stat = os.lstat(f) if opts.size and not isdir: facts.append('size=%s' % stat.st_size) if opts.mode and not islink: facts.append('mode=%o' % (stat.st_mode & 0777)) if opts.links: facts.append('links=%s' % stat.st_nlink) if opts.newer: # mtime might be in whole seconds so newer file might be same if stat.st_mtime >= os.stat(opts.newer).st_mtime: facts.append('newer than %s' % opts.newer) else: facts.append('older than %s' % opts.newer) if opts.md5 and content is not None: try: from hashlib import md5 except ImportError: from md5 import md5 facts.append('md5=%s' % md5(content).hexdigest()[:opts.bytes]) if opts.sha1 and content is not None: try: from hashlib import sha1 except ImportError: from sha import sha as sha1 facts.append('sha1=%s' % sha1(content).hexdigest()[:opts.bytes]) if isstdin: outfile.write(', '.join(facts) + '\n') elif facts: outfile.write('%s: %s\n' % (f, ', '.join(facts))) 
elif not quiet: outfile.write('%s:\n' % f) if content is not None: chunk = content if not islink: if opts.lines: if opts.lines >= 0: chunk = ''.join(chunk.splitlines(True)[:opts.lines]) else: chunk = ''.join(chunk.splitlines(True)[opts.lines:]) if opts.bytes: if opts.bytes >= 0: chunk = chunk[:opts.bytes] else: chunk = chunk[opts.bytes:] if opts.hexdump: for i in range(0, len(chunk), 16): s = chunk[i:i+16] outfile.write('%04x: %-47s |%s|\n' % (i, ' '.join('%02x' % ord(c) for c in s), re.sub('[^ -~]', '.', s))) if opts.dump: if not quiet: outfile.write('>>>\n') outfile.write(chunk) if not quiet: if chunk.endswith('\n'): outfile.write('<<<\n') else: outfile.write('\n<<< no trailing newline\n') if opts.recurse and dirfiles: assert not isstdin visit(opts, dirfiles, outfile) if __name__ == "__main__": parser = optparse.OptionParser("%prog [options] [filenames]") parser.add_option("-t", "--type", action="store_true", help="show file type (file or directory)") parser.add_option("-m", "--mode", action="store_true", help="show file mode") parser.add_option("-l", "--links", action="store_true", help="show number of links") parser.add_option("-s", "--size", action="store_true", help="show size of file") parser.add_option("-n", "--newer", action="store", help="check if file is newer (or same)") parser.add_option("-r", "--recurse", action="store_true", help="recurse into directories") parser.add_option("-S", "--sha1", action="store_true", help="show sha1 hash of the content") parser.add_option("-M", "--md5", action="store_true", help="show md5 hash of the content") parser.add_option("-D", "--dump", action="store_true", help="dump file content") parser.add_option("-H", "--hexdump", action="store_true", help="hexdump file content") parser.add_option("-B", "--bytes", type="int", help="number of characters to dump") parser.add_option("-L", "--lines", type="int", help="number of lines to dump") parser.add_option("-q", "--quiet", action="store_true", help="no default output") (opts, 
filenames) = parser.parse_args(sys.argv[1:]) if not filenames: filenames = ['-'] visit(opts, filenames, sys.stdout) mercurial-3.7.3/tests/test-verify.t0000644000175000017500000000537612676531525016752 0ustar mpmmpm00000000000000prepare repo $ hg init a $ cd a $ echo "some text" > FOO.txt $ echo "another text" > bar.txt $ echo "more text" > QUICK.txt $ hg add adding FOO.txt adding QUICK.txt adding bar.txt $ hg ci -mtest1 verify $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 3 files, 1 changesets, 3 total revisions verify with journal $ touch .hg/store/journal $ hg verify abandoned transaction found - run hg recover checking changesets checking manifests crosschecking files in changesets and manifests checking files 3 files, 1 changesets, 3 total revisions $ rm .hg/store/journal introduce some bugs in repo $ cd .hg/store/data $ mv _f_o_o.txt.i X_f_o_o.txt.i $ mv bar.txt.i xbar.txt.i $ rm _q_u_i_c_k.txt.i $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files warning: revlog 'data/FOO.txt.i' not in fncache! 0: empty or missing FOO.txt FOO.txt@0: f62022d3d590 in manifests not found warning: revlog 'data/QUICK.txt.i' not in fncache! 0: empty or missing QUICK.txt QUICK.txt@0: 88b857db8eba in manifests not found warning: revlog 'data/bar.txt.i' not in fncache! 0: empty or missing bar.txt bar.txt@0: 256559129457 in manifests not found 3 files, 1 changesets, 0 total revisions 3 warnings encountered! hint: run "hg debugrebuildfncache" to recover from corrupt fncache 6 integrity errors encountered! (first damaged changeset appears to be 0) [1] $ cd ../../.. $ cd .. test changelog without a manifest $ hg init b $ cd b $ hg branch foo marked working directory as branch foo (branches are permanent and global, did you want a bookmark?) 
$ hg ci -m branchfoo $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 0 files, 1 changesets, 0 total revisions test revlog corruption $ touch a $ hg add a $ hg ci -m a $ echo 'corrupted' > b $ dd if=.hg/store/data/a.i of=start bs=1 count=20 2>/dev/null $ cat start b > .hg/store/data/a.i $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files a@1: broken revlog! (index data/a.i is corrupted) warning: orphan revlog 'data/a.i' 1 files, 2 changesets, 0 total revisions 1 warnings encountered! 1 integrity errors encountered! (first damaged changeset appears to be 1) [1] $ cd .. test revlog format 0 $ revlog-formatv0.py $ cd formatv0 $ hg verify repository uses revlog format 0 checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 1 changesets, 1 total revisions $ cd .. mercurial-3.7.3/tests/test-strip.t0000644000175000017500000005567312676531525016614 0ustar mpmmpm00000000000000 $ echo "[format]" >> $HGRCPATH $ echo "usegeneraldelta=yes" >> $HGRCPATH $ echo "[extensions]" >> $HGRCPATH $ echo "strip=" >> $HGRCPATH $ restore() { > hg unbundle -q .hg/strip-backup/* > rm .hg/strip-backup/* > } $ teststrip() { > hg up -C $1 > echo % before update $1, strip $2 > hg parents > hg --traceback strip $2 > echo % after update $1, strip $2 > hg parents > restore > } $ hg init test $ cd test $ echo foo > bar $ hg ci -Ama adding bar $ echo more >> bar $ hg ci -Amb $ echo blah >> bar $ hg ci -Amc $ hg up 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo blah >> bar $ hg ci -Amd created new head $ echo final >> bar $ hg ci -Ame $ hg log changeset: 4:443431ffac4f tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: e changeset: 3:65bd5f99a4a3 parent: 1:ef3a871183d7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: d changeset: 2:264128213d29 user: test date: Thu Jan 
01 00:00:00 1970 +0000 summary: c changeset: 1:ef3a871183d7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: b changeset: 0:9ab35a2d17cb user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a $ teststrip 4 4 0 files updated, 0 files merged, 0 files removed, 0 files unresolved % before update 4, strip 4 changeset: 4:443431ffac4f tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: e 1 files updated, 0 files merged, 0 files removed, 0 files unresolved saved backup bundle to $TESTTMP/test/.hg/strip-backup/*-backup.hg (glob) % after update 4, strip 4 changeset: 3:65bd5f99a4a3 tag: tip parent: 1:ef3a871183d7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: d $ teststrip 4 3 1 files updated, 0 files merged, 0 files removed, 0 files unresolved % before update 4, strip 3 changeset: 4:443431ffac4f tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: e 1 files updated, 0 files merged, 0 files removed, 0 files unresolved saved backup bundle to $TESTTMP/test/.hg/strip-backup/*-backup.hg (glob) % after update 4, strip 3 changeset: 1:ef3a871183d7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: b $ teststrip 1 4 0 files updated, 0 files merged, 0 files removed, 0 files unresolved % before update 1, strip 4 changeset: 1:ef3a871183d7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: b saved backup bundle to $TESTTMP/test/.hg/strip-backup/*-backup.hg (glob) % after update 1, strip 4 changeset: 1:ef3a871183d7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: b $ teststrip 4 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved % before update 4, strip 2 changeset: 4:443431ffac4f tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: e saved backup bundle to $TESTTMP/test/.hg/strip-backup/*-backup.hg (glob) % after update 4, strip 2 changeset: 3:443431ffac4f tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: e $ teststrip 4 1 1 files updated, 0 files merged, 0 files removed, 
0 files unresolved % before update 4, strip 1 changeset: 4:264128213d29 tag: tip parent: 1:ef3a871183d7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: c 1 files updated, 0 files merged, 0 files removed, 0 files unresolved saved backup bundle to $TESTTMP/test/.hg/strip-backup/*-backup.hg (glob) % after update 4, strip 1 changeset: 0:9ab35a2d17cb tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a $ teststrip null 4 0 files updated, 0 files merged, 1 files removed, 0 files unresolved % before update null, strip 4 saved backup bundle to $TESTTMP/test/.hg/strip-backup/*-backup.hg (glob) % after update null, strip 4 $ hg log changeset: 4:264128213d29 tag: tip parent: 1:ef3a871183d7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: c changeset: 3:443431ffac4f user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: e changeset: 2:65bd5f99a4a3 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: d changeset: 1:ef3a871183d7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: b changeset: 0:9ab35a2d17cb user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a $ hg up -C 4 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg parents changeset: 4:264128213d29 tag: tip parent: 1:ef3a871183d7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: c $ hg --traceback strip 4 1 files updated, 0 files merged, 0 files removed, 0 files unresolved saved backup bundle to $TESTTMP/test/.hg/strip-backup/264128213d29-0b39d6bf-backup.hg (glob) $ hg parents changeset: 1:ef3a871183d7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: b $ hg debugbundle .hg/strip-backup/* Stream params: {'Compression': 'BZ'} changegroup -- "{'version': '02'}" 264128213d290d868c54642d13aeaa3675551a78 $ hg pull .hg/strip-backup/* pulling from .hg/strip-backup/264128213d29-0b39d6bf-backup.hg searching for changes adding changesets adding manifests adding file changes added 1 changesets with 0 changes to 0 files (+1 heads) (run 'hg heads' to 
see heads, 'hg merge' to merge) $ rm .hg/strip-backup/* $ hg log --graph o changeset: 4:264128213d29 | tag: tip | parent: 1:ef3a871183d7 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: c | | o changeset: 3:443431ffac4f | | user: test | | date: Thu Jan 01 00:00:00 1970 +0000 | | summary: e | | | o changeset: 2:65bd5f99a4a3 |/ user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: d | @ changeset: 1:ef3a871183d7 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 0:9ab35a2d17cb user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a $ hg up -C 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge 4 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) before strip of merge parent $ hg parents changeset: 2:65bd5f99a4a3 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: d changeset: 4:264128213d29 tag: tip parent: 1:ef3a871183d7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: c $ hg strip 4 1 files updated, 0 files merged, 0 files removed, 0 files unresolved saved backup bundle to $TESTTMP/test/.hg/strip-backup/*-backup.hg (glob) after strip of merge parent $ hg parents changeset: 1:ef3a871183d7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: b $ restore $ hg up 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -G @ changeset: 4:264128213d29 | tag: tip | parent: 1:ef3a871183d7 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: c | | o changeset: 3:443431ffac4f | | user: test | | date: Thu Jan 01 00:00:00 1970 +0000 | | summary: e | | | o changeset: 2:65bd5f99a4a3 |/ user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: d | o changeset: 1:ef3a871183d7 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 0:9ab35a2d17cb user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a 2 is parent of 3, only one strip should happen $ hg strip 
"roots(2)" 3 saved backup bundle to $TESTTMP/test/.hg/strip-backup/*-backup.hg (glob) $ hg log -G @ changeset: 2:264128213d29 | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: c | o changeset: 1:ef3a871183d7 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 0:9ab35a2d17cb user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a $ restore $ hg log -G o changeset: 4:443431ffac4f | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: e | o changeset: 3:65bd5f99a4a3 | parent: 1:ef3a871183d7 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: d | | @ changeset: 2:264128213d29 |/ user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: c | o changeset: 1:ef3a871183d7 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 0:9ab35a2d17cb user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a 2 different branches: 2 strips $ hg strip 2 4 1 files updated, 0 files merged, 0 files removed, 0 files unresolved saved backup bundle to $TESTTMP/test/.hg/strip-backup/*-backup.hg (glob) $ hg log -G o changeset: 2:65bd5f99a4a3 | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: d | @ changeset: 1:ef3a871183d7 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 0:9ab35a2d17cb user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a $ restore 2 different branches and a common ancestor: 1 strip $ hg strip 1 "2|4" 1 files updated, 0 files merged, 0 files removed, 0 files unresolved saved backup bundle to $TESTTMP/test/.hg/strip-backup/*-backup.hg (glob) $ restore verify fncache is kept up-to-date $ touch a $ hg ci -qAm a $ cat .hg/store/fncache | sort data/a.i data/bar.i $ hg strip tip 0 files updated, 0 files merged, 1 files removed, 0 files unresolved saved backup bundle to $TESTTMP/test/.hg/strip-backup/*-backup.hg (glob) $ cat .hg/store/fncache data/bar.i stripping an empty revset $ hg strip "1 and not 1" abort: 
empty revision set [255] remove branchy history for qimport tests $ hg strip 3 saved backup bundle to $TESTTMP/test/.hg/strip-backup/*-backup.hg (glob) strip of applied mq should cleanup status file $ echo "mq=" >> $HGRCPATH $ hg up -C 3 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo fooagain >> bar $ hg ci -mf $ hg qimport -r tip:2 applied patches before strip $ hg qapplied d e f stripping revision in queue $ hg strip 3 1 files updated, 0 files merged, 0 files removed, 0 files unresolved saved backup bundle to $TESTTMP/test/.hg/strip-backup/*-backup.hg (glob) applied patches after stripping rev in queue $ hg qapplied d stripping ancestor of queue $ hg strip 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved saved backup bundle to $TESTTMP/test/.hg/strip-backup/*-backup.hg (glob) applied patches after stripping ancestor of queue $ hg qapplied Verify strip protects against stripping wc parent when there are uncommitted mods $ echo b > b $ echo bb > bar $ hg add b $ hg ci -m 'b' $ hg log --graph @ changeset: 1:76dcf9fab855 | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 0:9ab35a2d17cb user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a $ hg up 0 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo c > bar $ hg up -t false merging bar merging bar failed! 1 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges [1] $ hg sum parent: 1:76dcf9fab855 tip b branch: default commit: 1 modified, 1 unknown, 1 unresolved update: (current) phases: 2 draft mq: 3 unapplied $ echo c > b $ hg strip tip abort: local changes found [255] $ hg strip tip --keep saved backup bundle to $TESTTMP/test/.hg/strip-backup/*-backup.hg (glob) $ hg log --graph @ changeset: 0:9ab35a2d17cb tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a $ hg status M bar ? b ? 
bar.orig $ rm bar.orig $ hg sum parent: 0:9ab35a2d17cb tip a branch: default commit: 1 modified, 1 unknown update: (current) phases: 1 draft mq: 3 unapplied Strip adds, removes, modifies with --keep $ touch b $ hg add b $ hg commit -mb $ touch c ... with a clean working dir $ hg add c $ hg rm bar $ hg commit -mc $ hg status $ hg strip --keep tip saved backup bundle to $TESTTMP/test/.hg/strip-backup/*-backup.hg (glob) $ hg status ! bar ? c ... with a dirty working dir $ hg add c $ hg rm bar $ hg commit -mc $ hg status $ echo b > b $ echo d > d $ hg strip --keep tip saved backup bundle to $TESTTMP/test/.hg/strip-backup/*-backup.hg (glob) $ hg status M b ! bar ? c ? d ... after updating the dirstate $ hg add c $ hg commit -mc $ hg rm c $ hg commit -mc $ hg strip --keep '.^' -q $ cd .. stripping many nodes on a complex graph (issue3299) $ hg init issue3299 $ cd issue3299 $ hg debugbuilddag '@a.:a@b.:b.:x a $ hg add a $ hg strip -B B abort: local changes found [255] $ hg bookmarks * B 6:ff43616e5d0f Make sure no one adds back a -b option: $ hg strip -b tip hg strip: option -b not recognized hg strip [-k] [-f] [-B bookmark] [-r] REV... strip changesets and all their descendants from the repository (use "hg help -e strip" to show help for the strip extension) options ([+] can be repeated): -r --rev REV [+] strip specified revision (optional, can specify revisions without this option) -f --force force removal of changesets, discard uncommitted changes (no backup) --no-backup no backups -k --keep do not modify working directory during strip -B --bookmark VALUE [+] remove revs only reachable from given bookmark --mq operate on patch repository (use "hg strip -h" to show more help) [255] $ cd .. 
Verify bundles don't get overwritten: $ hg init doublebundle $ cd doublebundle $ touch a $ hg commit -Aqm a $ touch b $ hg commit -Aqm b $ hg strip -r 0 0 files updated, 0 files merged, 2 files removed, 0 files unresolved saved backup bundle to $TESTTMP/doublebundle/.hg/strip-backup/3903775176ed-e68910bd-backup.hg (glob) $ ls .hg/strip-backup 3903775176ed-e68910bd-backup.hg $ hg pull -q -r 3903775176ed .hg/strip-backup/3903775176ed-e68910bd-backup.hg $ hg strip -r 0 saved backup bundle to $TESTTMP/doublebundle/.hg/strip-backup/3903775176ed-54390173-backup.hg (glob) $ ls .hg/strip-backup 3903775176ed-54390173-backup.hg 3903775176ed-e68910bd-backup.hg $ cd .. Test that we only bundle the stripped changesets (issue4736) ------------------------------------------------------------ initialization (previous repo is empty anyway) $ hg init issue4736 $ cd issue4736 $ echo a > a $ hg add a $ hg commit -m commitA $ echo b > b $ hg add b $ hg commit -m commitB $ echo c > c $ hg add c $ hg commit -m commitC $ hg up 'desc(commitB)' 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo d > d $ hg add d $ hg commit -m commitD created new head $ hg up 'desc(commitC)' 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge 'desc(commitD)' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m 'mergeCD' $ hg log -G @ changeset: 4:d8db9d137221 |\ tag: tip | | parent: 2:5c51d8d6557d | | parent: 3:6625a5168474 | | user: test | | date: Thu Jan 01 00:00:00 1970 +0000 | | summary: mergeCD | | | o changeset: 3:6625a5168474 | | parent: 1:eca11cf91c71 | | user: test | | date: Thu Jan 01 00:00:00 1970 +0000 | | summary: commitD | | o | changeset: 2:5c51d8d6557d |/ user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: commitC | o changeset: 1:eca11cf91c71 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: commitB | o changeset: 0:105141ef12d0 user: test date: Thu Jan 
01 00:00:00 1970 +0000 summary: commitA Check bundle behavior: $ hg bundle -r 'desc(mergeCD)' --base 'desc(commitC)' ../issue4736.hg 2 changesets found $ hg log -r 'bundle()' -R ../issue4736.hg changeset: 3:6625a5168474 parent: 1:eca11cf91c71 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commitD changeset: 4:d8db9d137221 tag: tip parent: 2:5c51d8d6557d parent: 3:6625a5168474 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: mergeCD check strip behavior $ hg --config extensions.strip= strip 'desc(commitD)' --debug resolving manifests branchmerge: False, force: True, partial: False ancestor: d8db9d137221+, local: d8db9d137221+, remote: eca11cf91c71 c: other deleted -> r removing c d: other deleted -> r removing d 0 files updated, 0 files merged, 2 files removed, 0 files unresolved 2 changesets found list of changesets: 6625a516847449b6f0fa3737b9ba56e9f0f3032c d8db9d1372214336d2b5570f20ee468d2c72fa8b bundle2-output-bundle: "HG20", (1 params) 1 parts total bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload saved backup bundle to $TESTTMP/issue4736/.hg/strip-backup/6625a5168474-345bb43d-backup.hg (glob) invalid branchheads cache (served): tip differs truncating cache/rbc-revs-v1 to 24 $ hg log -G o changeset: 2:5c51d8d6557d | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: commitC | @ changeset: 1:eca11cf91c71 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: commitB | o changeset: 0:105141ef12d0 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commitA strip backup content $ hg log -r 'bundle()' -R .hg/strip-backup/6625a5168474-*-backup.hg changeset: 3:6625a5168474 parent: 1:eca11cf91c71 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commitD changeset: 4:d8db9d137221 tag: tip parent: 2:5c51d8d6557d parent: 3:6625a5168474 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: mergeCD Error during post-close callback of the strip transaction (They should be gracefully handled 
and reported) $ cat > ../crashstrip.py << EOF > from mercurial import error > def reposetup(ui, repo): > class crashstriprepo(repo.__class__): > def transaction(self, desc, *args, **kwargs): > tr = super(crashstriprepo, self).transaction(self, desc, *args, **kwargs) > if desc == 'strip': > def crash(tra): raise error.Abort('boom') > tr.addpostclose('crash', crash) > return tr > repo.__class__ = crashstriprepo > EOF $ hg strip tip --config extensions.crash=$TESTTMP/crashstrip.py saved backup bundle to $TESTTMP/issue4736/.hg/strip-backup/5c51d8d6557d-70daef06-backup.hg (glob) strip failed, full bundle stored in '$TESTTMP/issue4736/.hg/strip-backup/5c51d8d6557d-70daef06-backup.hg' (glob) abort: boom [255] mercurial-3.7.3/tests/test-ui-config.py0000644000175000017500000000704512676531525017506 0ustar mpmmpm00000000000000from mercurial import ui, dispatch, error testui = ui.ui() parsed = dispatch._parseconfig(testui, [ 'values.string=string value', 'values.bool1=true', 'values.bool2=false', 'values.boolinvalid=foo', 'values.int1=42', 'values.int2=-42', 'values.intinvalid=foo', 'lists.list1=foo', 'lists.list2=foo bar baz', 'lists.list3=alice, bob', 'lists.list4=foo bar baz alice, bob', 'lists.list5=abc d"ef"g "hij def"', 'lists.list6="hello world", "how are you?"', 'lists.list7=Do"Not"Separate', 'lists.list8="Do"Separate', 'lists.list9="Do\\"NotSeparate"', 'lists.list10=string "with extraneous" quotation mark"', 'lists.list11=x, y', 'lists.list12="x", "y"', 'lists.list13=""" key = "x", "y" """', 'lists.list14=,,,, ', 'lists.list15=" just with starting quotation', 'lists.list16="longer quotation" with "no ending quotation', 'lists.list17=this is \\" "not a quotation mark"', 'lists.list18=\n \n\nding\ndong', ]) print repr(testui.configitems('values')) print repr(testui.configitems('lists')) print "---" print repr(testui.config('values', 'string')) print repr(testui.config('values', 'bool1')) print repr(testui.config('values', 'bool2')) print repr(testui.config('values', 
'unknown')) print "---" try: print repr(testui.configbool('values', 'string')) except error.ConfigError as inst: print inst print repr(testui.configbool('values', 'bool1')) print repr(testui.configbool('values', 'bool2')) print repr(testui.configbool('values', 'bool2', True)) print repr(testui.configbool('values', 'unknown')) print repr(testui.configbool('values', 'unknown', True)) print "---" print repr(testui.configint('values', 'int1')) print repr(testui.configint('values', 'int2')) print "---" print repr(testui.configlist('lists', 'list1')) print repr(testui.configlist('lists', 'list2')) print repr(testui.configlist('lists', 'list3')) print repr(testui.configlist('lists', 'list4')) print repr(testui.configlist('lists', 'list4', ['foo'])) print repr(testui.configlist('lists', 'list5')) print repr(testui.configlist('lists', 'list6')) print repr(testui.configlist('lists', 'list7')) print repr(testui.configlist('lists', 'list8')) print repr(testui.configlist('lists', 'list9')) print repr(testui.configlist('lists', 'list10')) print repr(testui.configlist('lists', 'list11')) print repr(testui.configlist('lists', 'list12')) print repr(testui.configlist('lists', 'list13')) print repr(testui.configlist('lists', 'list14')) print repr(testui.configlist('lists', 'list15')) print repr(testui.configlist('lists', 'list16')) print repr(testui.configlist('lists', 'list17')) print repr(testui.configlist('lists', 'list18')) print repr(testui.configlist('lists', 'unknown')) print repr(testui.configlist('lists', 'unknown', '')) print repr(testui.configlist('lists', 'unknown', 'foo')) print repr(testui.configlist('lists', 'unknown', ['foo'])) print repr(testui.configlist('lists', 'unknown', 'foo bar')) print repr(testui.configlist('lists', 'unknown', 'foo, bar')) print repr(testui.configlist('lists', 'unknown', ['foo bar'])) print repr(testui.configlist('lists', 'unknown', ['foo', 'bar'])) print repr(testui.config('values', 'String')) def function(): pass # values that aren't 
strings should work testui.setconfig('hook', 'commit', function) print function == testui.config('hook', 'commit') # invalid values try: testui.configbool('values', 'boolinvalid') except error.ConfigError: print 'boolinvalid' try: testui.configint('values', 'intinvalid') except error.ConfigError: print 'intinvalid' mercurial-3.7.3/tests/hypothesishelpers.py0000644000175000017500000000407512676531525020433 0ustar mpmmpm00000000000000# Helper module to use the Hypothesis tool in tests # # Copyright 2015 David R. MacIver # # For details see http://hypothesis.readthedocs.org import os import sys import traceback try: # hypothesis 2.x from hypothesis.configuration import set_hypothesis_home_dir from hypothesis import settings except ImportError: # hypothesis 1.x from hypothesis.settings import set_hypothesis_home_dir from hypothesis import Settings as settings import hypothesis.strategies as st from hypothesis import given # hypothesis store data regarding generate example and code set_hypothesis_home_dir(os.path.join( os.getenv('TESTTMP'), ".hypothesis" )) def check(*args, **kwargs): """decorator to make a function a hypothesis test Decorated function are run immediately (to be used doctest style)""" def accept(f): # Workaround for https://github.com/DRMacIver/hypothesis/issues/206 # Fixed in version 1.13 (released 2015 october 29th) f.__module__ = '__anon__' try: given(*args, settings=settings(max_examples=2000), **kwargs)(f)() except Exception: traceback.print_exc(file=sys.stdout) sys.exit(1) return accept def roundtrips(data, decode, encode): """helper to tests function that must do proper encode/decode roundtripping """ @given(data) def testroundtrips(value): encoded = encode(value) decoded = decode(encoded) if decoded != value: raise ValueError( "Round trip failed: %s(%r) -> %s(%r) -> %r" % ( encode.__name__, value, decode.__name__, encoded, decoded )) try: testroundtrips() except Exception: # heredoc swallow traceback, we work around it 
traceback.print_exc(file=sys.stdout) raise print("Round trip OK") # strategy for generating bytestring that might be an issue for Mercurial bytestrings = ( st.builds(lambda s, e: s.encode(e), st.text(), st.sampled_from([ 'utf-8', 'utf-16', ]))) | st.binary() mercurial-3.7.3/tests/test-merge-types.t0000644000175000017500000003026512676531525017702 0ustar mpmmpm00000000000000#require symlink execbit $ tellmeabout() { > if [ -h $1 ]; then > echo $1 is a symlink: > $TESTDIR/readlink.py $1 > elif [ -x $1 ]; then > echo $1 is an executable file with content: > cat $1 > else > echo $1 is a plain file with content: > cat $1 > fi > } $ hg init test1 $ cd test1 $ echo a > a $ hg ci -Aqmadd $ chmod +x a $ hg ci -mexecutable $ hg up -q 0 $ rm a $ ln -s symlink a $ hg ci -msymlink created new head Symlink is local parent, executable is other: $ hg merge --debug searching for copies back to rev 1 resolving manifests branchmerge: True, force: False, partial: False ancestor: c334dc3be0da, local: 521a1e40188f+, remote: 3574f3e69b1c preserving a for resolve of a a: versions differ -> m (premerge) picked tool ':merge' for a (binary False symlink True changedelete False) merging a my a@521a1e40188f+ other a@3574f3e69b1c ancestor a@c334dc3be0da warning: internal :merge cannot merge symlinks for a warning: conflicts while merging a! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ tellmeabout a a is a symlink: a -> symlink $ hg resolve a --tool internal:other (no more unresolved files) $ tellmeabout a a is an executable file with content: a $ hg st M a ? 
a.orig Symlink is other parent, executable is local: $ hg update -C 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge --debug --tool :union searching for copies back to rev 1 resolving manifests branchmerge: True, force: False, partial: False ancestor: c334dc3be0da, local: 3574f3e69b1c+, remote: 521a1e40188f preserving a for resolve of a a: versions differ -> m (premerge) picked tool ':union' for a (binary False symlink True changedelete False) merging a my a@3574f3e69b1c+ other a@521a1e40188f ancestor a@c334dc3be0da warning: internal :union cannot merge symlinks for a warning: conflicts while merging a! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ tellmeabout a a is an executable file with content: a $ hg update -C 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge --debug --tool :merge3 searching for copies back to rev 1 resolving manifests branchmerge: True, force: False, partial: False ancestor: c334dc3be0da, local: 3574f3e69b1c+, remote: 521a1e40188f preserving a for resolve of a a: versions differ -> m (premerge) picked tool ':merge3' for a (binary False symlink True changedelete False) merging a my a@3574f3e69b1c+ other a@521a1e40188f ancestor a@c334dc3be0da warning: internal :merge3 cannot merge symlinks for a warning: conflicts while merging a! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' 
to abandon [1] $ tellmeabout a a is an executable file with content: a $ hg update -C 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge --debug --tool :merge-local searching for copies back to rev 1 resolving manifests branchmerge: True, force: False, partial: False ancestor: c334dc3be0da, local: 3574f3e69b1c+, remote: 521a1e40188f preserving a for resolve of a a: versions differ -> m (premerge) picked tool ':merge-local' for a (binary False symlink True changedelete False) merging a my a@3574f3e69b1c+ other a@521a1e40188f ancestor a@c334dc3be0da warning: internal :merge-local cannot merge symlinks for a 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ tellmeabout a a is an executable file with content: a $ hg update -C 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge --debug --tool :merge-other searching for copies back to rev 1 resolving manifests branchmerge: True, force: False, partial: False ancestor: c334dc3be0da, local: 3574f3e69b1c+, remote: 521a1e40188f preserving a for resolve of a a: versions differ -> m (premerge) picked tool ':merge-other' for a (binary False symlink True changedelete False) merging a my a@3574f3e69b1c+ other a@521a1e40188f ancestor a@c334dc3be0da warning: internal :merge-other cannot merge symlinks for a 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ tellmeabout a a is an executable file with content: a Update to link without local change should get us a symlink (issue3316): $ hg up -C 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg up 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg st ? 
a.orig Update to link with local change should cause a merge prompt (issue3200): $ hg up -Cq 0 $ echo data > a $ HGMERGE= hg up -y --debug searching for copies back to rev 2 resolving manifests branchmerge: False, force: False, partial: False ancestor: c334dc3be0da, local: c334dc3be0da+, remote: 521a1e40188f preserving a for resolve of a a: versions differ -> m (premerge) (couldn't find merge tool hgmerge|tool hgmerge can't handle symlinks) (re) picked tool ':prompt' for a (binary False symlink True changedelete False) no tool found to merge a keep (l)ocal, take (o)ther, or leave (u)nresolved? u 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges [1] $ hg diff --git diff --git a/a b/a old mode 120000 new mode 100644 --- a/a +++ b/a @@ -1,1 +1,1 @@ -symlink \ No newline at end of file +data Test only 'l' change - happens rarely, except when recovering from situations where that was what happened. $ hg init test2 $ cd test2 $ printf base > f $ hg ci -Aqm0 $ echo file > f $ echo content >> f $ hg ci -qm1 $ hg up -qr0 $ rm f $ ln -s base f $ hg ci -qm2 $ hg merge merging f warning: internal :merge cannot merge symlinks for f warning: conflicts while merging f! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ tellmeabout f f is a symlink: f -> base $ hg up -Cqr1 $ hg merge merging f warning: internal :merge cannot merge symlinks for f warning: conflicts while merging f! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ tellmeabout f f is a plain file with content: file content $ cd .. 
Test removed 'x' flag merged with change to symlink $ hg init test3 $ cd test3 $ echo f > f $ chmod +x f $ hg ci -Aqm0 $ chmod -x f $ hg ci -qm1 $ hg up -qr0 $ rm f $ ln -s dangling f $ hg ci -qm2 $ hg merge merging f warning: internal :merge cannot merge symlinks for f warning: conflicts while merging f! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ tellmeabout f f is a symlink: f -> dangling $ hg up -Cqr1 $ hg merge merging f warning: internal :merge cannot merge symlinks for f warning: conflicts while merging f! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ tellmeabout f f is a plain file with content: f Test removed 'x' flag merged with content change - both ways $ hg up -Cqr0 $ echo change > f $ hg ci -qm3 $ hg merge -r1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ tellmeabout f f is a plain file with content: change $ hg up -qCr1 $ hg merge -r3 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ tellmeabout f f is a plain file with content: change $ cd .. Test merge with no common ancestor: a: just different b: x vs -, different (cannot calculate x, cannot ask merge tool) c: x vs -, same (cannot calculate x, merge tool is no good) d: x vs l, different e: x vs l, same f: - vs l, different g: - vs l, same h: l vs l, different (where same means the filelog entry is shared and there thus is an ancestor!) 
$ hg init test4 $ cd test4 $ echo 0 > 0 $ hg ci -Aqm0 $ echo 1 > a $ echo 1 > b $ chmod +x b $ echo x > c $ chmod +x c $ echo 1 > d $ chmod +x d $ printf x > e $ chmod +x e $ echo 1 > f $ printf x > g $ ln -s 1 h $ hg ci -qAm1 $ hg up -qr0 $ echo 2 > a $ echo 2 > b $ echo x > c $ ln -s 2 d $ ln -s x e $ ln -s 2 f $ ln -s x g $ ln -s 2 h $ hg ci -Aqm2 $ hg merge merging a warning: cannot merge flags for b merging b warning: cannot merge flags for c merging d warning: internal :merge cannot merge symlinks for d warning: conflicts while merging d! (edit, then use 'hg resolve --mark') merging f warning: internal :merge cannot merge symlinks for f warning: conflicts while merging f! (edit, then use 'hg resolve --mark') merging h warning: internal :merge cannot merge symlinks for h warning: conflicts while merging h! (edit, then use 'hg resolve --mark') warning: conflicts while merging a! (edit, then use 'hg resolve --mark') warning: conflicts while merging b! (edit, then use 'hg resolve --mark') 3 files updated, 0 files merged, 0 files removed, 5 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ hg resolve -l U a U b U d U f U h $ tellmeabout a a is a plain file with content: <<<<<<< local: 0139c5610547 - test: 2 2 ======= 1 >>>>>>> other: 97e29675e796 - test: 1 $ tellmeabout b b is a plain file with content: <<<<<<< local: 0139c5610547 - test: 2 2 ======= 1 >>>>>>> other: 97e29675e796 - test: 1 $ tellmeabout c c is a plain file with content: x $ tellmeabout d d is a symlink: d -> 2 $ tellmeabout e e is a symlink: e -> x $ tellmeabout f f is a symlink: f -> 2 $ tellmeabout g g is a symlink: g -> x $ tellmeabout h h is a symlink: h -> 2 $ hg up -Cqr1 $ hg merge merging a warning: cannot merge flags for b merging b warning: cannot merge flags for c merging d warning: internal :merge cannot merge symlinks for d warning: conflicts while merging d! 
(edit, then use 'hg resolve --mark') merging f warning: internal :merge cannot merge symlinks for f warning: conflicts while merging f! (edit, then use 'hg resolve --mark') merging h warning: internal :merge cannot merge symlinks for h warning: conflicts while merging h! (edit, then use 'hg resolve --mark') warning: conflicts while merging a! (edit, then use 'hg resolve --mark') warning: conflicts while merging b! (edit, then use 'hg resolve --mark') 3 files updated, 0 files merged, 0 files removed, 5 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ tellmeabout a a is a plain file with content: <<<<<<< local: 97e29675e796 - test: 1 1 ======= 2 >>>>>>> other: 0139c5610547 - test: 2 $ tellmeabout b b is an executable file with content: <<<<<<< local: 97e29675e796 - test: 1 1 ======= 2 >>>>>>> other: 0139c5610547 - test: 2 $ tellmeabout c c is an executable file with content: x $ tellmeabout d d is an executable file with content: 1 $ tellmeabout e e is an executable file with content: x (no-eol) $ tellmeabout f f is a plain file with content: 1 $ tellmeabout g g is a plain file with content: x (no-eol) $ tellmeabout h h is a symlink: h -> 1 $ cd .. 
mercurial-3.7.3/tests/test-clone-r.t0000644000175000017500000001651412676531525017001 0ustar mpmmpm00000000000000 $ hg init test $ cd test $ echo 0 >> afile $ hg add afile $ hg commit -m "0.0" $ echo 1 >> afile $ hg commit -m "0.1" $ echo 2 >> afile $ hg commit -m "0.2" $ echo 3 >> afile $ hg commit -m "0.3" $ hg update -C 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo 1 >> afile $ hg commit -m "1.1" created new head $ echo 2 >> afile $ hg commit -m "1.2" $ echo a line > fred $ echo 3 >> afile $ hg add fred $ hg commit -m "1.3" $ hg mv afile adifferentfile $ hg commit -m "1.3m" $ hg update -C 3 1 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg mv afile anotherfile $ hg commit -m "0.3m" $ hg debugindex -f 1 afile rev flag offset length size ..... link p1 p2 nodeid (re) 0 0000 0 3 2 ..... 0 -1 -1 362fef284ce2 (re) 1 0000 3 5 4 ..... 1 0 -1 125144f7e028 (re) 2 0000 8 7 6 ..... 2 1 -1 4c982badb186 (re) 3 0000 15 9 8 ..... 3 2 -1 19b1fc555737 (re) $ hg debugindex adifferentfile rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 75 ..... 7 2565f3199a74 000000000000 000000000000 (re) $ hg debugindex anotherfile rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 75 ..... 8 2565f3199a74 000000000000 000000000000 (re) $ hg debugindex fred rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 8 ..... 6 12ab3bcc5ea4 000000000000 000000000000 (re) $ hg debugindex --manifest rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 48 ..... 0 43eadb1d2d06 000000000000 000000000000 (re) 1 48 48 ..... 1 8b89697eba2c 43eadb1d2d06 000000000000 (re) 2 96 48 ..... 2 626a32663c2f 8b89697eba2c 000000000000 (re) 3 144 48 ..... 3 f54c32f13478 626a32663c2f 000000000000 (re) 4 192 .. ..... 6 de68e904d169 626a32663c2f 000000000000 (re) 5 2.. .. ..... 7 09bb521d218d de68e904d169 000000000000 (re) 6 3.. 54 ..... 
8 1fde233dfb0f f54c32f13478 000000000000 (re) $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 4 files, 9 changesets, 7 total revisions $ cd .. $ for i in 0 1 2 3 4 5 6 7 8; do > echo > echo ---- hg clone -r "$i" test test-"$i" > hg clone -r "$i" test test-"$i" > cd test-"$i" > hg verify > cd .. > done ---- hg clone -r 0 test test-0 adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 1 changesets, 1 total revisions ---- hg clone -r 1 test test-1 adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 2 changesets, 2 total revisions ---- hg clone -r 2 test test-2 adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 3 changesets, 3 total revisions ---- hg clone -r 3 test test-3 adding changesets adding manifests adding file changes added 4 changesets with 4 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 4 changesets, 4 total revisions ---- hg clone -r 4 test test-4 adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files updating to branch 
default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 2 changesets, 2 total revisions ---- hg clone -r 5 test test-5 adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 3 changesets, 3 total revisions ---- hg clone -r 6 test test-6 adding changesets adding manifests adding file changes added 4 changesets with 5 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 4 changesets, 5 total revisions ---- hg clone -r 7 test test-7 adding changesets adding manifests adding file changes added 5 changesets with 6 changes to 3 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking files 3 files, 5 changesets, 6 total revisions ---- hg clone -r 8 test test-8 adding changesets adding manifests adding file changes added 5 changesets with 5 changes to 2 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 5 changesets, 5 total revisions $ cd test-8 $ hg pull ../test-7 pulling from ../test-7 searching for changes adding changesets adding manifests adding file changes added 4 changesets with 2 changes to 3 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg verify checking changesets checking manifests 
crosschecking files in changesets and manifests checking files 4 files, 9 changesets, 7 total revisions $ cd .. $ hg clone test test-9 updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd test-9 $ hg branch foobar marked working directory as branch foobar (branches are permanent and global, did you want a bookmark?) $ echo file2 >> file2 $ hg add file2 $ hg commit -m "changeset9" $ echo file3 >> file3 $ hg add file3 $ hg commit -m "changeset10" $ cd .. $ hg clone -r 9 -u foobar test-9 test-10 adding changesets adding manifests adding file changes added 6 changesets with 6 changes to 3 files updating to branch foobar 2 files updated, 0 files merged, 0 files removed, 0 files unresolved mercurial-3.7.3/tests/test-lfconvert.t0000644000175000017500000003154612676531525017446 0ustar mpmmpm00000000000000 $ USERCACHE="$TESTTMP/cache"; export USERCACHE $ mkdir "${USERCACHE}" $ cat >> $HGRCPATH < [format] > usegeneraldelta=yes > [extensions] > largefiles = > share = > strip = > convert = > [largefiles] > minsize = 0.5 > patterns = **.other > **.dat > usercache=${USERCACHE} > EOF "lfconvert" works $ hg init bigfile-repo $ cd bigfile-repo $ cat >> .hg/hgrc < [extensions] > largefiles = ! > EOF $ mkdir sub $ dd if=/dev/zero bs=1k count=256 > large 2> /dev/null $ dd if=/dev/zero bs=1k count=256 > large2 2> /dev/null $ echo normal > normal1 $ echo alsonormal > sub/normal2 $ dd if=/dev/zero bs=1k count=10 > sub/maybelarge.dat 2> /dev/null $ hg addremove adding large adding large2 adding normal1 adding sub/maybelarge.dat adding sub/normal2 $ hg commit -m"add large, normal1" large normal1 $ hg commit -m"add sub/*" sub Test tag parsing $ cat >> .hgtags < IncorrectlyFormattedTag! > invalidhash sometag > 0123456789abcdef anothertag > EOF $ hg add .hgtags $ hg commit -m"add large2" large2 .hgtags Test link+rename largefile codepath $ [ -d .hg/largefiles ] && echo fail || echo pass pass $ cd .. 
$ hg lfconvert --size 0.2 bigfile-repo largefiles-repo initializing destination largefiles-repo skipping incorrectly formatted tag IncorrectlyFormattedTag! skipping incorrectly formatted id invalidhash no mapping for id 0123456789abcdef #if symlink $ hg --cwd bigfile-repo rename large2 large3 $ ln -sf large bigfile-repo/large3 $ hg --cwd bigfile-repo commit -m"make large2 a symlink" large2 large3 $ hg lfconvert --size 0.2 bigfile-repo largefiles-repo-symlink initializing destination largefiles-repo-symlink skipping incorrectly formatted tag IncorrectlyFormattedTag! skipping incorrectly formatted id invalidhash no mapping for id 0123456789abcdef abort: renamed/copied largefile large3 becomes symlink [255] #endif $ cd bigfile-repo $ hg strip --no-backup 2 0 files updated, 0 files merged, 2 files removed, 0 files unresolved $ cd .. $ rm -rf largefiles-repo largefiles-repo-symlink $ hg lfconvert --size 0.2 bigfile-repo largefiles-repo initializing destination largefiles-repo "lfconvert" converts content correctly $ cd largefiles-repo $ hg up getting changed largefiles 2 largefiles updated, 0 removed 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg locate .hglf/large .hglf/sub/maybelarge.dat normal1 sub/normal2 $ cat normal1 normal $ cat sub/normal2 alsonormal $ md5sum.py large sub/maybelarge.dat ec87a838931d4d5d2e94a04644788a55 large 1276481102f218c981e0324180bafd9f sub/maybelarge.dat "lfconvert" adds 'largefiles' to .hg/requires. $ cat .hg/requires dotencode fncache generaldelta largefiles revlogv1 store "lfconvert" includes a newline at the end of the standin files. $ cat .hglf/large .hglf/sub/maybelarge.dat 2e000fa7e85759c7f4c254d4d9c33ef481e459a7 34e163be8e43c5631d8b92e9c43ab0bf0fa62b9c $ cd .. 
add some changesets to rename/remove/merge $ cd bigfile-repo $ hg mv -q sub stuff $ hg commit -m"rename sub/ to stuff/" $ hg update -q 1 $ echo blah >> normal3 $ echo blah >> sub/normal2 $ echo blah >> sub/maybelarge.dat $ md5sum.py sub/maybelarge.dat 1dd0b99ff80e19cff409702a1d3f5e15 sub/maybelarge.dat $ hg commit -A -m"add normal3, modify sub/*" adding normal3 created new head $ hg rm large normal3 $ hg commit -q -m"remove large, normal3" $ hg merge merging sub/maybelarge.dat and stuff/maybelarge.dat to stuff/maybelarge.dat merging sub/normal2 and stuff/normal2 to stuff/normal2 warning: $TESTTMP/bigfile-repo/stuff/maybelarge.dat looks like a binary file. (glob) warning: conflicts while merging stuff/maybelarge.dat! (edit, then use 'hg resolve --mark') 0 files updated, 1 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ hg cat -r . sub/maybelarge.dat > stuff/maybelarge.dat $ hg resolve -m stuff/maybelarge.dat (no more unresolved files) $ hg commit -m"merge" $ hg log -G --template "{rev}:{node|short} {desc|firstline}\n" @ 5:4884f215abda merge |\ | o 4:7285f817b77e remove large, normal3 | | | o 3:67e3892e3534 add normal3, modify sub/* | | o | 2:c96c8beb5d56 rename sub/ to stuff/ |/ o 1:020c65d24e11 add sub/* | o 0:117b8328f97a add large, normal1 $ cd .. 
lfconvert with rename, merge, and remove $ rm -rf largefiles-repo $ hg lfconvert --size 0.2 bigfile-repo largefiles-repo initializing destination largefiles-repo $ cd largefiles-repo $ hg log -G --template "{rev}:{node|short} {desc|firstline}\n" o 5:8e05f5f2b77e merge |\ | o 4:a5a02de7a8e4 remove large, normal3 | | | o 3:55759520c76f add normal3, modify sub/* | | o | 2:261ad3f3f037 rename sub/ to stuff/ |/ o 1:334e5237836d add sub/* | o 0:d4892ec57ce2 add large, normal1 $ hg locate -r 2 .hglf/large .hglf/stuff/maybelarge.dat normal1 stuff/normal2 $ hg locate -r 3 .hglf/large .hglf/sub/maybelarge.dat normal1 normal3 sub/normal2 $ hg locate -r 4 .hglf/sub/maybelarge.dat normal1 sub/normal2 $ hg locate -r 5 .hglf/stuff/maybelarge.dat normal1 stuff/normal2 $ hg update getting changed largefiles 1 largefiles updated, 0 removed 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat stuff/normal2 alsonormal blah $ md5sum.py stuff/maybelarge.dat 1dd0b99ff80e19cff409702a1d3f5e15 stuff/maybelarge.dat $ cat .hglf/stuff/maybelarge.dat 76236b6a2c6102826c61af4297dd738fb3b1de38 $ cd .. "lfconvert" error cases $ hg lfconvert http://localhost/foo foo abort: http://localhost/foo is not a local Mercurial repo [255] $ hg lfconvert foo ssh://localhost/foo abort: ssh://localhost/foo is not a local Mercurial repo [255] $ hg lfconvert nosuchrepo foo abort: repository nosuchrepo not found! [255] $ hg share -q -U bigfile-repo shared $ printf 'bogus' > shared/.hg/sharedpath $ hg lfconvert shared foo abort: .hg/sharedpath points to nonexistent directory $TESTTMP/bogus! (glob) [255] $ hg lfconvert bigfile-repo largefiles-repo initializing destination largefiles-repo abort: repository largefiles-repo already exists! 
[255] add another largefile to the new largefiles repo $ cd largefiles-repo $ dd if=/dev/zero bs=1k count=1k > anotherlarge 2> /dev/null $ hg add --lfsize=1 anotherlarge $ hg commit -m "add anotherlarge (should be a largefile)" $ cat .hglf/anotherlarge 3b71f43ff30f4b15b5cd85dd9e95ebc7e84eb5a3 $ hg tag mytag $ cd .. round-trip: converting back to a normal (non-largefiles) repo with "lfconvert --to-normal" should give the same as ../bigfile-repo $ cd largefiles-repo $ hg lfconvert --to-normal . ../normal-repo initializing destination ../normal-repo 0 additional largefiles cached scanning source... sorting... converting... 7 add large, normal1 6 add sub/* 5 rename sub/ to stuff/ 4 add normal3, modify sub/* 3 remove large, normal3 2 merge 1 add anotherlarge (should be a largefile) 0 Added tag mytag for changeset abacddda7028 $ cd ../normal-repo $ cat >> .hg/hgrc < [extensions] > largefiles = ! > EOF $ hg log -G --template "{rev}:{node|short} {desc|firstline}\n" o 7:b5fedc110b9d Added tag mytag for changeset 867ab992ecf4 | o 6:867ab992ecf4 add anotherlarge (should be a largefile) | o 5:4884f215abda merge |\ | o 4:7285f817b77e remove large, normal3 | | | o 3:67e3892e3534 add normal3, modify sub/* | | o | 2:c96c8beb5d56 rename sub/ to stuff/ |/ o 1:020c65d24e11 add sub/* | o 0:117b8328f97a add large, normal1 $ hg update 5 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg locate .hgtags anotherlarge normal1 stuff/maybelarge.dat stuff/normal2 $ [ -d .hg/largefiles ] && echo fail || echo pass pass $ cd .. Clearing the usercache ensures that commitctx doesn't try to cache largefiles from the working dir on a convert. $ rm "${USERCACHE}"/* $ hg convert largefiles-repo assuming destination largefiles-repo-hg initializing destination largefiles-repo-hg repository scanning source... sorting... converting... 
7 add large, normal1 6 add sub/* 5 rename sub/ to stuff/ 4 add normal3, modify sub/* 3 remove large, normal3 2 merge 1 add anotherlarge (should be a largefile) 0 Added tag mytag for changeset abacddda7028 $ hg -R largefiles-repo-hg log -G --template "{rev}:{node|short} {desc|firstline}\n" o 7:2f08f66459b7 Added tag mytag for changeset 17126745edfd | o 6:17126745edfd add anotherlarge (should be a largefile) | o 5:9cc5aa7204f0 merge |\ | o 4:a5a02de7a8e4 remove large, normal3 | | | o 3:55759520c76f add normal3, modify sub/* | | o | 2:261ad3f3f037 rename sub/ to stuff/ |/ o 1:334e5237836d add sub/* | o 0:d4892ec57ce2 add large, normal1 Verify will fail (for now) if the usercache is purged before converting, since largefiles are not cached in the converted repo's local store by the conversion process. $ cd largefiles-repo-hg $ cat >> .hg/hgrc < [experimental] > evolution=createmarkers > EOF $ hg debugobsolete `hg log -r tip -T "{node}"` $ cd .. $ hg -R largefiles-repo-hg verify --large --lfa checking changesets checking manifests crosschecking files in changesets and manifests checking files 9 files, 8 changesets, 13 total revisions searching 7 changesets for largefiles changeset 0:d4892ec57ce2: large references missing $TESTTMP/largefiles-repo-hg/.hg/largefiles/2e000fa7e85759c7f4c254d4d9c33ef481e459a7 (glob) changeset 1:334e5237836d: sub/maybelarge.dat references missing $TESTTMP/largefiles-repo-hg/.hg/largefiles/34e163be8e43c5631d8b92e9c43ab0bf0fa62b9c (glob) changeset 2:261ad3f3f037: stuff/maybelarge.dat references missing $TESTTMP/largefiles-repo-hg/.hg/largefiles/34e163be8e43c5631d8b92e9c43ab0bf0fa62b9c (glob) changeset 3:55759520c76f: sub/maybelarge.dat references missing $TESTTMP/largefiles-repo-hg/.hg/largefiles/76236b6a2c6102826c61af4297dd738fb3b1de38 (glob) changeset 5:9cc5aa7204f0: stuff/maybelarge.dat references missing $TESTTMP/largefiles-repo-hg/.hg/largefiles/76236b6a2c6102826c61af4297dd738fb3b1de38 (glob) changeset 6:17126745edfd: anotherlarge 
references missing $TESTTMP/largefiles-repo-hg/.hg/largefiles/3b71f43ff30f4b15b5cd85dd9e95ebc7e84eb5a3 (glob) verified existence of 6 revisions of 4 largefiles [1] $ hg -R largefiles-repo-hg showconfig paths [1] Avoid a traceback if a largefile isn't available (issue3519) Ensure the largefile can be cached in the source if necessary $ hg clone -U largefiles-repo issue3519 $ rm -f "${USERCACHE}"/* $ hg lfconvert --to-normal issue3519 normalized3519 initializing destination normalized3519 4 additional largefiles cached scanning source... sorting... converting... 7 add large, normal1 6 add sub/* 5 rename sub/ to stuff/ 4 add normal3, modify sub/* 3 remove large, normal3 2 merge 1 add anotherlarge (should be a largefile) 0 Added tag mytag for changeset abacddda7028 Ensure the abort message is useful if a largefile is entirely unavailable $ rm -rf normalized3519 $ rm "${USERCACHE}"/* $ rm issue3519/.hg/largefiles/* $ rm largefiles-repo/.hg/largefiles/* $ hg lfconvert --to-normal issue3519 normalized3519 initializing destination normalized3519 anotherlarge: largefile 3b71f43ff30f4b15b5cd85dd9e95ebc7e84eb5a3 not available from file:/*/$TESTTMP/largefiles-repo (glob) stuff/maybelarge.dat: largefile 76236b6a2c6102826c61af4297dd738fb3b1de38 not available from file:/*/$TESTTMP/largefiles-repo (glob) stuff/maybelarge.dat: largefile 76236b6a2c6102826c61af4297dd738fb3b1de38 not available from file:/*/$TESTTMP/largefiles-repo (glob) sub/maybelarge.dat: largefile 76236b6a2c6102826c61af4297dd738fb3b1de38 not available from file:/*/$TESTTMP/largefiles-repo (glob) large: largefile 2e000fa7e85759c7f4c254d4d9c33ef481e459a7 not available from file:/*/$TESTTMP/largefiles-repo (glob) sub/maybelarge.dat: largefile 76236b6a2c6102826c61af4297dd738fb3b1de38 not available from file:/*/$TESTTMP/largefiles-repo (glob) large: largefile 2e000fa7e85759c7f4c254d4d9c33ef481e459a7 not available from file:/*/$TESTTMP/largefiles-repo (glob) stuff/maybelarge.dat: largefile 
34e163be8e43c5631d8b92e9c43ab0bf0fa62b9c not available from file:/*/$TESTTMP/largefiles-repo (glob) large: largefile 2e000fa7e85759c7f4c254d4d9c33ef481e459a7 not available from file:/*/$TESTTMP/largefiles-repo (glob) sub/maybelarge.dat: largefile 34e163be8e43c5631d8b92e9c43ab0bf0fa62b9c not available from file:/*/$TESTTMP/largefiles-repo (glob) large: largefile 2e000fa7e85759c7f4c254d4d9c33ef481e459a7 not available from file:/*/$TESTTMP/largefiles-repo (glob) 0 additional largefiles cached 11 largefiles failed to download abort: all largefiles must be present locally [255] mercurial-3.7.3/tests/test-bundle2-remote-changegroup.t0000644000175000017500000005036412676531525022567 0ustar mpmmpm00000000000000#require killdaemons Create an extension to test bundle2 remote-changegroup parts $ cat > bundle2.py << EOF > """A small extension to test bundle2 remote-changegroup parts. > > Current bundle2 implementation doesn't provide a way to generate those > parts, so they must be created by extensions. > """ > from mercurial import bundle2, changegroup, exchange, util > > def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None, > b2caps=None, heads=None, common=None, > **kwargs): > """this function replaces the changegroup part handler for getbundle. > It allows to create a set of arbitrary parts containing changegroups > and remote-changegroups, as described in a bundle2maker file in the > repository .hg/ directory. > > Each line of that bundle2maker file contain a description of the > part to add: > - changegroup common_revset heads_revset > Creates a changegroup part based, using common_revset and > heads_revset for changegroup.getchangegroup. > - remote-changegroup url file > Creates a remote-changegroup part for a bundle at the given > url. Size and digest, as required by the client, are computed > from the given file. > - raw-remote-changegroup > Creates a remote-changegroup part with the data given in the > python expression as parameters. 
The python expression is > evaluated with eval, and is expected to be a dict. > """ > def newpart(name, data=''): > """wrapper around bundler.newpart adding an extra part making the > client output information about each processed part""" > bundler.newpart('output', data=name) > part = bundler.newpart(name, data=data) > return part > > for line in open(repo.join('bundle2maker'), 'r'): > line = line.strip() > try: > verb, args = line.split(None, 1) > except ValueError: > verb, args = line, '' > if verb == 'remote-changegroup': > url, file = args.split() > bundledata = open(file, 'rb').read() > digest = util.digester.preferred(b2caps['digests']) > d = util.digester([digest], bundledata) > part = newpart('remote-changegroup') > part.addparam('url', url) > part.addparam('size', str(len(bundledata))) > part.addparam('digests', digest) > part.addparam('digest:%s' % digest, d[digest]) > elif verb == 'raw-remote-changegroup': > part = newpart('remote-changegroup') > for k, v in eval(args).items(): > part.addparam(k, str(v)) > elif verb == 'changegroup': > _common, heads = args.split() > common.extend(repo.lookup(r) for r in repo.revs(_common)) > heads = [repo.lookup(r) for r in repo.revs(heads)] > cg = changegroup.getchangegroup(repo, 'changegroup', > heads=heads, common=common) > newpart('changegroup', cg.getchunks()) > else: > raise Exception('unknown verb') > > exchange.getbundle2partsmapping['changegroup'] = _getbundlechangegrouppart > EOF Start a simple HTTP server to serve bundles $ python "$TESTDIR/dumbhttp.py" -p $HGPORT --pid dumb.pid $ cat dumb.pid >> $DAEMON_PIDS $ cat >> $HGRCPATH << EOF > [experimental] > bundle2-exp=True > [ui] > ssh=python "$TESTDIR/dummyssh" > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline} > EOF $ hg init repo $ hg -R repo unbundle $TESTDIR/bundles/rebase.hg adding changesets adding manifests adding file changes added 8 changesets with 7 changes to 7 files (+2 heads) (run 'hg heads' to see heads, 'hg merge' to 
merge) $ hg -R repo log -G o 7:02de42196ebe draft Nicolas Dumazet H | | o 6:eea13746799a draft Nicolas Dumazet G |/| o | 5:24b6387c8c8c draft Nicolas Dumazet F | | | o 4:9520eea781bc draft Nicolas Dumazet E |/ | o 3:32af7686d403 draft Nicolas Dumazet D | | | o 2:5fddd98957c8 draft Nicolas Dumazet C | | | o 1:42ccdea3bb16 draft Nicolas Dumazet B |/ o 0:cd010b8cd998 draft Nicolas Dumazet A $ hg clone repo orig updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat > repo/.hg/hgrc << EOF > [extensions] > bundle2=$TESTTMP/bundle2.py > EOF Test a pull with an remote-changegroup $ hg bundle -R repo --type v1 --base '0:4' -r '5:7' bundle.hg 3 changesets found $ cat > repo/.hg/bundle2maker << EOF > remote-changegroup http://localhost:$HGPORT/bundle.hg bundle.hg > EOF $ hg clone orig clone -r 3 -r 4 adding changesets adding manifests adding file changes added 5 changesets with 5 changes to 5 files (+1 heads) updating to branch default 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg pull -R clone ssh://user@dummy/repo pulling from ssh://user@dummy/repo searching for changes remote: remote-changegroup adding changesets adding manifests adding file changes added 3 changesets with 2 changes to 2 files (+1 heads) (run 'hg heads .' 
to see heads, 'hg merge' to merge) $ hg -R clone log -G o 7:02de42196ebe public Nicolas Dumazet H | | o 6:eea13746799a public Nicolas Dumazet G |/| o | 5:24b6387c8c8c public Nicolas Dumazet F | | | o 4:9520eea781bc public Nicolas Dumazet E |/ | @ 3:32af7686d403 public Nicolas Dumazet D | | | o 2:5fddd98957c8 public Nicolas Dumazet C | | | o 1:42ccdea3bb16 public Nicolas Dumazet B |/ o 0:cd010b8cd998 public Nicolas Dumazet A $ rm -rf clone Test a pull with an remote-changegroup and a following changegroup $ hg bundle -R repo --type v1 --base 2 -r '3:4' bundle2.hg 2 changesets found $ cat > repo/.hg/bundle2maker << EOF > remote-changegroup http://localhost:$HGPORT/bundle2.hg bundle2.hg > changegroup 0:4 5:7 > EOF $ hg clone orig clone -r 2 adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 3 files updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg pull -R clone ssh://user@dummy/repo pulling from ssh://user@dummy/repo searching for changes remote: remote-changegroup adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files (+1 heads) remote: changegroup adding changesets adding manifests adding file changes added 3 changesets with 2 changes to 2 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg -R clone log -G o 7:02de42196ebe public Nicolas Dumazet H | | o 6:eea13746799a public Nicolas Dumazet G |/| o | 5:24b6387c8c8c public Nicolas Dumazet F | | | o 4:9520eea781bc public Nicolas Dumazet E |/ | o 3:32af7686d403 public Nicolas Dumazet D | | | @ 2:5fddd98957c8 public Nicolas Dumazet C | | | o 1:42ccdea3bb16 public Nicolas Dumazet B |/ o 0:cd010b8cd998 public Nicolas Dumazet A $ rm -rf clone Test a pull with a changegroup followed by an remote-changegroup $ hg bundle -R repo --type v1 --base '0:4' -r '5:7' bundle3.hg 3 changesets found $ cat > repo/.hg/bundle2maker << EOF > changegroup 000000000000 :4 > 
remote-changegroup http://localhost:$HGPORT/bundle3.hg bundle3.hg > EOF $ hg clone orig clone -r 2 adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 3 files updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg pull -R clone ssh://user@dummy/repo pulling from ssh://user@dummy/repo searching for changes remote: changegroup adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files (+1 heads) remote: remote-changegroup adding changesets adding manifests adding file changes added 3 changesets with 2 changes to 2 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg -R clone log -G o 7:02de42196ebe public Nicolas Dumazet H | | o 6:eea13746799a public Nicolas Dumazet G |/| o | 5:24b6387c8c8c public Nicolas Dumazet F | | | o 4:9520eea781bc public Nicolas Dumazet E |/ | o 3:32af7686d403 public Nicolas Dumazet D | | | @ 2:5fddd98957c8 public Nicolas Dumazet C | | | o 1:42ccdea3bb16 public Nicolas Dumazet B |/ o 0:cd010b8cd998 public Nicolas Dumazet A $ rm -rf clone Test a pull with two remote-changegroups and a changegroup $ hg bundle -R repo --type v1 --base 2 -r '3:4' bundle4.hg 2 changesets found $ hg bundle -R repo --type v1 --base '3:4' -r '5:6' bundle5.hg 2 changesets found $ cat > repo/.hg/bundle2maker << EOF > remote-changegroup http://localhost:$HGPORT/bundle4.hg bundle4.hg > remote-changegroup http://localhost:$HGPORT/bundle5.hg bundle5.hg > changegroup 0:6 7 > EOF $ hg clone orig clone -r 2 adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 3 files updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg pull -R clone ssh://user@dummy/repo pulling from ssh://user@dummy/repo searching for changes remote: remote-changegroup adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files (+1 heads) 
remote: remote-changegroup adding changesets adding manifests adding file changes added 2 changesets with 1 changes to 1 files remote: changegroup adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg -R clone log -G o 7:02de42196ebe public Nicolas Dumazet H | | o 6:eea13746799a public Nicolas Dumazet G |/| o | 5:24b6387c8c8c public Nicolas Dumazet F | | | o 4:9520eea781bc public Nicolas Dumazet E |/ | o 3:32af7686d403 public Nicolas Dumazet D | | | @ 2:5fddd98957c8 public Nicolas Dumazet C | | | o 1:42ccdea3bb16 public Nicolas Dumazet B |/ o 0:cd010b8cd998 public Nicolas Dumazet A $ rm -rf clone Hash digest tests $ hg bundle -R repo --type v1 -a bundle6.hg 8 changesets found $ cat > repo/.hg/bundle2maker << EOF > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'sha1', 'digest:sha1': '2c880cfec23cff7d8f80c2f12958d1563cbdaba6'} > EOF $ hg clone ssh://user@dummy/repo clone requesting all changes remote: remote-changegroup adding changesets adding manifests adding file changes added 8 changesets with 7 changes to 7 files (+2 heads) updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf clone $ cat > repo/.hg/bundle2maker << EOF > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'md5', 'digest:md5': 'e22172c2907ef88794b7bea6642c2394'} > EOF $ hg clone ssh://user@dummy/repo clone requesting all changes remote: remote-changegroup adding changesets adding manifests adding file changes added 8 changesets with 7 changes to 7 files (+2 heads) updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf clone Hash digest mismatch throws an error $ cat > repo/.hg/bundle2maker << EOF > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 
'sha1', 'digest:sha1': '0' * 40} > EOF $ hg clone ssh://user@dummy/repo clone requesting all changes remote: remote-changegroup adding changesets adding manifests adding file changes added 8 changesets with 7 changes to 7 files (+2 heads) transaction abort! rollback completed abort: bundle at http://localhost:$HGPORT/bundle6.hg is corrupted: sha1 mismatch: expected 0000000000000000000000000000000000000000, got 2c880cfec23cff7d8f80c2f12958d1563cbdaba6 [255] Multiple hash digests can be given $ cat > repo/.hg/bundle2maker << EOF > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'md5 sha1', 'digest:md5': 'e22172c2907ef88794b7bea6642c2394', 'digest:sha1': '2c880cfec23cff7d8f80c2f12958d1563cbdaba6'} > EOF $ hg clone ssh://user@dummy/repo clone requesting all changes remote: remote-changegroup adding changesets adding manifests adding file changes added 8 changesets with 7 changes to 7 files (+2 heads) updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf clone If either of the multiple hash digests mismatches, an error is thrown $ cat > repo/.hg/bundle2maker << EOF > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'md5 sha1', 'digest:md5': '0' * 32, 'digest:sha1': '2c880cfec23cff7d8f80c2f12958d1563cbdaba6'} > EOF $ hg clone ssh://user@dummy/repo clone requesting all changes remote: remote-changegroup adding changesets adding manifests adding file changes added 8 changesets with 7 changes to 7 files (+2 heads) transaction abort! 
rollback completed abort: bundle at http://localhost:$HGPORT/bundle6.hg is corrupted: md5 mismatch: expected 00000000000000000000000000000000, got e22172c2907ef88794b7bea6642c2394 [255] $ cat > repo/.hg/bundle2maker << EOF > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle6.hg', 'size': 1663, 'digests': 'md5 sha1', 'digest:md5': 'e22172c2907ef88794b7bea6642c2394', 'digest:sha1': '0' * 40} > EOF $ hg clone ssh://user@dummy/repo clone requesting all changes remote: remote-changegroup adding changesets adding manifests adding file changes added 8 changesets with 7 changes to 7 files (+2 heads) transaction abort! rollback completed abort: bundle at http://localhost:$HGPORT/bundle6.hg is corrupted: sha1 mismatch: expected 0000000000000000000000000000000000000000, got 2c880cfec23cff7d8f80c2f12958d1563cbdaba6 [255] Corruption tests $ hg clone orig clone -r 2 adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 3 files updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat > repo/.hg/bundle2maker << EOF > remote-changegroup http://localhost:$HGPORT/bundle4.hg bundle4.hg > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle5.hg', 'size': 578, 'digests': 'sha1', 'digest:sha1': '0' * 40} > changegroup 0:6 7 > EOF $ hg pull -R clone ssh://user@dummy/repo pulling from ssh://user@dummy/repo searching for changes remote: remote-changegroup adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files (+1 heads) remote: remote-changegroup adding changesets adding manifests adding file changes added 2 changesets with 1 changes to 1 files transaction abort! 
rollback completed abort: bundle at http://localhost:$HGPORT/bundle5.hg is corrupted: sha1 mismatch: expected 0000000000000000000000000000000000000000, got f29485d6bfd37db99983cfc95ecb52f8ca396106 [255] The entire transaction has been rolled back in the pull above $ hg -R clone log -G @ 2:5fddd98957c8 public Nicolas Dumazet C | o 1:42ccdea3bb16 public Nicolas Dumazet B | o 0:cd010b8cd998 public Nicolas Dumazet A No params $ cat > repo/.hg/bundle2maker << EOF > raw-remote-changegroup {} > EOF $ hg pull -R clone ssh://user@dummy/repo pulling from ssh://user@dummy/repo searching for changes remote: remote-changegroup abort: remote-changegroup: missing "url" param [255] Missing size $ cat > repo/.hg/bundle2maker << EOF > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle4.hg'} > EOF $ hg pull -R clone ssh://user@dummy/repo pulling from ssh://user@dummy/repo searching for changes remote: remote-changegroup abort: remote-changegroup: missing "size" param [255] Invalid size $ cat > repo/.hg/bundle2maker << EOF > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle4.hg', 'size': 'foo'} > EOF $ hg pull -R clone ssh://user@dummy/repo pulling from ssh://user@dummy/repo searching for changes remote: remote-changegroup abort: remote-changegroup: invalid value for param "size" [255] Size mismatch $ cat > repo/.hg/bundle2maker << EOF > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle4.hg', 'size': 42} > EOF $ hg pull -R clone ssh://user@dummy/repo pulling from ssh://user@dummy/repo searching for changes remote: remote-changegroup adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files (+1 heads) transaction abort! 
rollback completed abort: bundle at http://localhost:$HGPORT/bundle4.hg is corrupted: size mismatch: expected 42, got 581 [255] Unknown digest $ cat > repo/.hg/bundle2maker << EOF > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle4.hg', 'size': 581, 'digests': 'foo', 'digest:foo': 'bar'} > EOF $ hg pull -R clone ssh://user@dummy/repo pulling from ssh://user@dummy/repo searching for changes remote: remote-changegroup abort: missing support for remote-changegroup - digest:foo [255] Missing digest $ cat > repo/.hg/bundle2maker << EOF > raw-remote-changegroup {'url': 'http://localhost:$HGPORT/bundle4.hg', 'size': 581, 'digests': 'sha1'} > EOF $ hg pull -R clone ssh://user@dummy/repo pulling from ssh://user@dummy/repo searching for changes remote: remote-changegroup abort: remote-changegroup: missing "digest:sha1" param [255] Not an HTTP url $ cat > repo/.hg/bundle2maker << EOF > raw-remote-changegroup {'url': 'ssh://localhost:$HGPORT/bundle4.hg', 'size': 581} > EOF $ hg pull -R clone ssh://user@dummy/repo pulling from ssh://user@dummy/repo searching for changes remote: remote-changegroup abort: remote-changegroup does not support ssh urls [255] Not a bundle $ cat > notbundle.hg << EOF > foo > EOF $ cat > repo/.hg/bundle2maker << EOF > remote-changegroup http://localhost:$HGPORT/notbundle.hg notbundle.hg > EOF $ hg pull -R clone ssh://user@dummy/repo pulling from ssh://user@dummy/repo searching for changes remote: remote-changegroup abort: http://localhost:$HGPORT/notbundle.hg: not a Mercurial bundle [255] Not a bundle 1.0 $ cat > notbundle10.hg << EOF > HG20 > EOF $ cat > repo/.hg/bundle2maker << EOF > remote-changegroup http://localhost:$HGPORT/notbundle10.hg notbundle10.hg > EOF $ hg pull -R clone ssh://user@dummy/repo pulling from ssh://user@dummy/repo searching for changes remote: remote-changegroup abort: http://localhost:$HGPORT/notbundle10.hg: not a bundle version 1.0 [255] $ hg -R clone log -G @ 2:5fddd98957c8 public Nicolas Dumazet C | o 
1:42ccdea3bb16 public Nicolas Dumazet B | o 0:cd010b8cd998 public Nicolas Dumazet A $ rm -rf clone $ killdaemons.py mercurial-3.7.3/tests/test-histedit-arguments.t0000644000175000017500000002756212676531525021267 0ustar mpmmpm00000000000000Test argument handling and various data parsing ================================================== Enable extensions used by this test. $ cat >>$HGRCPATH < [extensions] > histedit= > EOF Repo setup. $ hg init foo $ cd foo $ echo alpha >> alpha $ hg addr adding alpha $ hg ci -m one $ echo alpha >> alpha $ hg ci -m two $ echo alpha >> alpha $ hg ci -m three $ echo alpha >> alpha $ hg ci -m four $ echo alpha >> alpha $ hg ci -m five $ hg log --style compact --graph @ 4[tip] 08d98a8350f3 1970-01-01 00:00 +0000 test | five | o 3 c8e68270e35a 1970-01-01 00:00 +0000 test | four | o 2 eb57da33312f 1970-01-01 00:00 +0000 test | three | o 1 579e40513370 1970-01-01 00:00 +0000 test | two | o 0 6058cbb6cfd7 1970-01-01 00:00 +0000 test one histedit --continue/--abort with no existing state -------------------------------------------------- $ hg histedit --continue abort: no histedit in progress [255] $ hg histedit --abort abort: no histedit in progress [255] Run a dummy edit to make sure we get tip^^ correctly via revsingle. -------------------------------------------------------------------- $ HGEDITOR=cat hg histedit "tip^^" pick eb57da33312f 2 three pick c8e68270e35a 3 four pick 08d98a8350f3 4 five # Edit history between eb57da33312f and 08d98a8350f3 # # Commits are listed from least to most recent # # Commands: # # e, edit = use commit, but stop for amending # m, mess = edit commit message without changing commit content # p, pick = use commit # d, drop = remove commit from history # f, fold = use commit, but combine it with the one above # r, roll = like fold, but discard this commit's description # Run on a revision not ancestors of the current working directory. 
-------------------------------------------------------------------- $ hg up 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg histedit -r 4 abort: 08d98a8350f3 is not an ancestor of working directory [255] $ hg up --quiet Test that we pick the minimum of a revrange --------------------------------------- $ HGEDITOR=cat hg histedit '2::' --commands - << EOF > pick eb57da33312f 2 three > pick c8e68270e35a 3 four > pick 08d98a8350f3 4 five > EOF $ hg up --quiet $ HGEDITOR=cat hg histedit 'tip:2' --commands - << EOF > pick eb57da33312f 2 three > pick c8e68270e35a 3 four > pick 08d98a8350f3 4 five > EOF $ hg up --quiet Test config specified default ----------------------------- $ HGEDITOR=cat hg histedit --config "histedit.defaultrev=only(.) - ::eb57da33312f" --commands - << EOF > pick c8e68270e35a 3 four > pick 08d98a8350f3 4 five > EOF Run on a revision not descendants of the initial parent -------------------------------------------------------------------- Test the message shown for inconsistent histedit state, which may be created (and forgotten) by Mercurial earlier than 2.7. This emulates Mercurial earlier than 2.7 by renaming ".hg/histedit-state" temporarily. $ hg log -G -T '{rev} {shortest(node)} {desc}\n' -r 2:: @ 4 08d9 five | o 3 c8e6 four | o 2 eb57 three | $ HGEDITOR=cat hg histedit -r 4 --commands - << EOF > edit 08d98a8350f3 4 five > EOF 1 files updated, 0 files merged, 0 files removed, 0 files unresolved reverting alpha Editing (08d98a8350f3), you may commit or record as needed now. 
(hg histedit --continue to resume) [1] $ mv .hg/histedit-state .hg/histedit-state.back $ hg update --quiet --clean 2 $ echo alpha >> alpha $ mv .hg/histedit-state.back .hg/histedit-state $ hg histedit --continue saved backup bundle to $TESTTMP/foo/.hg/strip-backup/08d98a8350f3-02594089-backup.hg (glob) $ hg log -G -T '{rev} {shortest(node)} {desc}\n' -r 2:: @ 4 f5ed five | | o 3 c8e6 four |/ o 2 eb57 three | $ hg unbundle -q $TESTTMP/foo/.hg/strip-backup/08d98a8350f3-02594089-backup.hg $ hg strip -q -r f5ed --config extensions.strip= $ hg up -q 08d98a8350f3 Test that missing revisions are detected --------------------------------------- $ HGEDITOR=cat hg histedit "tip^^" --commands - << EOF > pick eb57da33312f 2 three > pick 08d98a8350f3 4 five > EOF hg: parse error: missing rules for changeset c8e68270e35a (use "drop c8e68270e35a" to discard, see also: "hg help -e histedit.config") [255] Test that extra revisions are detected --------------------------------------- $ HGEDITOR=cat hg histedit "tip^^" --commands - << EOF > pick 6058cbb6cfd7 0 one > pick c8e68270e35a 3 four > pick 08d98a8350f3 4 five > EOF hg: parse error: pick "6058cbb6cfd7" changeset was not a candidate (only use listed changesets) [255] Test malformed line --------------------------------------- $ HGEDITOR=cat hg histedit "tip^^" --commands - << EOF > pickeb57da33312f2three > pick c8e68270e35a 3 four > pick 08d98a8350f3 4 five > EOF hg: parse error: malformed line "pickeb57da33312f2three" [255] Test unknown changeset --------------------------------------- $ HGEDITOR=cat hg histedit "tip^^" --commands - << EOF > pick 0123456789ab 2 three > pick c8e68270e35a 3 four > pick 08d98a8350f3 4 five > EOF hg: parse error: unknown changeset 0123456789ab listed [255] Test unknown command --------------------------------------- $ HGEDITOR=cat hg histedit "tip^^" --commands - << EOF > coin eb57da33312f 2 three > pick c8e68270e35a 3 four > pick 08d98a8350f3 4 five > EOF hg: parse error: unknown action "coin" 
[255] Test duplicated changeset --------------------------------------- So one is missing and one appear twice. $ HGEDITOR=cat hg histedit "tip^^" --commands - << EOF > pick eb57da33312f 2 three > pick eb57da33312f 2 three > pick 08d98a8350f3 4 five > EOF hg: parse error: duplicated command for changeset eb57da33312f [255] Test bogus rev --------------------------------------- $ HGEDITOR=cat hg histedit "tip^^" --commands - << EOF > pick eb57da33312f 2 three > pick 0 > pick 08d98a8350f3 4 five > EOF hg: parse error: invalid changeset 0 [255] Test short version of command --------------------------------------- Note: we use varying amounts of white space between command name and changeset short hash. This tests issue3893. $ HGEDITOR=cat hg histedit "tip^^" --commands - << EOF > pick eb57da33312f 2 three > p c8e68270e35a 3 four > f 08d98a8350f3 4 five > EOF 1 files updated, 0 files merged, 0 files removed, 0 files unresolved reverting alpha 1 files updated, 0 files merged, 0 files removed, 0 files unresolved four *** five HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. 
HG: -- HG: user: test HG: branch 'default' HG: changed alpha 1 files updated, 0 files merged, 0 files removed, 0 files unresolved saved backup bundle to $TESTTMP/foo/.hg/strip-backup/*-backup.hg (glob) saved backup bundle to $TESTTMP/foo/.hg/strip-backup/*-backup.hg (glob) $ hg update -q 2 $ echo x > x $ hg add x $ hg commit -m'x' x created new head $ hg histedit -r 'heads(all())' abort: The specified revisions must have exactly one common root [255] Test that trimming description using multi-byte characters -------------------------------------------------------------------- $ python < fp = open('logfile', 'w') > fp.write('12345678901234567890123456789012345678901234567890' + > '12345') # there are 5 more columns for 80 columns > > # 2 x 4 = 8 columns, but 3 x 4 = 12 bytes > fp.write(u'\u3042\u3044\u3046\u3048'.encode('utf-8')) > > fp.close() > EOF $ echo xx >> x $ hg --encoding utf-8 commit --logfile logfile $ HGEDITOR=cat hg --encoding utf-8 histedit tip pick 3d3ea1f3a10b 5 1234567890123456789012345678901234567890123456789012345\xe3\x81\x82... (esc) # Edit history between 3d3ea1f3a10b and 3d3ea1f3a10b # # Commits are listed from least to most recent # # Commands: # # e, edit = use commit, but stop for amending # m, mess = edit commit message without changing commit content # p, pick = use commit # d, drop = remove commit from history # f, fold = use commit, but combine it with the one above # r, roll = like fold, but discard this commit's description # Test --continue with --keep $ hg strip -q -r . --config extensions.strip= $ hg histedit '.^' -q --keep --commands - << EOF > edit eb57da33312f 2 three > pick f3cfcca30c44 4 x > EOF Editing (eb57da33312f), you may commit or record as needed now. 
(hg histedit --continue to resume) [1] $ echo edit >> alpha $ hg histedit -q --continue $ hg log -G -T '{rev}:{node|short} {desc}' @ 6:8fda0c726bf2 x | o 5:63379946892c three | | o 4:f3cfcca30c44 x | | | | o 3:2a30f3cfee78 four | |/ *** | | five | o 2:eb57da33312f three |/ o 1:579e40513370 two | o 0:6058cbb6cfd7 one Test that abort fails gracefully on exception ---------------------------------------------- $ hg histedit . -q --commands - << EOF > edit 8fda0c726bf2 6 x > EOF Editing (8fda0c726bf2), you may commit or record as needed now. (hg histedit --continue to resume) [1] Corrupt histedit state file $ sed 's/8fda0c726bf2/123456789012/' .hg/histedit-state > ../corrupt-histedit $ mv ../corrupt-histedit .hg/histedit-state $ hg histedit --abort warning: encountered an exception during histedit --abort; the repository may not have been completely cleaned up abort: .*(No such file or directory:|The system cannot find the file specified).* (re) [255] Histedit state has been exited $ hg summary -q parent: 5:63379946892c commit: 1 added, 1 unknown (new branch head) update: 4 new changesets (update) $ cd .. Set up default base revision tests $ hg init defaultbase $ cd defaultbase $ touch foo $ hg -q commit -A -m root $ echo 1 > foo $ hg commit -m 'public 1' $ hg phase --force --public -r . $ echo 2 > foo $ hg commit -m 'draft after public' $ hg -q up -r 1 $ echo 3 > foo $ hg commit -m 'head 1 public' created new head $ hg phase --force --public -r . 
$ echo 4 > foo $ hg commit -m 'head 1 draft 1' $ echo 5 > foo $ hg commit -m 'head 1 draft 2' $ hg -q up -r 2 $ echo 6 > foo $ hg commit -m 'head 2 commit 1' $ echo 7 > foo $ hg commit -m 'head 2 commit 2' $ hg -q up -r 2 $ echo 8 > foo $ hg commit -m 'head 3' created new head $ hg -q up -r 2 $ echo 9 > foo $ hg commit -m 'head 4' created new head $ hg merge --tool :local -r 8 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg commit -m 'merge head 3 into head 4' $ echo 11 > foo $ hg commit -m 'commit 1 after merge' $ echo 12 > foo $ hg commit -m 'commit 2 after merge' $ hg log -G -T '{rev}:{node|short} {phase} {desc}\n' @ 12:8cde254db839 draft commit 2 after merge | o 11:6f2f0241f119 draft commit 1 after merge | o 10:90506cc76b00 draft merge head 3 into head 4 |\ | o 9:f8607a373a97 draft head 4 | | o | 8:0da92be05148 draft head 3 |/ | o 7:4c35cdf97d5e draft head 2 commit 2 | | | o 6:931820154288 draft head 2 commit 1 |/ | o 5:8cdc02b9bc63 draft head 1 draft 2 | | | o 4:463b8c0d2973 draft head 1 draft 1 | | | o 3:23a0c4eefcbf public head 1 public | | o | 2:4117331c3abb draft draft after public |/ o 1:4426d359ea59 public public 1 | o 0:54136a8ddf32 public root Default base revision should stop at public changesets $ hg -q up 8cdc02b9bc63 $ hg histedit --commands - < pick 463b8c0d2973 > pick 8cdc02b9bc63 > EOF Default base revision should stop at branchpoint $ hg -q up 4c35cdf97d5e $ hg histedit --commands - < pick 931820154288 > pick 4c35cdf97d5e > EOF Default base revision should stop at merge commit $ hg -q up 8cde254db839 $ hg histedit --commands - < pick 6f2f0241f119 > pick 8cde254db839 > EOF mercurial-3.7.3/tests/md5sum.py0000755000175000017500000000174712676531525016071 0ustar mpmmpm00000000000000#!/usr/bin/env python # # Based on python's Tools/scripts/md5sum.py # # This software may be used and distributed according to the terms # of the PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2, which is # 
GPL-compatible. import sys, os try: from hashlib import md5 except ImportError: from md5 import md5 try: import msvcrt msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY) msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY) except ImportError: pass for filename in sys.argv[1:]: try: fp = open(filename, 'rb') except IOError as msg: sys.stderr.write('%s: Can\'t open: %s\n' % (filename, msg)) sys.exit(1) m = md5() try: while True: data = fp.read(8192) if not data: break m.update(data) except IOError as msg: sys.stderr.write('%s: I/O error: %s\n' % (filename, msg)) sys.exit(1) sys.stdout.write('%s %s\n' % (m.hexdigest(), filename)) sys.exit(0) mercurial-3.7.3/tests/test-filelog.py0000755000175000017500000000301612676531525017244 0ustar mpmmpm00000000000000#!/usr/bin/env python """ Tests the behavior of filelog w.r.t. data starting with '\1\n' """ from mercurial import ui, hg from mercurial.node import nullid, hex myui = ui.ui() repo = hg.repository(myui, path='.', create=True) fl = repo.file('foobar') def addrev(text, renamed=False): if renamed: # data doesn't matter. 
Just make sure filelog.renamed() returns True meta = {'copyrev': hex(nullid), 'copy': 'bar'} else: meta = {} lock = t = None try: lock = repo.lock() t = repo.transaction('commit') node = fl.add(text, meta, t, 0, nullid, nullid) return node finally: if t: t.close() if lock: lock.release() def error(text): print 'ERROR: ' + text textwith = '\1\nfoo' without = 'foo' node = addrev(textwith) if not textwith == fl.read(node): error('filelog.read for data starting with \\1\\n') if fl.cmp(node, textwith) or not fl.cmp(node, without): error('filelog.cmp for data starting with \\1\\n') if fl.size(0) != len(textwith): error('FIXME: This is a known failure of filelog.size for data starting ' 'with \\1\\n') node = addrev(textwith, renamed=True) if not textwith == fl.read(node): error('filelog.read for a renaming + data starting with \\1\\n') if fl.cmp(node, textwith) or not fl.cmp(node, without): error('filelog.cmp for a renaming + data starting with \\1\\n') if fl.size(1) != len(textwith): error('filelog.size for a renaming + data starting with \\1\\n') print 'OK.' 
mercurial-3.7.3/tests/test-mq-qqueue.t0000644000175000017500000000532312676531525017356 0ustar mpmmpm00000000000000 $ echo "[extensions]" >> $HGRCPATH $ echo "mq=" >> $HGRCPATH $ hg init foo $ cd foo $ echo a > a $ hg ci -qAm a Default queue: $ hg qqueue patches (active) $ echo b > a $ hg qnew -fgDU somestuff Applied patches in default queue: $ hg qap somestuff Try to change patch (create succeeds, switch fails): $ hg qqueue foo --create abort: new queue created, but cannot make active as patches are applied [255] $ hg qqueue foo patches (active) Empty default queue: $ hg qpop popping somestuff patch queue now empty Switch queue: $ hg qqueue foo $ hg qqueue foo (active) patches List queues, quiet: $ hg qqueue --quiet foo patches Fail creating queue with already existing name: $ hg qqueue --create foo abort: queue "foo" already exists [255] $ hg qqueue foo (active) patches Create new queue for rename: $ hg qqueue --create bar $ hg qqueue bar (active) foo patches Rename queue, same name: $ hg qqueue --rename bar abort: can't rename "bar" to its current name [255] Rename queue to existing: $ hg qqueue --rename foo abort: queue "foo" already exists [255] Rename queue: $ hg qqueue --rename buz $ hg qqueue buz (active) foo patches Switch back to previous queue: $ hg qqueue foo $ hg qqueue --delete buz $ hg qqueue foo (active) patches Create queue for purge: $ hg qqueue --create purge-me $ hg qqueue foo patches purge-me (active) Create patch for purge: $ hg qnew patch-purge-me $ ls -1d .hg/patches-purge-me 2>/dev/null || true .hg/patches-purge-me $ hg qpop -a popping patch-purge-me patch queue now empty Purge queue: $ hg qqueue foo $ hg qqueue --purge purge-me $ hg qqueue foo (active) patches $ ls -1d .hg/patches-purge-me 2>/dev/null || true Unapplied patches: $ hg qun $ echo c > a $ hg qnew -fgDU otherstuff Fail switching back: $ hg qqueue patches abort: new queue created, but cannot make active as patches are applied [255] Fail deleting current: $ hg qqueue foo --delete 
abort: cannot delete currently active queue [255] Switch back and delete foo: $ hg qpop -a popping otherstuff patch queue now empty $ hg qqueue patches $ hg qqueue foo --delete $ hg qqueue patches (active) Tricky cases: $ hg qqueue store --create $ hg qnew journal $ hg qqueue patches store (active) $ hg qpop -a popping journal patch queue now empty $ hg qqueue patches $ hg qun somestuff Invalid names: $ hg qqueue test/../../bar --create abort: invalid queue name, may not contain the characters ":\/." [255] $ hg qqueue . --create abort: invalid queue name, may not contain the characters ":\/." [255] $ cd .. mercurial-3.7.3/tests/test-diff-copy-depth.t0000644000175000017500000000143312676531525020416 0ustar mpmmpm00000000000000 $ for i in aaa zzz; do > hg init t > cd t > > echo > echo "-- With $i" > > touch file > hg add file > hg ci -m "Add" > > hg cp file $i > hg ci -m "a -> $i" > > hg cp $i other-file > echo "different" >> $i > hg ci -m "$i -> other-file" > > hg cp other-file somename > > echo "Status": > hg st -C > echo > echo "Diff:" > hg diff -g > > cd .. 
> rm -rf t > done -- With aaa Status: A somename other-file Diff: diff --git a/other-file b/somename copy from other-file copy to somename -- With zzz Status: A somename other-file Diff: diff --git a/other-file b/somename copy from other-file copy to somename mercurial-3.7.3/tests/test-no-symlinks.t0000644000175000017500000000226512676531525017723 0ustar mpmmpm00000000000000#require no-symlink # The following script was used to create the bundle: # # hg init symlinks # cd symlinks # echo a > a # mkdir d # echo b > d/b # ln -s a a.lnk # ln -s d/b d/b.lnk # hg ci -Am t # hg bundle --base null ../test-no-symlinks.hg Extract a symlink on a platform not supporting them $ hg init t $ cd t $ hg pull -q "$TESTDIR/bundles/test-no-symlinks.hg" $ hg update 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat a.lnk && echo a $ cat d/b.lnk && echo d/b Copy a symlink and move another $ hg copy a.lnk d/a2.lnk $ hg mv d/b.lnk b2.lnk $ hg ci -Am copy $ cat d/a2.lnk && echo a $ cat b2.lnk && echo d/b Bundle and extract again $ hg bundle --base null ../symlinks.hg 2 changesets found $ cd .. $ hg init t2 $ cd t2 $ hg pull ../symlinks.hg pulling from ../symlinks.hg requesting all changes adding changesets adding manifests adding file changes added 2 changesets with 6 changes to 6 files (run 'hg update' to get a working copy) $ hg update 5 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat a.lnk && echo a $ cat d/a2.lnk && echo a $ cat b2.lnk && echo d/b mercurial-3.7.3/tests/test-resolve.t0000644000175000017500000002124612676531525017117 0ustar mpmmpm00000000000000test that a commit clears the merge state. 
$ hg init repo $ cd repo $ echo foo > file1 $ echo foo > file2 $ hg commit -Am 'add files' adding file1 adding file2 $ echo bar >> file1 $ echo bar >> file2 $ hg commit -Am 'append bar to files' create a second head with conflicting edits $ hg up -C 0 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo baz >> file1 $ echo baz >> file2 $ hg commit -Am 'append baz to files' created new head create a third head with no conflicting edits $ hg up -qC 0 $ echo foo > file3 $ hg commit -Am 'add non-conflicting file' adding file3 created new head failing merge $ hg up -qC 2 $ hg merge --tool=internal:fail 1 0 files updated, 0 files merged, 0 files removed, 2 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] resolve -l should contain unresolved entries $ hg resolve -l U file1 U file2 $ hg resolve -l --no-status file1 file2 resolving an unknown path should emit a warning, but not for -l $ hg resolve -m does-not-exist arguments do not match paths that need resolving $ hg resolve -l does-not-exist don't allow marking or unmarking driver-resolved files $ cat > $TESTTMP/markdriver.py << EOF > '''mark and unmark files as driver-resolved''' > from mercurial import cmdutil, merge, scmutil > cmdtable = {} > command = cmdutil.command(cmdtable) > @command('markdriver', > [('u', 'unmark', None, '')], > 'FILE...') > def markdriver(ui, repo, *pats, **opts): > wlock = repo.wlock() > try: > ms = merge.mergestate.read(repo) > m = scmutil.match(repo[None], pats, opts) > for f in ms: > if not m(f): > continue > if not opts['unmark']: > ms.mark(f, 'd') > else: > ms.mark(f, 'u') > ms.commit() > finally: > wlock.release() > EOF $ hg --config extensions.markdriver=$TESTTMP/markdriver.py markdriver file1 $ hg resolve --list D file1 U file2 $ hg resolve --mark file1 not marking file1 as it is driver-resolved this should not print out file1 $ hg resolve --mark --all (no more unresolved files -- run "hg resolve --all" to 
conclude) $ hg resolve --mark 'glob:file*' (no more unresolved files -- run "hg resolve --all" to conclude) $ hg resolve --list D file1 R file2 $ hg resolve --unmark file1 not unmarking file1 as it is driver-resolved (no more unresolved files -- run "hg resolve --all" to conclude) $ hg resolve --unmark --all $ hg resolve --list D file1 U file2 $ hg --config extensions.markdriver=$TESTTMP/markdriver.py markdriver --unmark file1 $ hg resolve --list U file1 U file2 resolve the failure $ echo resolved > file1 $ hg resolve -m file1 resolve -l should show resolved file as resolved $ hg resolve -l R file1 U file2 $ hg resolve -l -Tjson [ { "path": "file1", "status": "R" }, { "path": "file2", "status": "U" } ] resolve -m without paths should mark all resolved $ hg resolve -m (no more unresolved files) $ hg commit -m 'resolved' resolve -l should be empty after commit $ hg resolve -l $ hg resolve -l -Tjson [ ] resolve --all should abort when no merge in progress $ hg resolve --all abort: resolve command not applicable when not merging [255] resolve -m should abort when no merge in progress $ hg resolve -m abort: resolve command not applicable when not merging [255] can not update or merge when there are unresolved conflicts $ hg up -qC 0 $ echo quux >> file1 $ hg up 1 merging file1 warning: conflicts while merging file1! 
(edit, then use 'hg resolve --mark') 1 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges [1] $ hg up 0 abort: outstanding merge conflicts [255] $ hg merge 2 abort: outstanding merge conflicts [255] $ hg merge --force 2 abort: outstanding merge conflicts [255] set up conflict-free merge $ hg up -qC 3 $ hg merge 1 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) resolve --all should do nothing in merge without conflicts $ hg resolve --all (no more unresolved files) resolve -m should do nothing in merge without conflicts $ hg resolve -m (no more unresolved files) get back to conflicting state $ hg up -qC 2 $ hg merge --tool=internal:fail 1 0 files updated, 0 files merged, 0 files removed, 2 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] resolve without arguments should suggest --all $ hg resolve abort: no files or directories specified (use --all to re-merge all unresolved files) [255] resolve --all should re-merge all unresolved files $ hg resolve --all merging file1 merging file2 warning: conflicts while merging file1! (edit, then use 'hg resolve --mark') warning: conflicts while merging file2! (edit, then use 'hg resolve --mark') [1] $ cat file1.orig foo baz $ cat file2.orig foo baz .orig files should exists where specified $ hg resolve --all --verbose --config 'ui.origbackuppath=.hg/origbackups' merging file1 creating directory: $TESTTMP/repo/.hg/origbackups (glob) merging file2 warning: conflicts while merging file1! (edit, then use 'hg resolve --mark') warning: conflicts while merging file2! (edit, then use 'hg resolve --mark') [1] $ ls .hg/origbackups file1.orig file2.orig $ grep '<<<' file1 > /dev/null $ grep '<<<' file2 > /dev/null resolve should re-merge file $ echo resolved > file1 $ hg resolve -q file1 warning: conflicts while merging file1! 
(edit, then use 'hg resolve --mark') [1] $ grep '<<<' file1 > /dev/null test .orig behavior with resolve $ hg resolve -q file1 --tool "sh -c 'f --dump \"$TESTTMP/repo/file1.orig\"'" $TESTTMP/repo/file1.orig: (glob) >>> foo baz <<< resolve should do nothing if 'file' was marked resolved $ echo resolved > file1 $ hg resolve -m file1 $ hg resolve -q file1 $ cat file1 resolved insert unsupported advisory merge record $ hg --config extensions.fakemergerecord=$TESTDIR/fakemergerecord.py fakemergerecord -x $ hg debugmergestate * version 2 records local: 57653b9f834a4493f7240b0681efcb9ae7cab745 other: dc77451844e37f03f5c559e3b8529b2b48d381d1 unrecognized entry: x advisory record file: file1 (record type "F", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node 2ed2a3912a0b24502043eae84ee4b279c18b90dd) other path: file1 (node 6f4310b00b9a147241b071a60c28a650827fb03d) file: file2 (record type "F", state "u", hash cb99b709a1978bd205ab9dfd4c5aaa1fc91c7523) local path: file2 (flags "") ancestor path: file2 (node 2ed2a3912a0b24502043eae84ee4b279c18b90dd) other path: file2 (node 6f4310b00b9a147241b071a60c28a650827fb03d) $ hg resolve -l R file1 U file2 insert unsupported mandatory merge record $ hg --config extensions.fakemergerecord=$TESTDIR/fakemergerecord.py fakemergerecord -X $ hg debugmergestate * version 2 records local: 57653b9f834a4493f7240b0681efcb9ae7cab745 other: dc77451844e37f03f5c559e3b8529b2b48d381d1 file: file1 (record type "F", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node 2ed2a3912a0b24502043eae84ee4b279c18b90dd) other path: file1 (node 6f4310b00b9a147241b071a60c28a650827fb03d) file: file2 (record type "F", state "u", hash cb99b709a1978bd205ab9dfd4c5aaa1fc91c7523) local path: file2 (flags "") ancestor path: file2 (node 2ed2a3912a0b24502043eae84ee4b279c18b90dd) other path: file2 (node 6f4310b00b9a147241b071a60c28a650827fb03d) unrecognized 
entry: X mandatory record $ hg resolve -l abort: unsupported merge state records: X (see https://mercurial-scm.org/wiki/MergeStateRecords for more information) [255] $ hg resolve -ma abort: unsupported merge state records: X (see https://mercurial-scm.org/wiki/MergeStateRecords for more information) [255] $ hg summary parent: 2:57653b9f834a append baz to files parent: 1:dc77451844e3 append bar to files branch: default warning: merge state has unsupported record types: X commit: 2 modified, 2 unknown (merge) update: 2 new changesets (update) phases: 5 draft update --clean shouldn't abort on unsupported records $ hg up -qC 1 $ hg debugmergestate no merge state found test crashed merge with empty mergestate $ mkdir .hg/merge $ touch .hg/merge/state resolve -l should be empty $ hg resolve -l $ cd .. mercurial-3.7.3/tests/test-changelog-exec.t0000644000175000017500000000216412676531525020307 0ustar mpmmpm00000000000000#require execbit b51a8138292a introduced a regression where we would mention in the changelog executable files added by the second parent of a merge. Test that that doesn't happen anymore $ hg init repo $ cd repo $ echo foo > foo $ hg ci -qAm 'add foo' $ echo bar > bar $ chmod +x bar $ hg ci -qAm 'add bar' manifest of p2: $ hg manifest bar foo $ hg up -qC 0 $ echo >> foo $ hg ci -m 'change foo' created new head manifest of p1: $ hg manifest foo $ hg merge 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ chmod +x foo $ hg ci -m 'merge' this should not mention bar but should mention foo: $ hg tip -v changeset: 3:c53d17ff3380 tag: tip parent: 2:ed1b79f46b9a parent: 1:d394a8db219b user: test date: Thu Jan 01 00:00:00 1970 +0000 files: foo description: merge $ hg debugindex bar rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 5 ..... 1 b004912a8510 000000000000 000000000000 (re) $ cd .. 
mercurial-3.7.3/tests/filterpyflakes.py0000755000175000017500000000341612676531525017676 0ustar mpmmpm00000000000000#!/usr/bin/env python # Filter output by pyflakes to control which warnings we check from __future__ import absolute_import import re import sys def makekey(typeandline): """ for sorting lines by: msgtype, path/to/file, lineno, message typeandline is a sequence of a message type and the entire message line the message line format is path/to/file:line: message >>> makekey((3, 'example.py:36: any message')) (3, 'example.py', 36, ' any message') >>> makekey((7, 'path/to/file.py:68: dummy message')) (7, 'path/to/file.py', 68, ' dummy message') >>> makekey((2, 'fn:88: m')) > makekey((2, 'fn:9: m')) True """ msgtype, line = typeandline fname, line, message = line.split(":", 2) # line as int for ordering 9 before 88 return msgtype, fname, int(line), message lines = [] for line in sys.stdin: # We whitelist tests (see more messages in pyflakes.messages) pats = [ (r"imported but unused", None), (r"local variable '.*' is assigned to but never used", None), (r"unable to detect undefined names", None), (r"undefined name '.*'", r"undefined name '(WindowsError|memoryview)'") ] for msgtype, (pat, excl) in enumerate(pats): if re.search(pat, line) and (not excl or not re.search(excl, line)): break # pattern matches else: continue # no pattern matched, next line fn = line.split(':', 1)[0] f = open(fn) data = f.read() f.close() if 'no-' 'check-code' in data: continue lines.append((msgtype, line)) for msgtype, line in sorted(lines, key=makekey): sys.stdout.write(line) print # self test of "undefined name" detection for other than 'memoryview' if False: print undefinedname mercurial-3.7.3/tests/test-convert-bzr-114.t0000644000175000017500000000162412676531525020214 0ustar mpmmpm00000000000000#require bzr bzr114 $ . "$TESTDIR/bzr-definitions" The file/directory replacement can only be reproduced on bzr >= 1.4. 
Merge it back in test-convert-bzr-directories once this version becomes mainstream. replace file with dir $ mkdir test-replace-file-with-dir $ cd test-replace-file-with-dir $ bzr init -q source $ cd source $ echo d > d $ bzr add -q d $ bzr commit -q -m 'add d file' $ rm d $ mkdir d $ bzr add -q d $ bzr commit -q -m 'replace with d dir' $ echo a > d/a $ bzr add -q d/a $ bzr commit -q -m 'add d/a' $ cd .. $ hg convert source source-hg initializing destination source-hg repository scanning source... sorting... converting... 2 add d file 1 replace with d dir 0 add d/a $ manifest source-hg tip % manifest of tip 644 d/a $ cd source-hg $ hg update 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd ../.. mercurial-3.7.3/tests/test-issue522.t0000644000175000017500000000315212676531525017015 0ustar mpmmpm00000000000000https://bz.mercurial-scm.org/522 In the merge below, the file "foo" has the same contents in both parents, but if we look at the file-level history, we'll notice that the version in p1 is an ancestor of the version in p2. This test makes sure that we'll use the version from p2 in the manifest of the merge revision. 
$ hg init $ echo foo > foo $ hg ci -qAm 'add foo' $ echo bar >> foo $ hg ci -m 'change foo' $ hg backout -r tip -m 'backout changed foo' reverting foo changeset 2:4d9e78aaceee backs out changeset 1:b515023e500e $ hg up -C 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ touch bar $ hg ci -qAm 'add bar' $ hg merge --debug searching for copies back to rev 1 unmatched files in local: bar resolving manifests branchmerge: True, force: False, partial: False ancestor: bbd179dfa0a7, local: 71766447bdbb+, remote: 4d9e78aaceee foo: remote is newer -> g getting foo 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg debugstate | grep foo m 0 -2 unset foo $ hg st -A foo M foo $ hg ci -m 'merge' $ hg manifest --debug | grep foo c6fc755d7e68f49f880599da29f15add41f42f5a 644 foo $ hg debugindex foo rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 5 ..... 0 2ed2a3912a0b 000000000000 000000000000 (re) 1 5 9 ..... 1 6f4310b00b9a 2ed2a3912a0b 000000000000 (re) 2 14 5 ..... 2 c6fc755d7e68 6f4310b00b9a 000000000000 (re) mercurial-3.7.3/tests/test-convert-bzr-treeroot.t0000644000175000017500000000142312676531525021547 0ustar mpmmpm00000000000000#require bzr $ . "$TESTDIR/bzr-definitions" $ cat > treeset.py < import sys > from bzrlib import workingtree > wt = workingtree.WorkingTree.open('.') > > message, rootid = sys.argv[1:] > wt.set_root_id('tree_root-%s' % rootid) > wt.commit(message) > EOF change the id of the tree root $ mkdir test-change-treeroot-id $ cd test-change-treeroot-id $ bzr init -q source $ cd source $ echo content > file $ bzr add -q file $ bzr commit -q -m 'Initial add' $ python ../../treeset.py 'Changed root' new $ cd .. $ hg convert source source-hg initializing destination source-hg repository scanning source... sorting... converting... 1 Initial add 0 Changed root $ manifest source-hg tip % manifest of tip 644 file $ cd .. 
mercurial-3.7.3/tests/test-commit-unresolved.t0000644000175000017500000000314612676531525021113 0ustar mpmmpm00000000000000 $ addcommit () { > echo $1 > $1 > hg add $1 > hg commit -d "${2} 0" -m $1 > } $ commit () { > hg commit -d "${2} 0" -m $1 > } $ hg init a $ cd a $ addcommit "A" 0 $ addcommit "B" 1 $ echo "C" >> A $ commit "C" 2 $ hg update -C 0 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo "D" >> A $ commit "D" 3 created new head Merging a conflict araises $ hg merge merging A warning: conflicts while merging A! (edit, then use 'hg resolve --mark') 1 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] Correct the conflict without marking the file as resolved $ echo "ABCD" > A $ hg commit -m "Merged" abort: unresolved merge conflicts (see "hg help resolve") [255] Mark the conflict as resolved and commit $ hg resolve -m A (no more unresolved files) $ hg commit -m "Merged" Test that if a file is removed but not marked resolved, the commit still fails (issue4972) $ hg up ".^" 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge 2 merging A warning: conflicts while merging A! (edit, then use 'hg resolve --mark') 1 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ hg rm --force A $ hg commit -m merged abort: unresolved merge conflicts (see "hg help resolve") [255] $ hg resolve -ma (no more unresolved files) $ hg commit -m merged created new head $ cd .. 
mercurial-3.7.3/tests/test-repair-strip.t0000644000175000017500000000735112676531525020062 0ustar mpmmpm00000000000000#require unix-permissions no-root $ echo "[extensions]" >> $HGRCPATH $ echo "mq=">> $HGRCPATH $ teststrip() { > hg -q up -C $1 > echo % before update $1, strip $2 > hg parents > chmod -$3 $4 > hg strip $2 2>&1 | sed 's/\(bundle\).*/\1/' | sed 's/Permission denied.*\.hg\/store\/\(.*\)/Permission denied \.hg\/store\/\1/' > echo % after update $1, strip $2 > chmod +$3 $4 > hg verify > echo % journal contents > if [ -f .hg/store/journal ]; then > sed -e 's/\.i[^\n]*/\.i/' .hg/store/journal > else > echo "(no journal)" > fi > ls .hg/store/journal >/dev/null 2>&1 && hg recover > ls .hg/strip-backup/* >/dev/null 2>&1 && hg unbundle -q .hg/strip-backup/* > rm -rf .hg/strip-backup > } $ hg init test $ cd test $ echo a > a $ hg -q ci -m "a" -A $ echo b > b $ hg -q ci -m "b" -A $ echo b2 >> b $ hg -q ci -m "b2" -A $ echo c > c $ hg -q ci -m "c" -A $ teststrip 0 2 w .hg/store/data/b.i % before update 0, strip 2 changeset: 0:cb9a9f314b8b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a saved backup bundle transaction abort! failed to truncate data/b.i rollback failed - please run hg recover strip failed, full bundle abort: Permission denied .hg/store/data/b.i % after update 0, strip 2 abandoned transaction found - run hg recover checking changesets checking manifests crosschecking files in changesets and manifests checking files b@?: rev 1 points to nonexistent changeset 2 (expected 1) b@?: 736c29771fba not in manifests warning: orphan revlog 'data/c.i' 2 files, 2 changesets, 3 total revisions 2 warnings encountered! 2 integrity errors encountered! 
% journal contents 00changelog.i 00manifest.i data/b.i data/c.i rolling back interrupted transaction checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 2 changesets, 2 total revisions $ teststrip 0 2 r .hg/store/data/b.i % before update 0, strip 2 changeset: 0:cb9a9f314b8b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a abort: Permission denied .hg/store/data/b.i % after update 0, strip 2 checking changesets checking manifests crosschecking files in changesets and manifests checking files 3 files, 4 changesets, 4 total revisions % journal contents (no journal) $ teststrip 0 2 w .hg/store/00manifest.i % before update 0, strip 2 changeset: 0:cb9a9f314b8b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a saved backup bundle transaction abort! failed to truncate 00manifest.i rollback failed - please run hg recover strip failed, full bundle abort: Permission denied .hg/store/00manifest.i % after update 0, strip 2 abandoned transaction found - run hg recover checking changesets checking manifests manifest@?: rev 2 points to nonexistent changeset 2 manifest@?: 3362547cdf64 not in changesets manifest@?: rev 3 points to nonexistent changeset 3 manifest@?: 265a85892ecb not in changesets crosschecking files in changesets and manifests c@3: in manifest but not in changeset checking files b@?: rev 1 points to nonexistent changeset 2 (expected 1) c@?: rev 0 points to nonexistent changeset 3 3 files, 2 changesets, 4 total revisions 1 warnings encountered! 7 integrity errors encountered! (first damaged changeset appears to be 3) % journal contents 00changelog.i 00manifest.i data/b.i data/c.i rolling back interrupted transaction checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 2 changesets, 2 total revisions $ cd .. 
mercurial-3.7.3/tests/test-websub.t0000644000175000017500000000157512676531525016732 0ustar mpmmpm00000000000000#require serve $ hg init test $ cd test $ cat > .hg/hgrc < [extensions] > # this is only necessary to check that the mapping from > # interhg to websub works > interhg = > > [websub] > issues = s|Issue(\d+)|Issue\1| > > [interhg] > # check that we maintain some interhg backwards compatibility... > # yes, 'x' is a weird delimiter... > markbugs = sxbugxbugx > EOF $ touch foo $ hg add foo $ hg commit -d '1 0' -m 'Issue123: fixed the bug!' $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log $ cat hg.pid >> $DAEMON_PIDS log $ get-with-headers.py localhost:$HGPORT "rev/tip" | grep bts
                        Issue123: fixed the bug!
                        errors $ cat errors.log $ cd .. mercurial-3.7.3/tests/test-empty.t0000644000175000017500000000146412676531525016576 0ustar mpmmpm00000000000000Create an empty repo: $ hg init a $ cd a Try some commands: $ hg log $ hg grep wah [1] $ hg manifest $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 0 files, 0 changesets, 0 total revisions Check the basic files created: $ ls .hg 00changelog.i requires store Should be empty: $ ls .hg/store Poke at a clone: $ cd .. $ hg clone a b updating to branch default 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd b $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 0 files, 0 changesets, 0 total revisions $ ls .hg 00changelog.i hgrc requires store Should be empty: $ ls .hg/store $ cd .. mercurial-3.7.3/tests/test-locate.t0000644000175000017500000000370012676531525016702 0ustar mpmmpm00000000000000 $ hg init repo $ cd repo $ echo 0 > a $ echo 0 > b $ echo 0 > t.h $ mkdir t $ echo 0 > t/x $ echo 0 > t/b $ echo 0 > t/e.h $ mkdir dir.h $ echo 0 > dir.h/foo $ hg ci -A -m m adding a adding b adding dir.h/foo adding t.h adding t/b adding t/e.h adding t/x $ touch nottracked $ hg locate a a $ hg locate NONEXISTENT [1] $ hg locate a b dir.h/foo t.h t/b t/e.h t/x $ hg rm a $ hg ci -m m $ hg locate a [1] $ hg locate NONEXISTENT [1] $ hg locate relpath:NONEXISTENT [1] $ hg locate b dir.h/foo t.h t/b t/e.h t/x $ hg locate -r 0 a a $ hg locate -r 0 NONEXISTENT [1] $ hg locate -r 0 relpath:NONEXISTENT [1] $ hg locate -r 0 a b dir.h/foo t.h t/b t/e.h t/x -I/-X with relative path should work: $ cd t $ hg locate b dir.h/foo t.h t/b t/e.h t/x $ hg locate -I ../t t/b t/e.h t/x Issue294: hg remove --after dir fails when dir.* also exists $ cd .. 
$ rm -r t $ hg rm t/b $ hg locate 't/**' t/b (glob) t/e.h (glob) t/x (glob) $ hg files b dir.h/foo (glob) t.h t/e.h (glob) t/x (glob) $ hg files b b $ mkdir otherdir $ cd otherdir $ hg files path: ../b (glob) ../dir.h/foo (glob) ../t.h (glob) ../t/e.h (glob) ../t/x (glob) $ hg files path:. ../b (glob) ../dir.h/foo (glob) ../t.h (glob) ../t/e.h (glob) ../t/x (glob) $ hg locate b ../b (glob) ../t/b (glob) $ hg locate '*.h' ../t.h (glob) ../t/e.h (glob) $ hg locate path:t/x ../t/x (glob) $ hg locate 're:.*\.h$' ../t.h (glob) ../t/e.h (glob) $ hg locate -r 0 b ../b (glob) ../t/b (glob) $ hg locate -r 0 '*.h' ../t.h (glob) ../t/e.h (glob) $ hg locate -r 0 path:t/x ../t/x (glob) $ hg locate -r 0 're:.*\.h$' ../t.h (glob) ../t/e.h (glob) $ hg files ../b (glob) ../dir.h/foo (glob) ../t.h (glob) ../t/e.h (glob) ../t/x (glob) $ hg files . [1] $ cd ../.. mercurial-3.7.3/tests/test-hybridencode.py.out0000644000175000017500000007607412676531525021103 0ustar mpmmpm00000000000000A = 'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}' B = 'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}' uppercase char X is encoded as _x A = 'data/ABCDEFGHIJKLMNOPQRSTUVWXYZ' B = 'data/_a_b_c_d_e_f_g_h_i_j_k_l_m_n_o_p_q_r_s_t_u_v_w_x_y_z' underbar is doubled A = 'data/_' B = 'data/__' tilde is character-encoded A = 'data/~' B = 'data/~7e' characters in ASCII code range 1..31 A = 'data/\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f' B = 'data/~01~02~03~04~05~06~07~08~09~0a~0b~0c~0d~0e~0f~10~11~12~13~14~15~16~17~18~19~1a~1b~1c~1d~1e~1f' characters in ASCII code range 126..255 A = 'data/~\x7f\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f' B = 'data/~7e~7f~80~81~82~83~84~85~86~87~88~89~8a~8b~8c~8d~8e~8f~90~91~92~93~94~95~96~97~98~99~9a~9b~9c~9d~9e~9f' A = 
'data/\xa0\xa1\xa2\xa3\xa4\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba\xbb\xbc\xbd\xbe\xbf' B = 'data/~a0~a1~a2~a3~a4~a5~a6~a7~a8~a9~aa~ab~ac~ad~ae~af~b0~b1~b2~b3~b4~b5~b6~b7~b8~b9~ba~bb~bc~bd~be~bf' A = 'data/\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf' B = 'data/~c0~c1~c2~c3~c4~c5~c6~c7~c8~c9~ca~cb~cc~cd~ce~cf~d0~d1~d2~d3~d4~d5~d6~d7~d8~d9~da~db~dc~dd~de~df' A = 'data/\xe0\xe1\xe2\xe3\xe4\xe5\xe6\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff' B = 'data/~e0~e1~e2~e3~e4~e5~e6~e7~e8~e9~ea~eb~ec~ed~ee~ef~f0~f1~f2~f3~f4~f5~f6~f7~f8~f9~fa~fb~fc~fd~fe~ff' Windows reserved characters A = 'data/less <, greater >, colon :, double-quote ", backslash \\, pipe |, question-mark ?, asterisk *' B = 'data/less ~3c, greater ~3e, colon ~3a, double-quote ~22, backslash ~5c, pipe ~7c, question-mark ~3f, asterisk ~2a' encoding directories ending in .hg, .i or .d with '.hg' suffix A = 'data/x.h.i/x.hg/x.i/x.d/foo' B = 'data/x.h.i.hg/x.hg.hg/x.i.hg/x.d.hg/foo' A = 'data/a.hg/a.i/a.d/foo' B = 'data/a.hg.hg/a.i.hg/a.d.hg/foo' A = 'data/au.hg/au.i/au.d/foo' B = 'data/au.hg.hg/au.i.hg/au.d.hg/foo' A = 'data/aux.hg/aux.i/aux.d/foo' B = 'data/au~78.hg.hg/au~78.i.hg/au~78.d.hg/foo' A = 'data/auxy.hg/auxy.i/auxy.d/foo' B = 'data/auxy.hg.hg/auxy.i.hg/auxy.d.hg/foo' but these are not encoded on *filenames* A = 'data/foo/x.hg' B = 'data/foo/x.hg' A = 'data/foo/x.i' B = 'data/foo/x.i' A = 'data/foo/x.d' B = 'data/foo/x.d' A = 'data/foo/a.hg' B = 'data/foo/a.hg' A = 'data/foo/a.i' B = 'data/foo/a.i' A = 'data/foo/a.d' B = 'data/foo/a.d' A = 'data/foo/au.hg' B = 'data/foo/au.hg' A = 'data/foo/au.i' B = 'data/foo/au.i' A = 'data/foo/au.d' B = 'data/foo/au.d' A = 'data/foo/aux.hg' B = 'data/foo/au~78.hg' A = 'data/foo/aux.i' B = 'data/foo/au~78.i' A = 'data/foo/aux.d' B = 'data/foo/au~78.d' A = 'data/foo/auxy.hg' B = 
'data/foo/auxy.hg' A = 'data/foo/auxy.i' B = 'data/foo/auxy.i' A = 'data/foo/auxy.d' B = 'data/foo/auxy.d' plain .hg, .i and .d directories have the leading dot encoded A = 'data/.hg/.i/.d/foo' B = 'data/~2ehg.hg/~2ei.hg/~2ed.hg/foo' A = 'data/aux.bla/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c.i' B = 'data/au~78.bla/bla.aux/pr~6e/_p_r_n/lpt/co~6d3/nu~6c/coma/foo._n_u_l/normal.c.i' A = 'data/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT.i' B = 'dh/au~78/second/x.prn/fourth/fi~3afth/sixth/seventh/eighth/nineth/tenth/loremia20419e358ddff1bf8751e38288aff1d7c32ec05.i' A = 'data/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider.i' B = 'dh/enterpri/openesba/contrib-/corba-bc/netbeans/wsdlexte/src/main/java/org.net7018f27961fdf338a598a40c4683429e7ffb9743.i' A = 'data/AUX.THE-QUICK-BROWN-FOX-JU:MPS-OVER-THE-LAZY-DOG-THE-QUICK-BROWN-FOX-JUMPS-OVER-THE-LAZY-DOG.TXT.i' B = 'dh/au~78.the-quick-brown-fox-ju~3amps-over-the-lazy-dog-the-quick-brown-fox-jud4dcadd033000ab2b26eb66bae1906bcb15d4a70.i' A = 'data/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt' B = 'dh/project_/resource/anotherl/followed/andanoth/andthenanextremelylongfilenaf93030515d9849cfdca52937c2204d19f83913e5.txt' A = 'data/Project.Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt' B = 'dh/project_/resource/anotherl/followed/andanoth/andthenanextremelylongfilena0fd7c506f5c9d58204444fc67e9499006bd2d445.txt' A = 'data/foo.../foo / /a./_. 
/__/.x../ bla/.FOO/something.i' B = 'data/foo..~2e/foo ~20/~20/a~2e/__.~20/____/~2ex.~2e/~20 bla/~2e_f_o_o/something.i' A = 'data/c/co/com/com0/com1/com2/com3/com4/com5/com6/com7/com8/com9' B = 'data/c/co/com/com0/co~6d1/co~6d2/co~6d3/co~6d4/co~6d5/co~6d6/co~6d7/co~6d8/co~6d9' A = 'data/C/CO/COM/COM0/COM1/COM2/COM3/COM4/COM5/COM6/COM7/COM8/COM9' B = 'data/_c/_c_o/_c_o_m/_c_o_m0/_c_o_m1/_c_o_m2/_c_o_m3/_c_o_m4/_c_o_m5/_c_o_m6/_c_o_m7/_c_o_m8/_c_o_m9' A = 'data/c.x/co.x/com.x/com0.x/com1.x/com2.x/com3.x/com4.x/com5.x/com6.x/com7.x/com8.x/com9.x' B = 'data/c.x/co.x/com.x/com0.x/co~6d1.x/co~6d2.x/co~6d3.x/co~6d4.x/co~6d5.x/co~6d6.x/co~6d7.x/co~6d8.x/co~6d9.x' A = 'data/x.c/x.co/x.com0/x.com1/x.com2/x.com3/x.com4/x.com5/x.com6/x.com7/x.com8/x.com9' B = 'data/x.c/x.co/x.com0/x.com1/x.com2/x.com3/x.com4/x.com5/x.com6/x.com7/x.com8/x.com9' A = 'data/cx/cox/comx/com0x/com1x/com2x/com3x/com4x/com5x/com6x/com7x/com8x/com9x' B = 'data/cx/cox/comx/com0x/com1x/com2x/com3x/com4x/com5x/com6x/com7x/com8x/com9x' A = 'data/xc/xco/xcom0/xcom1/xcom2/xcom3/xcom4/xcom5/xcom6/xcom7/xcom8/xcom9' B = 'data/xc/xco/xcom0/xcom1/xcom2/xcom3/xcom4/xcom5/xcom6/xcom7/xcom8/xcom9' A = 'data/l/lp/lpt/lpt0/lpt1/lpt2/lpt3/lpt4/lpt5/lpt6/lpt7/lpt8/lpt9' B = 'data/l/lp/lpt/lpt0/lp~741/lp~742/lp~743/lp~744/lp~745/lp~746/lp~747/lp~748/lp~749' A = 'data/L/LP/LPT/LPT0/LPT1/LPT2/LPT3/LPT4/LPT5/LPT6/LPT7/LPT8/LPT9' B = 'data/_l/_l_p/_l_p_t/_l_p_t0/_l_p_t1/_l_p_t2/_l_p_t3/_l_p_t4/_l_p_t5/_l_p_t6/_l_p_t7/_l_p_t8/_l_p_t9' A = 'data/l.x/lp.x/lpt.x/lpt0.x/lpt1.x/lpt2.x/lpt3.x/lpt4.x/lpt5.x/lpt6.x/lpt7.x/lpt8.x/lpt9.x' B = 'data/l.x/lp.x/lpt.x/lpt0.x/lp~741.x/lp~742.x/lp~743.x/lp~744.x/lp~745.x/lp~746.x/lp~747.x/lp~748.x/lp~749.x' A = 'data/x.l/x.lp/x.lpt/x.lpt0/x.lpt1/x.lpt2/x.lpt3/x.lpt4/x.lpt5/x.lpt6/x.lpt7/x.lpt8/x.lpt9' B = 'data/x.l/x.lp/x.lpt/x.lpt0/x.lpt1/x.lpt2/x.lpt3/x.lpt4/x.lpt5/x.lpt6/x.lpt7/x.lpt8/x.lpt9' A = 'data/lx/lpx/lptx/lpt0x/lpt1x/lpt2x/lpt3x/lpt4x/lpt5x/lpt6x/lpt7x/lpt8x/lpt9x' B = 
'data/lx/lpx/lptx/lpt0x/lpt1x/lpt2x/lpt3x/lpt4x/lpt5x/lpt6x/lpt7x/lpt8x/lpt9x' A = 'data/xl/xlp/xlpt/xlpt0/xlpt1/xlpt2/xlpt3/xlpt4/xlpt5/xlpt6/xlpt7/xlpt8/xlpt9' B = 'data/xl/xlp/xlpt/xlpt0/xlpt1/xlpt2/xlpt3/xlpt4/xlpt5/xlpt6/xlpt7/xlpt8/xlpt9' A = 'data/con/p/pr/prn/a/au/aux/n/nu/nul' B = 'data/co~6e/p/pr/pr~6e/a/au/au~78/n/nu/nu~6c' A = 'data/CON/P/PR/PRN/A/AU/AUX/N/NU/NUL' B = 'data/_c_o_n/_p/_p_r/_p_r_n/_a/_a_u/_a_u_x/_n/_n_u/_n_u_l' A = 'data/con.x/p.x/pr.x/prn.x/a.x/au.x/aux.x/n.x/nu.x/nul.x' B = 'data/co~6e.x/p.x/pr.x/pr~6e.x/a.x/au.x/au~78.x/n.x/nu.x/nu~6c.x' A = 'data/x.con/x.p/x.pr/x.prn/x.a/x.au/x.aux/x.n/x.nu/x.nul' B = 'data/x.con/x.p/x.pr/x.prn/x.a/x.au/x.aux/x.n/x.nu/x.nul' A = 'data/conx/px/prx/prnx/ax/aux/auxx/nx/nux/nulx' B = 'data/conx/px/prx/prnx/ax/au~78/auxx/nx/nux/nulx' A = 'data/xcon/xp/xpr/xprn/xa/xau/xaux/xn/xnu/xnul' B = 'data/xcon/xp/xpr/xprn/xa/xau/xaux/xn/xnu/xnul' A = 'data/a./au./aux./auxy./aux.' B = 'data/a~2e/au~2e/au~78~2e/auxy~2e/au~78~2e' A = 'data/c./co./con./cony./con.' B = 'data/c~2e/co~2e/co~6e~2e/cony~2e/co~6e~2e' A = 'data/p./pr./prn./prny./prn.' B = 'data/p~2e/pr~2e/pr~6e~2e/prny~2e/pr~6e~2e' A = 'data/n./nu./nul./nuly./nul.' B = 'data/n~2e/nu~2e/nu~6c~2e/nuly~2e/nu~6c~2e' A = 'data/l./lp./lpt./lpt1./lpt1y./lpt1.' B = 'data/l~2e/lp~2e/lpt~2e/lp~741~2e/lpt1y~2e/lp~741~2e' A = 'data/lpt9./lpt9y./lpt9.' B = 'data/lp~749~2e/lpt9y~2e/lp~749~2e' A = 'data/com./com1./com1y./com1.' B = 'data/com~2e/co~6d1~2e/com1y~2e/co~6d1~2e' A = 'data/com9./com9y./com9.' 
B = 'data/co~6d9~2e/com9y~2e/co~6d9~2e' A = 'data/a /au /aux /auxy /aux ' B = 'data/a~20/au~20/aux~20/auxy~20/aux~20' largest unhashed path A = 'data/123456789-123456789-123456789-123456789-123456789-unhashed--xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'data/123456789-123456789-123456789-123456789-123456789-unhashed--xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' shortest hashed path A = 'data/123456789-123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/123456789-123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxe9c55002b50bf5181e7a6fc1f60b126e2a6fcf71' changing one char in part that's hashed away produces a different hash A = 'data/123456789-123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxy-123456789-123456' B = 'dh/123456789-123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxd24fa4455faf8a94350c18e5eace7c2bb17af706' uppercase hitting length limit due to encoding A = 'data/A23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/a23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxcbbc657029b41b94ed510d05feb6716a5c03bc6b' A = 'data/Z23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/z23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxx938f32a725c89512833fb96b6602dd9ebff51ddd' compare with lowercase not hitting limit A = 'data/a23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'data/a23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' A = 'data/z23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 
'data/z23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' not hitting limit with any of these A = 'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}xxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'data/abcdefghijklmnopqrstuvwxyz0123456789 !#%&\'()+,-.;=[]^`{}xxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' underbar hitting length limit due to encoding A = 'data/_23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/_23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxx9921a01af50feeabc060ce00eee4cba6efc31d2b' tilde hitting length limit due to encoding A = 'data/~23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/~7e23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx9cec6f97d569c10995f785720044ea2e4227481b' Windows reserved characters hitting length limit A = 'data/<23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/~3c23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxee67d8f275876ca1ef2500fc542e63c885c4e62d' A = 'data/>23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/~3e23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx387a85a5b1547cc9136310c974df716818458ddb' A = 'data/:23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/~3a23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx2e4154fb571d13d22399c58cc4ef4858e4b75999' A = 'data/"23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 
'dh/~2223456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxfc7e3ec7b0687ee06ed8c32fef0eb0c1980259f5' A = 'data/\\23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/~5c23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx944e1f2b7110687e116e0d151328ac648b06ab4a' A = 'data/|23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/~7c23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx28b23dd3fd0242946334126ab62bcd772aac32f4' A = 'data/?23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/~3f23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxa263022d3994d2143d98f94f431eef8b5e7e0f8a' A = 'data/*23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/~2a23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx0e7e6020e3c00ba7bb7893d84ca2966fbf53e140' initial space hitting length limit A = 'data/ 23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/~2023456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx92acbc78ef8c0b796111629a02601f07d8aec4ea' initial dot hitting length limit A = 'data/.23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/~2e23456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxdbe19cc6505b3515ab9228cebf877ad07075168f' trailing space in filename hitting length limit A = 'data/123456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-1234 ' B = 
'dh/123456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxx0025dc73e04f97426db4893e3bf67d581dc6d066' trailing dot in filename hitting length limit A = 'data/123456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-1234.' B = 'dh/123456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxx85a16cf03ee7feba8a5abc626f1ba9886d01e89d' initial space in directory hitting length limit A = 'data/ x/456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/~20x/456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx1b3a3b712b2ac00d6af14ae8b4c14fdbf904f516' initial dot in directory hitting length limit A = 'data/.x/456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/~2ex/456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx39dbc4c193a5643a8936fc69c3363cd7ac91ab14' trailing space in directory hitting length limit A = 'data/x /456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/x~20/456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx2253c341df0b5290790ad312cd8499850f2273e5' trailing dot in directory hitting length limit A = 'data/x./456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/x~2e/456789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxcc0324d696d34562b44b5138db08ee1594ccc583' with directories that need direncoding, hitting length limit A = 'data/x.i/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/x.i.hg/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxa4c4399bdf81c67dbbbb7060aa0124d8dea94f74' A = 
'data/x.d/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/x.d.hg/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxx1303fa90473b230615f5b3ea7b660e881ae5270a' A = 'data/x.hg/5789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/x.hg.hg/5789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxx26d724a8af68e7a4e4455e6602ea9adbd0eb801f' Windows reserved filenames, hitting length limit A = 'data/con/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/co~6e/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxc0794d4f4c605a2617900eb2563d7113cf6ea7d3' A = 'data/prn/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/pr~6e/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx64db876e1a9730e27236cb9b167aff942240e932' A = 'data/aux/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/au~78/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx8a178558405ca6fb4bbd75446dfa186f06751a0d' A = 'data/nul/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/nu~6c/56789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxc5e51b6fec1bd07bd243b053a0c3f7209855b886' A = 'data/com1/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/co~6d1/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx32f5f44ece3bb62b9327369ca84cc19c86259fcd' A = 'data/com9/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 
'dh/co~6d9/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxx734360b28c66a3230f55849fe8926206d229f990' A = 'data/lpt1/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/lp~741/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxe6f16ab4b6b0637676b2842b3345c9836df46ef7' A = 'data/lpt9/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'dh/lp~749/6789-123456789-123456789-123456789-123456789-xxxxxxxxx-xxxxxxxxx-xxxxxa475814c51acead3e44f2ff801f0c4903f986157' non-reserved names, just not hitting limit A = 'data/123456789-123456789-123456789-123456789-123456789-/com/com0/lpt/lpt0/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' B = 'data/123456789-123456789-123456789-123456789-123456789-/com/com0/lpt/lpt0/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12345' hashed path with largest untruncated 1st dir A = 'data/12345678/-123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/-123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxx4e9e9e384d00929a93b6835fbf976eb32321ff3c' hashed path with smallest truncated 1st dir A = 'data/123456789/123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/123456789-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxx1f4e4ec5f2be76e109bfaa8e31c062fe426d5490' hashed path with largest untruncated two dirs A = 'data/12345678/12345678/9-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/12345678/9-123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxx3332d8329d969cf835542a9f2cbcfb385b6cf39d' hashed path with smallest truncated two dirs A = 
'data/123456789/123456789/123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/12345678/123456789-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx9699559798247dffa18717138859be5f8874840e' hashed path with largest untruncated three dirs A = 'data/12345678/12345678/12345678/89-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/12345678/12345678/89-123456789-123456789-hashed----xxxxxxxxx-xxxxxxxf0a2b053bb1369cce02f78c217d6a7aaea18c439' hashed path with smallest truncated three dirs A = 'data/123456789/123456789/123456789/123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/12345678/12345678/123456789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-1c6f8284967384ec13985a046d3553179d9d03cd' hashed path with largest untruncated four dirs A = 'data/12345678/12345678/12345678/12345678/789-123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/12345678/12345678/12345678/789-123456789-hashed----xxxxxxxxx-xxxxxxx0d30c99049d8f0ff97b94d4ef302027e8d54c6fd' hashed path with smallest truncated four dirs A = 'data/123456789/123456789/123456789/123456789/123456789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/12345678/12345678/12345678/123456789-hashed----xxxxxxxxx-xxxxxxxxx-x46162779e1a771810b37a737f82ae7ed33771402' hashed path with largest untruncated five dirs A = 'data/12345678/12345678/12345678/12345678/12345678/6789-hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/12345678/12345678/12345678/12345678/6789-hashed----xxxxxxxxx-xxxxxxxbfe752ddc8b003c2790c66a9f2eb1ea75c114390' hashed path with smallest truncated five dirs A = 'data/123456789/123456789/123456789/123456789/123456789/hashed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 
'dh/12345678/12345678/12345678/12345678/12345678/hashed----xxxxxxxxx-xxxxxxxxx-xxb94c27b3532fa880cdd572b1c514785cab7b6ff2' hashed path with largest untruncated six dirs A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/ed----xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/ed----xxxxxxxxx-xxxxxxxcd8cc5483a0f3be409e0e5d4bf9e36e113c59235' hashed path with smallest truncated six dirs A = 'data/123456789/123456789/123456789/123456789/123456789/123456789/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxxxxx-xxxxxxxxx-xxx47dd6f616f833a142da00701b334cebbf640da06' hashed path with largest untruncated seven dirs A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxx-xxxxxxx1c8ed635229fc22efe51035feeadeb4c8a0ecb82' hashed path with smallest truncated seven dirs A = 'data/123456789/123456789/123456789/123456789/123456789/123456789/123456789/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxxxxx-xxxx298ff7d33f8ce6db57930837ffea2fb2f48bb926' hashed path with largest untruncated eight dirs (directory 8 is dropped because it hits _maxshortdirslen) A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxxx-xxxxxxc8996ccd41b471f768057181a4d59d2febe7277d' hashed path with smallest truncated eight dirs (directory 8 is dropped because it hits _maxshortdirslen) A = 'data/123456789/123456789/123456789/123456789/123456789/123456789/123456789/123456789/xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 
'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxxxxx-xxxx4fa04a839a6bda93e1c21c713f2edcbd16e8890d' hashed path with largest non-dropped directory 8 (just not hitting the _maxshortdirslen boundary) A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxx4d43d1ccaa20efbfe99ec779dc063611536ff2c5' ...adding one truncated char to dir 1..7 won't drop dir 8 A = 'data/12345678x/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxx0f9efce65189cc60fd90fe4ffd49d7b58bbe0f2e' A = 'data/12345678/12345678x/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxx945ca395708cafdd54a94501859beabd3e243921' A = 'data/12345678/12345678/12345678x/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxac62bf6898c4fd0502146074547c11caa751a327' A = 'data/12345678/12345678/12345678/12345678x/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxx2ae5a2baed7983fae8974d0ca06c6bf08b9aee92' A = 'data/12345678/12345678/12345678/12345678/12345678x/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxx214aba07b6687532a43d1e9eaf6e88cfca96b68c' A = 'data/12345678/12345678/12345678/12345678/12345678/12345678x/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 
'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxe7a022ae82f0f55cf4e0498e55ba59ea4ebb55bf' A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678x/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxb51ce61164996a80f36ce3cfe64b62d519aedae3' hashed path with shortest dropped directory 8 (just hitting the _maxshortdirslen boundary) A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/123456/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/xxxxxxxxx-xxxx11fa9873cc6c3215eae864528b5530a04efc6cfe' hashed path that drops dir 8 due to dot or space at end is encoded, and thus causing to hit _maxshortdirslen A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/1234./-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/-xxxxxxxxx-xxx602df9b45bec564e2e1f0645d5140dddcc76ed58' A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/1234 /-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/-xxxxxxxxx-xxxd99ff212bc84b4d1f70cd6b0071e3ef69d4e12ce' ... with dir 8 short enough for encoding A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12./xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12~2e/xx-xxxxx7baeb5ed7f14a586ee1cacecdbcbff70032d1b3c' A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12 /xx-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-123456' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12~20/xx-xxxxxcf79ca9795f77d7f75745da36807e5d772bd5182' Extensions are replicated on hashed paths. 
Note that we only get to encode files that end in .i or .d inside the store. Encoded filenames are thus bound in length. A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.345.i' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxc10ad03b5755ed524f5286aab1815dfe07729438.i' A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.345.d' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxx9eec83381f2b39ef5ac8b4ecdf2c94f7983f57c8.d' A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456.i' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxb7796dc7d175cfb0bb8a7728f58f6ebec9042568.i' A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.34567.i' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxb515857a6bfeef017c4894d8df42458ac65d55b8.i' A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.345678.i' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxb05a0f247bc0a776211cd6a32ab714fd9cc09f2b.i' A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789.i' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxf192b48bff08d9e0e12035fb52bc58c70de72c94.i' A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-.i' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxx435551e0ed4c7b083b9ba83cee916670e02e80ad.i' A = 
'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-1.i' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxa7f74eb98d8d58b716356dfd26e2f9aaa65d6a9a.i' A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12.i' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxed68d9bd43b931f0b100267fee488d65a0c66f62.i' A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-123.i' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxx5cea44de2b642d2ba2b4a30693ffb1049644d698.i' A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-1234.i' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxx68462f62a7f230b39c1b5400d73ec35920990b7e.i' A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345.i' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxx4cb852a314c6da240a83eec94761cdd71c6ec22e.i' A = 'data/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz-ABCDEFGHIJKLMNOPRSTUVWXYZ-1234567890-xxxxxxxxx-xxxxxxxxx-xxxxxxxx-xxxxxxxxx-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww-wwwwwwwww.i' B = 'dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/-xxxxx93352aa50377751d9e5ebdf52da1e6e69a6887a6.i' paths outside data/ can be encoded A = 'metadata/dir/00manifest.i' B = 'metadata/dir/00manifest.i' A = 
'metadata/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345678/00manifest.i' B = 'dh/ata/12345678/12345678/12345678/12345678/12345678/12345678/12345678/00manife0a4da1f89aa2aa9eb0896eb451288419049781b4.i' mercurial-3.7.3/tests/test-confused-revert.t0000644000175000017500000000231712676531525020551 0ustar mpmmpm00000000000000 $ hg init $ echo foo > a $ hg add a $ hg commit -m "1" $ echo bar > b $ hg add b $ hg remove a Should show a removed and b added: $ hg status A b R a $ hg revert --all undeleting a forgetting b Should show b unknown and a back to normal: $ hg status ? b $ rm b $ hg co -C 0 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo foo-a > a $ hg commit -m "2a" $ hg co -C 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo foo-b > a $ hg commit -m "2b" created new head $ HGMERGE=true hg merge 1 merging a 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) Should show foo-b: $ cat a foo-b $ echo bar > b $ hg add b $ rm a $ hg remove a Should show a removed and b added: $ hg status A b R a Revert should fail: $ hg revert abort: uncommitted merge with no revision specified (use "hg update" or see "hg help revert") [255] Revert should be ok now: $ hg revert -r2 --all undeleting a forgetting b Should show b unknown and a marked modified (merged): $ hg status M a ? 
b Should show foo-b: $ cat a foo-b mercurial-3.7.3/tests/test-convert-svn-encoding.t0000644000175000017500000001240512676531525021505 0ustar mpmmpm00000000000000#require svn svn-bindings $ cat >> $HGRCPATH < [extensions] > convert = > EOF $ svnadmin create svn-repo $ svnadmin load -q svn-repo < "$TESTDIR/svn/encoding.svndump" Convert while testing all possible outputs $ hg --debug convert svn-repo A-hg --config progress.debug=1 initializing destination A-hg repository reparent to file://*/svn-repo (glob) run hg sink pre-conversion action scanning source... found trunk at 'trunk' found tags at 'tags' found branches at 'branches' found branch branch\xc3\xa9 at 5 (esc) found branch branch\xc3\xa9e at 6 (esc) scanning: 1/4 revisions (25.00%) reparent to file://*/svn-repo/trunk (glob) fetching revision log for "/trunk" from 4 to 0 parsing revision 4 (2 changes) parsing revision 3 (4 changes) parsing revision 2 (3 changes) parsing revision 1 (3 changes) no copyfrom path, don't know what to do. '/branches' is not under '/trunk', ignoring '/tags' is not under '/trunk', ignoring scanning: 2/4 revisions (50.00%) reparent to file://*/svn-repo/branches/branch%C3%A9 (glob) fetching revision log for "/branches/branch\xc3\xa9" from 5 to 0 (esc) parsing revision 5 (1 changes) reparent to file://*/svn-repo (glob) reparent to file://*/svn-repo/branches/branch%C3%A9 (glob) found parent of branch /branches/branch\xc3\xa9 at 4: /trunk (esc) scanning: 3/4 revisions (75.00%) reparent to file://*/svn-repo/branches/branch%C3%A9e (glob) fetching revision log for "/branches/branch\xc3\xa9e" from 6 to 0 (esc) parsing revision 6 (1 changes) reparent to file://*/svn-repo (glob) reparent to file://*/svn-repo/branches/branch%C3%A9e (glob) found parent of branch /branches/branch\xc3\xa9e at 5: /branches/branch\xc3\xa9 (esc) scanning: 4/4 revisions (100.00%) scanning: 5/4 revisions (125.00%) scanning: 6/4 revisions (150.00%) sorting... converting... 
5 init projA source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/trunk@1 converting: 0/6 revisions (0.00%) committing changelog 4 hello source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/trunk@2 converting: 1/6 revisions (16.67%) reparent to file://*/svn-repo/trunk (glob) scanning paths: /trunk/\xc3\xa0 0/3 (0.00%) (esc) scanning paths: /trunk/\xc3\xa0/e\xcc\x81 1/3 (33.33%) (esc) scanning paths: /trunk/\xc3\xa9 2/3 (66.67%) (esc) committing files: \xc3\xa0/e\xcc\x81 (esc) getting files: \xc3\xa0/e\xcc\x81 1/2 (50.00%) (esc) \xc3\xa9 (esc) getting files: \xc3\xa9 2/2 (100.00%) (esc) committing manifest committing changelog 3 copy files source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/trunk@3 converting: 2/6 revisions (33.33%) scanning paths: /trunk/\xc3\xa0 0/4 (0.00%) (esc) gone from -1 reparent to file://*/svn-repo (glob) reparent to file://*/svn-repo/trunk (glob) scanning paths: /trunk/\xc3\xa8 1/4 (25.00%) (esc) copied to \xc3\xa8 from \xc3\xa9@2 (esc) scanning paths: /trunk/\xc3\xa9 2/4 (50.00%) (esc) gone from -1 reparent to file://*/svn-repo (glob) reparent to file://*/svn-repo/trunk (glob) scanning paths: /trunk/\xc3\xb9 3/4 (75.00%) (esc) mark /trunk/\xc3\xb9 came from \xc3\xa0:2 (esc) getting files: \xc3\xa0/e\xcc\x81 1/4 (25.00%) (esc) getting files: \xc3\xa9 2/4 (50.00%) (esc) committing files: \xc3\xa8 (esc) getting files: \xc3\xa8 3/4 (75.00%) (esc) \xc3\xa8: copy \xc3\xa9:6b67ccefd5ce6de77e7ead4f5292843a0255329f (esc) \xc3\xb9/e\xcc\x81 (esc) getting files: \xc3\xb9/e\xcc\x81 4/4 (100.00%) (esc) \xc3\xb9/e\xcc\x81: copy \xc3\xa0/e\xcc\x81:a9092a3d84a37b9993b5c73576f6de29b7ea50f6 (esc) committing manifest committing changelog 2 remove files source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/trunk@4 converting: 3/6 revisions (50.00%) scanning paths: /trunk/\xc3\xa8 0/2 (0.00%) (esc) gone from -1 reparent to file://*/svn-repo (glob) reparent to file://*/svn-repo/trunk (glob) scanning paths: /trunk/\xc3\xb9 1/2 (50.00%) (esc) gone from -1 reparent to 
file://*/svn-repo (glob) reparent to file://*/svn-repo/trunk (glob) getting files: \xc3\xa8 1/2 (50.00%) (esc) getting files: \xc3\xb9/e\xcc\x81 2/2 (100.00%) (esc) committing files: committing manifest committing changelog 1 branch to branch? source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/branches/branch?@5 converting: 4/6 revisions (66.67%) reparent to file://*/svn-repo/branches/branch%C3%A9 (glob) scanning paths: /branches/branch\xc3\xa9 0/1 (0.00%) (esc) committing changelog 0 branch to branch?e source: svn:afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af/branches/branch?e@6 converting: 5/6 revisions (83.33%) reparent to file://*/svn-repo/branches/branch%C3%A9e (glob) scanning paths: /branches/branch\xc3\xa9e 0/1 (0.00%) (esc) committing changelog reparent to file://*/svn-repo (glob) reparent to file://*/svn-repo/branches/branch%C3%A9e (glob) reparent to file://*/svn-repo (glob) reparent to file://*/svn-repo/branches/branch%C3%A9e (glob) updating tags committing files: .hgtags committing manifest committing changelog run hg sink post-conversion action $ cd A-hg $ hg up 1 files updated, 0 files merged, 0 files removed, 0 files unresolved Check tags are in UTF-8 $ cat .hgtags e94e4422020e715add80525e8f0f46c9968689f1 branch\xc3\xa9e (esc) f7e66f98380ed1e53a797c5c7a7a2616a7ab377d branch\xc3\xa9 (esc) $ cd .. mercurial-3.7.3/tests/test-debian-packages.t0000644000175000017500000000131112676531525020425 0ustar mpmmpm00000000000000#require test-repo slow debhelper Ensure debuild doesn't run the testsuite, as that could get silly. $ DEB_BUILD_OPTIONS=nocheck $ export DEB_BUILD_OPTIONS $ OUTPUTDIR=`pwd` $ export OUTPUTDIR $ cd "$TESTDIR"/.. 
$ make deb > $OUTPUTDIR/build.log 2>&1 $ cd $OUTPUTDIR $ ls *.deb mercurial-common_*.deb (glob) mercurial_*.deb (glob) main deb should have .so but no .py $ dpkg --contents mercurial_*.deb | egrep '(localrepo|parsers)' * ./usr/lib/python2.7/dist-packages/mercurial/parsers*.so (glob) mercurial-common should have py but no .so or pyc $ dpkg --contents mercurial-common_*.deb | egrep '(localrepo|parsers)' * ./usr/lib/python2.7/dist-packages/mercurial/localrepo.py (glob) mercurial-3.7.3/tests/test-i18n.t0000644000175000017500000000232012676531525016207 0ustar mpmmpm00000000000000(Translations are optional) #if gettext no-outer-repo Test that translations are compiled and installed correctly. Default encoding in tests is "ascii" and the translation is encoded using the "replace" error handler: $ LANGUAGE=pt_BR hg tip abortado: n?o foi encontrado um reposit?rio em '$TESTTMP' (.hg n?o encontrado)! [255] Using a more accommodating encoding: $ HGENCODING=UTF-8 LANGUAGE=pt_BR hg tip abortado: n\xc3\xa3o foi encontrado um reposit\xc3\xb3rio em '$TESTTMP' (.hg n\xc3\xa3o encontrado)! (esc) [255] Different encoding: $ HGENCODING=Latin-1 LANGUAGE=pt_BR hg tip abortado: n\xe3o foi encontrado um reposit\xf3rio em '$TESTTMP' (.hg n\xe3o encontrado)! 
(esc) [255] #endif #if gettext Test keyword search in translated help text: $ HGENCODING=UTF-8 LANGUAGE=de hg help -k blättern Themen: extensions Benutzung erweiterter Funktionen Erweiterungen: pager Verwendet einen externen Pager zum Bl\xc3\xa4ttern in der Ausgabe von Befehlen (esc) #endif Check Mercurial specific translation problems in each *.po files, and tool itself by doctest $ cd "$TESTDIR"/../i18n $ python check-translation.py *.po $ python check-translation.py --doctest $ cd $TESTTMP mercurial-3.7.3/tests/test-graft.t0000644000175000017500000005147612676531525016553 0ustar mpmmpm00000000000000Create a repo with some stuff in it: $ hg init a $ cd a $ echo a > a $ echo a > d $ echo a > e $ hg ci -qAm0 $ echo b > a $ hg ci -m1 -u bar $ hg mv a b $ hg ci -m2 $ hg cp b c $ hg ci -m3 -u baz $ echo b > d $ echo f > e $ hg ci -m4 $ hg up -q 3 $ echo b > e $ hg branch -q stable $ hg ci -m5 $ hg merge -q default --tool internal:local $ hg branch -q default $ hg ci -m6 $ hg phase --public 3 $ hg phase --force --secret 6 $ hg log -G --template '{author}@{rev}.{phase}: {desc}\n' @ test@6.secret: 6 |\ | o test@5.draft: 5 | | o | test@4.draft: 4 |/ o baz@3.public: 3 | o test@2.public: 2 | o bar@1.public: 1 | o test@0.public: 0 Need to specify a rev: $ hg graft abort: no revisions specified [255] Can't graft ancestor: $ hg graft 1 2 skipping ancestor revision 1:5d205f8b35b6 skipping ancestor revision 2:5c095ad7e90f [255] Specify revisions with -r: $ hg graft -r 1 -r 2 skipping ancestor revision 1:5d205f8b35b6 skipping ancestor revision 2:5c095ad7e90f [255] $ hg graft -r 1 2 warning: inconsistent use of --rev might give unexpected revision ordering! 
skipping ancestor revision 2:5c095ad7e90f skipping ancestor revision 1:5d205f8b35b6 [255] Can't graft with dirty wd: $ hg up -q 0 $ echo foo > a $ hg graft 1 abort: uncommitted changes [255] $ hg revert a Graft a rename: (this also tests that editor is invoked if '--edit' is specified) $ hg status --rev "2^1" --rev 2 A b R a $ HGEDITOR=cat hg graft 2 -u foo --edit grafting 2:5c095ad7e90f "2" merging a and b to b 2 HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. HG: -- HG: user: foo HG: branch 'default' HG: added b HG: removed a $ hg export tip --git # HG changeset patch # User foo # Date 0 0 # Thu Jan 01 00:00:00 1970 +0000 # Node ID ef0ef43d49e79e81ddafdc7997401ba0041efc82 # Parent 68795b066622ca79a25816a662041d8f78f3cd9e 2 diff --git a/a b/b rename from a rename to b Look for extra:source $ hg log --debug -r tip changeset: 7:ef0ef43d49e79e81ddafdc7997401ba0041efc82 tag: tip phase: draft parent: 0:68795b066622ca79a25816a662041d8f78f3cd9e parent: -1:0000000000000000000000000000000000000000 manifest: 7:e59b6b228f9cbf9903d5e9abf996e083a1f533eb user: foo date: Thu Jan 01 00:00:00 1970 +0000 files+: b files-: a extra: branch=default extra: source=5c095ad7e90f871700f02dd1fa5012cb4498a2d4 description: 2 Graft out of order, skipping a merge and a duplicate (this also tests that editor is not invoked if '--edit' is not specified) $ hg graft 1 5 4 3 'merge()' 2 -n skipping ungraftable merge revision 6 skipping revision 2:5c095ad7e90f (already grafted to 7:ef0ef43d49e7) grafting 1:5d205f8b35b6 "1" grafting 5:97f8bfe72746 "5" grafting 4:9c233e8e184d "4" grafting 3:4c60f11aa304 "3" $ HGEDITOR=cat hg graft 1 5 'merge()' 2 --debug skipping ungraftable merge revision 6 scanning for duplicate grafts skipping revision 2:5c095ad7e90f (already grafted to 7:ef0ef43d49e7) grafting 1:5d205f8b35b6 "1" searching for copies back to rev 1 unmatched files in local: b all copies found (* = to merge, ! 
= divergent, % = renamed and deleted): src: 'a' -> dst: 'b' * checking for directory renames resolving manifests branchmerge: True, force: True, partial: False ancestor: 68795b066622, local: ef0ef43d49e7+, remote: 5d205f8b35b6 preserving b for resolve of b b: local copied/moved from a -> m (premerge) picked tool ':merge' for b (binary False symlink False changedelete False) merging b and a to b my b@ef0ef43d49e7+ other a@5d205f8b35b6 ancestor a@68795b066622 premerge successful committing files: b committing manifest committing changelog grafting 5:97f8bfe72746 "5" searching for copies back to rev 1 resolving manifests branchmerge: True, force: True, partial: False ancestor: 4c60f11aa304, local: 6b9e5368ca4e+, remote: 97f8bfe72746 e: remote is newer -> g getting e b: remote unchanged -> k committing files: e committing manifest committing changelog $ HGEDITOR=cat hg graft 4 3 --log --debug scanning for duplicate grafts grafting 4:9c233e8e184d "4" searching for copies back to rev 1 resolving manifests branchmerge: True, force: True, partial: False ancestor: 4c60f11aa304, local: 1905859650ec+, remote: 9c233e8e184d preserving e for resolve of e d: remote is newer -> g getting d b: remote unchanged -> k e: versions differ -> m (premerge) picked tool ':merge' for e (binary False symlink False changedelete False) merging e my e@1905859650ec+ other e@9c233e8e184d ancestor e@68795b066622 e: versions differ -> m (merge) picked tool ':merge' for e (binary False symlink False changedelete False) my e@1905859650ec+ other e@9c233e8e184d ancestor e@68795b066622 warning: conflicts while merging e! 
(edit, then use 'hg resolve --mark') abort: unresolved conflicts, can't continue (use hg resolve and hg graft --continue --log) [255] Summary should mention graft: $ hg summary |grep graft commit: 2 modified, 2 unknown, 1 unresolved (graft in progress) Commit while interrupted should fail: $ hg ci -m 'commit interrupted graft' abort: graft in progress (use 'hg graft --continue' or 'hg update' to abort) [255] Abort the graft and try committing: $ hg up -C . 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo c >> e $ hg ci -mtest $ hg strip . --config extensions.strip= 1 files updated, 0 files merged, 0 files removed, 0 files unresolved saved backup bundle to $TESTTMP/a/.hg/strip-backup/*-backup.hg (glob) Graft again: $ hg graft 1 5 4 3 'merge()' 2 skipping ungraftable merge revision 6 skipping revision 2:5c095ad7e90f (already grafted to 7:ef0ef43d49e7) skipping revision 1:5d205f8b35b6 (already grafted to 8:6b9e5368ca4e) skipping revision 5:97f8bfe72746 (already grafted to 9:1905859650ec) grafting 4:9c233e8e184d "4" merging e warning: conflicts while merging e! (edit, then use 'hg resolve --mark') abort: unresolved conflicts, can't continue (use hg resolve and hg graft --continue) [255] Continue without resolve should fail: $ hg graft -c grafting 4:9c233e8e184d "4" abort: unresolved merge conflicts (see "hg help resolve") [255] Fix up: $ echo b > e $ hg resolve -m e (no more unresolved files) continue: hg graft --continue Continue with a revision should fail: $ hg graft -c 6 abort: can't specify --continue and revisions [255] $ hg graft -c -r 6 abort: can't specify --continue and revisions [255] Continue for real, clobber usernames $ hg graft -c -U grafting 4:9c233e8e184d "4" grafting 3:4c60f11aa304 "3" Compare with original: $ hg diff -r 6 $ hg status --rev 0:. 
-C M d M e A b a A c a R a View graph: $ hg log -G --template '{author}@{rev}.{phase}: {desc}\n' @ test@11.draft: 3 | o test@10.draft: 4 | o test@9.draft: 5 | o bar@8.draft: 1 | o foo@7.draft: 2 | | o test@6.secret: 6 | |\ | | o test@5.draft: 5 | | | | o | test@4.draft: 4 | |/ | o baz@3.public: 3 | | | o test@2.public: 2 | | | o bar@1.public: 1 |/ o test@0.public: 0 Graft again onto another branch should preserve the original source $ hg up -q 0 $ echo 'g'>g $ hg add g $ hg ci -m 7 created new head $ hg graft 7 grafting 7:ef0ef43d49e7 "2" $ hg log -r 7 --template '{rev}:{node}\n' 7:ef0ef43d49e79e81ddafdc7997401ba0041efc82 $ hg log -r 2 --template '{rev}:{node}\n' 2:5c095ad7e90f871700f02dd1fa5012cb4498a2d4 $ hg log --debug -r tip changeset: 13:7a4785234d87ec1aa420ed6b11afe40fa73e12a9 tag: tip phase: draft parent: 12:b592ea63bb0c19a6c5c44685ee29a2284f9f1b8f parent: -1:0000000000000000000000000000000000000000 manifest: 13:dc313617b8c32457c0d589e0dbbedfe71f3cd637 user: foo date: Thu Jan 01 00:00:00 1970 +0000 files+: b files-: a extra: branch=default extra: intermediate-source=ef0ef43d49e79e81ddafdc7997401ba0041efc82 extra: source=5c095ad7e90f871700f02dd1fa5012cb4498a2d4 description: 2 Disallow grafting an already grafted cset onto its original branch $ hg up -q 6 $ hg graft 7 skipping already grafted revision 7:ef0ef43d49e7 (was grafted from 2:5c095ad7e90f) [255] $ hg extdiff --config extensions.extdiff= --patch -r 2 -r 13 --- */hg-5c095ad7e90f.patch * +0000 (glob) +++ */hg-7a4785234d87.patch * +0000 (glob) @@ -1,18 +1,18 @@ # HG changeset patch -# User test +# User foo # Date 0 0 # Thu Jan 01 00:00:00 1970 +0000 -# Node ID 5c095ad7e90f871700f02dd1fa5012cb4498a2d4 -# Parent 5d205f8b35b66bc36375c9534ffd3237730e8f04 +# Node ID 7a4785234d87ec1aa420ed6b11afe40fa73e12a9 +# Parent b592ea63bb0c19a6c5c44685ee29a2284f9f1b8f 2 -diff -r 5d205f8b35b6 -r 5c095ad7e90f a +diff -r b592ea63bb0c -r 7a4785234d87 a --- a/a Thu Jan 01 00:00:00 1970 +0000 +++ /dev/null Thu Jan 01 00:00:00 
1970 +0000 @@ -1,1 +0,0 @@ --b -diff -r 5d205f8b35b6 -r 5c095ad7e90f b +-a +diff -r b592ea63bb0c -r 7a4785234d87 b --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/b Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ -+b ++a [1] $ hg extdiff --config extensions.extdiff= --patch -r 2 -r 13 -X . --- */hg-5c095ad7e90f.patch * +0000 (glob) +++ */hg-7a4785234d87.patch * +0000 (glob) @@ -1,8 +1,8 @@ # HG changeset patch -# User test +# User foo # Date 0 0 # Thu Jan 01 00:00:00 1970 +0000 -# Node ID 5c095ad7e90f871700f02dd1fa5012cb4498a2d4 -# Parent 5d205f8b35b66bc36375c9534ffd3237730e8f04 +# Node ID 7a4785234d87ec1aa420ed6b11afe40fa73e12a9 +# Parent b592ea63bb0c19a6c5c44685ee29a2284f9f1b8f 2 [1] Disallow grafting already grafted csets with the same origin onto each other $ hg up -q 13 $ hg graft 2 skipping revision 2:5c095ad7e90f (already grafted to 13:7a4785234d87) [255] $ hg graft 7 skipping already grafted revision 7:ef0ef43d49e7 (13:7a4785234d87 also has origin 2:5c095ad7e90f) [255] $ hg up -q 7 $ hg graft 2 skipping revision 2:5c095ad7e90f (already grafted to 7:ef0ef43d49e7) [255] $ hg graft tip skipping already grafted revision 13:7a4785234d87 (7:ef0ef43d49e7 also has origin 2:5c095ad7e90f) [255] Graft with --log $ hg up -Cq 1 $ hg graft 3 --log -u foo grafting 3:4c60f11aa304 "3" warning: can't find ancestor for 'c' copied from 'b'! $ hg log --template '{rev} {parents} {desc}\n' -r tip 14 1:5d205f8b35b6 3 (grafted from 4c60f11aa304a54ae1c199feb94e7fc771e51ed8) Resolve conflicted graft $ hg up -q 0 $ echo b > a $ hg ci -m 8 created new head $ echo c > a $ hg ci -m 9 $ hg graft 1 --tool internal:fail grafting 1:5d205f8b35b6 "1" abort: unresolved conflicts, can't continue (use hg resolve and hg graft --continue) [255] $ hg resolve --all merging a warning: conflicts while merging a! 
(edit, then use 'hg resolve --mark') [1] $ cat a <<<<<<< local: aaa4406d4f0a - test: 9 c ======= b >>>>>>> other: 5d205f8b35b6 - bar: 1 $ echo b > a $ hg resolve -m a (no more unresolved files) continue: hg graft --continue $ hg graft -c grafting 1:5d205f8b35b6 "1" $ hg export tip --git # HG changeset patch # User bar # Date 0 0 # Thu Jan 01 00:00:00 1970 +0000 # Node ID f67661df0c4804d301f064f332b57e7d5ddaf2be # Parent aaa4406d4f0ae9befd6e58c82ec63706460cbca6 1 diff --git a/a b/a --- a/a +++ b/a @@ -1,1 +1,1 @@ -c +b Resolve conflicted graft with rename $ echo c > a $ hg ci -m 10 $ hg graft 2 --tool internal:fail grafting 2:5c095ad7e90f "2" abort: unresolved conflicts, can't continue (use hg resolve and hg graft --continue) [255] $ hg resolve --all merging a and b to b (no more unresolved files) continue: hg graft --continue $ hg graft -c grafting 2:5c095ad7e90f "2" $ hg export tip --git # HG changeset patch # User test # Date 0 0 # Thu Jan 01 00:00:00 1970 +0000 # Node ID 9627f653b421c61fc1ea4c4e366745070fa3d2bc # Parent ee295f490a40b97f3d18dd4c4f1c8936c233b612 2 diff --git a/a b/b rename from a rename to b Test simple origin(), with and without args $ hg log -r 'origin()' changeset: 1:5d205f8b35b6 user: bar date: Thu Jan 01 00:00:00 1970 +0000 summary: 1 changeset: 2:5c095ad7e90f user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 changeset: 3:4c60f11aa304 user: baz date: Thu Jan 01 00:00:00 1970 +0000 summary: 3 changeset: 4:9c233e8e184d user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 4 changeset: 5:97f8bfe72746 branch: stable parent: 3:4c60f11aa304 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 5 $ hg log -r 'origin(7)' changeset: 2:5c095ad7e90f user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 Now transplant a graft to test following through copies $ hg up -q 0 $ hg branch -q dev $ hg ci -qm "dev branch" $ hg --config extensions.transplant= transplant -q 7 $ hg log -r 'origin(.)' changeset: 2:5c095ad7e90f user: test date: 
Thu Jan 01 00:00:00 1970 +0000 summary: 2 Test that the graft and transplant markers in extra are converted, allowing origin() to still work. Note that these recheck the immediately preceeding two tests. $ hg --quiet --config extensions.convert= --config convert.hg.saverev=True convert . ../converted The graft case $ hg -R ../converted log -r 7 --template "{rev}: {node}\n{join(extras, '\n')}\n" 7: 7ae846e9111fc8f57745634250c7b9ac0a60689b branch=default convert_revision=ef0ef43d49e79e81ddafdc7997401ba0041efc82 source=e0213322b2c1a5d5d236c74e79666441bee67a7d $ hg -R ../converted log -r 'origin(7)' changeset: 2:e0213322b2c1 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 Test that template correctly expands more than one 'extra' (issue4362), and that 'intermediate-source' is converted. $ hg -R ../converted log -r 13 --template "{extras % ' Extra: {extra}\n'}" Extra: branch=default Extra: convert_revision=7a4785234d87ec1aa420ed6b11afe40fa73e12a9 Extra: intermediate-source=7ae846e9111fc8f57745634250c7b9ac0a60689b Extra: source=e0213322b2c1a5d5d236c74e79666441bee67a7d The transplant case $ hg -R ../converted log -r tip --template "{rev}: {node}\n{join(extras, '\n')}\n" 21: fbb6c5cc81002f2b4b49c9d731404688bcae5ade branch=dev convert_revision=7e61b508e709a11d28194a5359bc3532d910af21 transplant_source=z\xe8F\xe9\x11\x1f\xc8\xf5wEcBP\xc7\xb9\xac (esc) `h\x9b (esc) $ hg -R ../converted log -r 'origin(tip)' changeset: 2:e0213322b2c1 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 Test simple destination $ hg log -r 'destination()' changeset: 7:ef0ef43d49e7 parent: 0:68795b066622 user: foo date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 changeset: 8:6b9e5368ca4e user: bar date: Thu Jan 01 00:00:00 1970 +0000 summary: 1 changeset: 9:1905859650ec user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 5 changeset: 10:52dc0b4c6907 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 4 changeset: 11:882b35362a6b user: test date: Thu Jan 01 00:00:00 1970 
+0000 summary: 3 changeset: 13:7a4785234d87 user: foo date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 changeset: 14:f64defefacee parent: 1:5d205f8b35b6 user: foo date: Thu Jan 01 00:00:00 1970 +0000 summary: 3 changeset: 17:f67661df0c48 user: bar date: Thu Jan 01 00:00:00 1970 +0000 summary: 1 changeset: 19:9627f653b421 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 changeset: 21:7e61b508e709 branch: dev tag: tip user: foo date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 $ hg log -r 'destination(2)' changeset: 7:ef0ef43d49e7 parent: 0:68795b066622 user: foo date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 changeset: 13:7a4785234d87 user: foo date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 changeset: 19:9627f653b421 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 changeset: 21:7e61b508e709 branch: dev tag: tip user: foo date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 Transplants of grafts can find a destination... $ hg log -r 'destination(7)' changeset: 21:7e61b508e709 branch: dev tag: tip user: foo date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 ... grafts of grafts unfortunately can't $ hg graft -q 13 warning: can't find ancestor for 'b' copied from 'a'! 
$ hg log -r 'destination(13)' All copies of a cset $ hg log -r 'origin(13) or destination(origin(13))' changeset: 2:5c095ad7e90f user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 changeset: 7:ef0ef43d49e7 parent: 0:68795b066622 user: foo date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 changeset: 13:7a4785234d87 user: foo date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 changeset: 19:9627f653b421 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 changeset: 21:7e61b508e709 branch: dev user: foo date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 changeset: 22:d1cb6591fa4b branch: dev tag: tip user: foo date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 graft works on complex revset $ hg graft 'origin(13) or destination(origin(13))' skipping ancestor revision 21:7e61b508e709 skipping ancestor revision 22:d1cb6591fa4b skipping revision 2:5c095ad7e90f (already grafted to 22:d1cb6591fa4b) grafting 7:ef0ef43d49e7 "2" warning: can't find ancestor for 'b' copied from 'a'! grafting 13:7a4785234d87 "2" warning: can't find ancestor for 'b' copied from 'a'! grafting 19:9627f653b421 "2" merging b warning: can't find ancestor for 'b' copied from 'a'! graft with --force (still doesn't graft merges) $ hg graft 19 0 6 skipping ungraftable merge revision 6 skipping ancestor revision 0:68795b066622 skipping already grafted revision 19:9627f653b421 (22:d1cb6591fa4b also has origin 2:5c095ad7e90f) [255] $ hg graft 19 0 6 --force skipping ungraftable merge revision 6 grafting 19:9627f653b421 "2" merging b warning: can't find ancestor for 'b' copied from 'a'! 
grafting 0:68795b066622 "0" graft --force after backout $ echo abc > a $ hg ci -m 28 $ hg backout 28 reverting a changeset 29:53177ba928f6 backs out changeset 28:50a516bb8b57 $ hg graft 28 skipping ancestor revision 28:50a516bb8b57 [255] $ hg graft 28 --force grafting 28:50a516bb8b57 "28" merging a $ cat a abc graft --continue after --force $ echo def > a $ hg ci -m 31 $ hg graft 28 --force --tool internal:fail grafting 28:50a516bb8b57 "28" abort: unresolved conflicts, can't continue (use hg resolve and hg graft --continue) [255] $ hg resolve --all merging a warning: conflicts while merging a! (edit, then use 'hg resolve --mark') [1] $ echo abc > a $ hg resolve -m a (no more unresolved files) continue: hg graft --continue $ hg graft -c grafting 28:50a516bb8b57 "28" $ cat a abc Continue testing same origin policy, using revision numbers from test above but do some destructive editing of the repo: $ hg up -qC 7 $ hg tag -l -r 13 tmp $ hg --config extensions.strip= strip 2 saved backup bundle to $TESTTMP/a/.hg/strip-backup/5c095ad7e90f-d323a1e4-backup.hg (glob) $ hg graft tmp skipping already grafted revision 8:7a4785234d87 (2:ef0ef43d49e7 also has unknown origin 5c095ad7e90f) [255] Empty graft $ hg up -qr 26 $ hg tag -f something $ hg graft -qr 27 $ hg graft -f 27 grafting 27:ed6c7e54e319 "28" note: graft of 27:ed6c7e54e319 created no changes to commit $ cd .. 
Graft to duplicate a commit $ hg init graftsibling $ cd graftsibling $ touch a $ hg commit -qAm a $ touch b $ hg commit -qAm b $ hg log -G -T '{rev}\n' @ 1 | o 0 $ hg up -q 0 $ hg graft -r 1 grafting 1:0e067c57feba "b" (tip) $ hg log -G -T '{rev}\n' @ 2 | | o 1 |/ o 0 Graft to duplicate a commit twice $ hg up -q 0 $ hg graft -r 2 grafting 2:044ec77f6389 "b" (tip) $ hg log -G -T '{rev}\n' @ 3 | | o 2 |/ | o 1 |/ o 0 mercurial-3.7.3/tests/test-casecollision.t0000644000175000017500000000244612676531525020270 0ustar mpmmpm00000000000000#require no-icasefs test file addition with colliding case $ hg init repo1 $ cd repo1 $ echo a > a $ echo A > A $ hg add a $ hg st A a ? A $ hg add --config ui.portablefilenames=abort A abort: possible case-folding collision for A [255] $ hg st A a ? A $ hg add A warning: possible case-folding collision for A $ hg st A A A a $ hg forget A $ hg st A a ? A $ hg add --config ui.portablefilenames=no A $ hg st A A A a $ mkdir b $ touch b/c b/D $ hg add b adding b/D adding b/c $ touch b/d b/C $ hg add b/C warning: possible case-folding collision for b/C $ hg add b/d warning: possible case-folding collision for b/d $ touch b/a1 b/a2 $ hg add b adding b/a1 adding b/a2 $ touch b/A2 b/a1.1 $ hg add b/a1.1 b/A2 warning: possible case-folding collision for b/A2 $ touch b/f b/F $ hg add b/f b/F warning: possible case-folding collision for b/f $ touch g G $ hg add g G warning: possible case-folding collision for g $ mkdir h H $ touch h/x H/x $ hg add h/x H/x warning: possible case-folding collision for h/x $ touch h/s H/s $ hg add h/s $ hg add H/s warning: possible case-folding collision for H/s case changing rename must not warn or abort $ echo c > c $ hg ci -qAmx $ hg mv c C $ cd .. mercurial-3.7.3/tests/test-push-cgi.t0000644000175000017500000000525012676531525017154 0ustar mpmmpm00000000000000#require no-msys # MSYS will translate web paths as if they were file paths This is a test of the push wire protocol over CGI-based hgweb. 
initialize repository $ hg init r $ cd r $ echo a > a $ hg ci -A -m "0" adding a $ echo '[web]' > .hg/hgrc $ echo 'allow_push = *' >> .hg/hgrc $ echo 'push_ssl = false' >> .hg/hgrc create hgweb invocation script $ cat >hgweb.cgi < import cgitb > cgitb.enable() > from mercurial import demandimport; demandimport.enable() > from mercurial.hgweb import hgweb > from mercurial.hgweb import wsgicgi > application = hgweb('.', 'test repository') > wsgicgi.launch(application) > HGWEB $ chmod 755 hgweb.cgi test preparation $ . "$TESTDIR/cgienv" $ REQUEST_METHOD="POST"; export REQUEST_METHOD $ CONTENT_TYPE="application/octet-stream"; export CONTENT_TYPE $ hg bundle --type v1 --all bundle.hg 1 changesets found $ CONTENT_LENGTH=279; export CONTENT_LENGTH; expect failure because heads doesn't match (formerly known as 'unsynced changes') $ QUERY_STRING="cmd=unbundle&heads=0000000000000000000000000000000000000000"; export QUERY_STRING $ python hgweb.cgi page1 2>&1 $ cat page1 Status: 200 Script output follows\r (esc) Content-Type: application/mercurial-0.1\r (esc) Content-Length: 64\r (esc) \r (esc) 0 repository changed while preparing changes - please try again successful force push $ QUERY_STRING="cmd=unbundle&heads=666f726365"; export QUERY_STRING $ python hgweb.cgi page2 2>&1 $ cat page2 Status: 200 Script output follows\r (esc) Content-Type: application/mercurial-0.1\r (esc) Content-Length: 102\r (esc) \r (esc) 1 adding changesets adding manifests adding file changes added 0 changesets with 0 changes to 1 files successful push, list of heads $ QUERY_STRING="cmd=unbundle&heads=f7b1eb17ad24730a1651fccd46c43826d1bbc2ac"; export QUERY_STRING $ python hgweb.cgi page3 2>&1 $ cat page3 Status: 200 Script output follows\r (esc) Content-Type: application/mercurial-0.1\r (esc) Content-Length: 102\r (esc) \r (esc) 1 adding changesets adding manifests adding file changes added 0 changesets with 0 changes to 1 files successful push, SHA1 hash of heads (unbundlehash capability) $ 
QUERY_STRING="cmd=unbundle&heads=686173686564 5a785a5f9e0d433b88ed862b206b011b0c3a9d13"; export QUERY_STRING $ python hgweb.cgi page4 2>&1 $ cat page4 Status: 200 Script output follows\r (esc) Content-Type: application/mercurial-0.1\r (esc) Content-Length: 102\r (esc) \r (esc) 1 adding changesets adding manifests adding file changes added 0 changesets with 0 changes to 1 files $ cd .. mercurial-3.7.3/tests/test-patch.t0000644000175000017500000000441612676531525016537 0ustar mpmmpm00000000000000 $ cat > patchtool.py < import sys > print 'Using custom patch' > if '--binary' in sys.argv: > print '--binary found !' > EOF $ echo "[ui]" >> $HGRCPATH $ echo "patch=python ../patchtool.py" >> $HGRCPATH $ hg init a $ cd a $ echo a > a $ hg commit -Ama -d '1 0' adding a $ echo b >> a $ hg commit -Amb -d '2 0' $ cd .. This test checks that: - custom patch commands with arguments actually work - patch code does not try to add weird arguments like --binary when custom patch commands are used. For instance --binary is added by default under win32. check custom patch options are honored $ hg --cwd a export -o ../a.diff tip $ hg clone -r 0 a b adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg --cwd b import -v ../a.diff applying ../a.diff Using custom patch applied to working directory Issue2417: hg import with # comments in description Prepare source repo and patch: $ rm $HGRCPATH $ hg init c $ cd c $ printf "a\rc" > a $ hg ci -A -m 0 a -d '0 0' $ printf "a\rb\rc" > a $ cat << eof > log > first line which can't start with '# ' > # second line is a comment but that shouldn't be a problem. > A patch marker like this was more problematic even after d7452292f9d3: > # HG changeset patch > # User lines looks like this - but it _is_ just a comment > eof $ hg ci -l log -d '0 0' $ hg export -o p 1 $ cd .. 
Clone and apply patch: $ hg clone -r 0 c d adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd d $ hg import ../c/p applying ../c/p $ hg log -v -r 1 changeset: 1:cd0bde79c428 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 files: a description: first line which can't start with '# ' # second line is a comment but that shouldn't be a problem. A patch marker like this was more problematic even after d7452292f9d3: # HG changeset patch # User lines looks like this - but it _is_ just a comment $ cd .. mercurial-3.7.3/tests/test-contrib-check-code.t0000644000175000017500000001241212676531525021056 0ustar mpmmpm00000000000000 $ cat > correct.py < def toto(arg1, arg2): > del arg2 > return (5 + 6, 9) > EOF $ cat > wrong.py < def toto( arg1, arg2): > del(arg2) > return ( 5+6, 9) > EOF $ cat > quote.py < # let's use quote in comments > (''' ( 4x5 ) > but """\\''' and finally''', > """let's fool checkpatch""", '1+2', > '"""', 42+1, """and > ( 4-1 ) """, "( 1+1 )\" and ") > a, '\\\\\\\\', "\\\\\\" x-2", "c-1" > EOF $ cat > classstyle.py < class newstyle_class(object): > pass > > class oldstyle_class: > pass > > class empty(): > pass > > no_class = 1: > pass > EOF $ check_code="$TESTDIR"/../contrib/check-code.py $ "$check_code" ./wrong.py ./correct.py ./quote.py ./classstyle.py ./wrong.py:1: > def toto( arg1, arg2): gratuitous whitespace in () or [] ./wrong.py:2: > del(arg2) Python keyword is not a function ./wrong.py:3: > return ( 5+6, 9) gratuitous whitespace in () or [] missing whitespace in expression ./quote.py:5: > '"""', 42+1, """and missing whitespace in expression ./classstyle.py:4: > class oldstyle_class: old-style class, use class foo(object) ./classstyle.py:7: > class empty(): class foo() creates old style object, use class foo(object) [1] $ cat > python3-compat.py << EOF > foo <> bar > reduce(lambda a, b: a + 
b, [1, 2, 3, 4]) > dict(key=value) > EOF $ "$check_code" python3-compat.py python3-compat.py:1: > foo <> bar <> operator is not available in Python 3+, use != python3-compat.py:2: > reduce(lambda a, b: a + b, [1, 2, 3, 4]) reduce is not available in Python 3+ python3-compat.py:3: > dict(key=value) dict() is different in Py2 and 3 and is slower than {} [1] $ cat > is-op.py < # is-operator comparing number or string literal > x = None > y = x is 'foo' > y = x is "foo" > y = x is 5346 > y = x is -6 > y = x is not 'foo' > y = x is not "foo" > y = x is not 5346 > y = x is not -6 > EOF $ "$check_code" ./is-op.py ./is-op.py:3: > y = x is 'foo' object comparison with literal ./is-op.py:4: > y = x is "foo" object comparison with literal ./is-op.py:5: > y = x is 5346 object comparison with literal ./is-op.py:6: > y = x is -6 object comparison with literal ./is-op.py:7: > y = x is not 'foo' object comparison with literal ./is-op.py:8: > y = x is not "foo" object comparison with literal ./is-op.py:9: > y = x is not 5346 object comparison with literal ./is-op.py:10: > y = x is not -6 object comparison with literal [1] $ cat > for-nolineno.py < except: > EOF $ "$check_code" for-nolineno.py --nolineno for-nolineno.py:0: > except: naked except clause [1] $ cat > warning.t < $ function warnonly { > > } > $ diff -N aaa > $ function onwarn {} > EOF $ "$check_code" warning.t $ "$check_code" --warn warning.t warning.t:1: > $ function warnonly { warning: don't use 'function', use old style warning.t:3: > $ diff -N aaa warning: don't use 'diff -N' warning.t:4: > $ function onwarn {} warning: don't use 'function', use old style [1] $ cat > raise-format.py < raise SomeException, message > # this next line is okay > raise SomeException(arg1, arg2) > EOF $ "$check_code" not-existing.py raise-format.py Skipping*not-existing.py* (glob) raise-format.py:1: > raise SomeException, message don't use old-style two-argument raise, use Exception(message) [1] $ cat > rst.py < """problematic rst text > 
> .. note:: > wrong > """ > > ''' > > .. note:: > > valid > > new text > > .. note:: > > also valid > ''' > > """mixed > > .. note:: > > good > > .. note:: > plus bad > """ > EOF $ $check_code -w rst.py rst.py:3: > .. note:: warning: add two newlines after '.. note::' rst.py:26: > .. note:: warning: add two newlines after '.. note::' [1] $ cat > ./map-inside-gettext.py < print _("map inside gettext %s" % v) > > print _("concatenating " " by " " space %s" % v) > print _("concatenating " + " by " + " '+' %s" % v) > > print _("mapping operation in different line %s" > % v) > > print _( > "leading spaces inside of '(' %s" % v) > EOF $ "$check_code" ./map-inside-gettext.py ./map-inside-gettext.py:1: > print _("map inside gettext %s" % v) don't use % inside _() ./map-inside-gettext.py:3: > print _("concatenating " " by " " space %s" % v) don't use % inside _() ./map-inside-gettext.py:4: > print _("concatenating " + " by " + " '+' %s" % v) don't use % inside _() ./map-inside-gettext.py:6: > print _("mapping operation in different line %s" don't use % inside _() ./map-inside-gettext.py:9: > print _( don't use % inside _() [1] web templates $ mkdir -p mercurial/templates $ cat > mercurial/templates/example.tmpl < {desc} > {desc|escape} > {desc|firstline} > {desc|websub} > EOF $ "$check_code" --warnings mercurial/templates/example.tmpl mercurial/templates/example.tmpl:2: > {desc|escape} warning: follow desc keyword with either firstline or websub [1] mercurial-3.7.3/tests/test-commit-multiple.t0000644000175000017500000000701612676531525020560 0ustar mpmmpm00000000000000# reproduce issue2264, issue2516 create test repo $ cat <> $HGRCPATH > [extensions] > transplant = > EOF $ hg init repo $ cd repo $ template="{rev} {desc|firstline} [{branch}]\n" # we need to start out with two changesets on the default branch # in order to avoid the cute little optimization where transplant # pulls rather than transplants add initial changesets $ echo feature1 > file1 $ hg ci -Am"feature 1" 
adding file1 $ echo feature2 >> file2 $ hg ci -Am"feature 2" adding file2 # The changes to 'bugfix' are enough to show the bug: in fact, with only # those changes, it's a very noisy crash ("RuntimeError: nothing # committed after transplant"). But if we modify a second file in the # transplanted changesets, the bug is much more subtle: transplant # silently drops the second change to 'bugfix' on the floor, and we only # see it when we run 'hg status' after transplanting. Subtle data loss # bugs are worse than crashes, so reproduce the subtle case here. commit bug fixes on bug fix branch $ hg branch fixes marked working directory as branch fixes (branches are permanent and global, did you want a bookmark?) $ echo fix1 > bugfix $ echo fix1 >> file1 $ hg ci -Am"fix 1" adding bugfix $ echo fix2 > bugfix $ echo fix2 >> file1 $ hg ci -Am"fix 2" $ hg log -G --template="$template" @ 3 fix 2 [fixes] | o 2 fix 1 [fixes] | o 1 feature 2 [default] | o 0 feature 1 [default] transplant bug fixes onto release branch $ hg update 0 1 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg branch release marked working directory as branch release $ hg transplant 2 3 applying [0-9a-f]{12} (re) [0-9a-f]{12} transplanted to [0-9a-f]{12} (re) applying [0-9a-f]{12} (re) [0-9a-f]{12} transplanted to [0-9a-f]{12} (re) $ hg log -G --template="$template" @ 5 fix 2 [release] | o 4 fix 1 [release] | | o 3 fix 2 [fixes] | | | o 2 fix 1 [fixes] | | | o 1 feature 2 [default] |/ o 0 feature 1 [default] $ hg status $ hg status --rev 0:4 M file1 A bugfix $ hg status --rev 4:5 M bugfix M file1 now test that we fixed the bug for all scripts/extensions $ cat > $TESTTMP/committwice.py <<__EOF__ > from mercurial import ui, hg, match, node > from time import sleep > > def replacebyte(fn, b): > f = open(fn, "rb+") > f.seek(0, 0) > f.write(b) > f.close() > > def printfiles(repo, rev): > print "revision %s files: %s" % (rev, repo[rev].files()) > > repo = hg.repository(ui.ui(), '.') > assert 
len(repo) == 6, \ > "initial: len(repo): %d, expected: 6" % len(repo) > > replacebyte("bugfix", "u") > sleep(2) > try: > print "PRE: len(repo): %d" % len(repo) > wlock = repo.wlock() > lock = repo.lock() > replacebyte("file1", "x") > repo.commit(text="x", user="test", date=(0, 0)) > replacebyte("file1", "y") > repo.commit(text="y", user="test", date=(0, 0)) > print "POST: len(repo): %d" % len(repo) > finally: > lock.release() > wlock.release() > printfiles(repo, 6) > printfiles(repo, 7) > __EOF__ $ $PYTHON $TESTTMP/committwice.py PRE: len(repo): 6 POST: len(repo): 8 revision 6 files: ['bugfix', 'file1'] revision 7 files: ['file1'] Do a size-preserving modification outside of that process $ echo abcd > bugfix $ hg status M bugfix $ hg log --template "{rev} {desc} {files}\n" -r5: 5 fix 2 bugfix file1 6 x bugfix file1 7 y file1 $ cd .. mercurial-3.7.3/tests/test-progress.t0000644000175000017500000003160012676531525017277 0ustar mpmmpm00000000000000 $ cat > loop.py < from mercurial import cmdutil, commands > import time > > cmdtable = {} > command = cmdutil.command(cmdtable) > > class incrementingtime(object): > def __init__(self): > self._time = 0.0 > def __call__(self): > self._time += 0.25 > return self._time > time.time = incrementingtime() > > @command('loop', > [('', 'total', '', 'override for total'), > ('', 'nested', False, 'show nested results'), > ('', 'parallel', False, 'show parallel sets of results')], > 'hg loop LOOPS', > norepo=True) > def loop(ui, loops, **opts): > loops = int(loops) > total = None > if loops >= 0: > total = loops > if opts.get('total', None): > total = int(opts.get('total')) > nested = False > if opts.get('nested', None): > nested = True > loops = abs(loops) > > for i in range(loops): > ui.progress(topiclabel, i, getloopitem(i), 'loopnum', total) > if opts.get('parallel'): > ui.progress('other', i, 'other.%d' % i, 'othernum', total) > if nested: > nested_steps = 2 > if i and i % 4 == 0: > nested_steps = 5 > for j in 
range(nested_steps): > ui.progress( > 'nested', j, 'nested.%d' % j, 'nestnum', nested_steps) > ui.progress( > 'nested', None, 'nested.done', 'nestnum', nested_steps) > ui.progress(topiclabel, None, 'loop.done', 'loopnum', total) > > topiclabel = 'loop' > def getloopitem(i): > return 'loop.%d' % i > > EOF $ cp $HGRCPATH $HGRCPATH.orig $ echo "[extensions]" >> $HGRCPATH $ echo "progress=" >> $HGRCPATH $ echo "loop=`pwd`/loop.py" >> $HGRCPATH $ echo "[progress]" >> $HGRCPATH $ echo "format = topic bar number" >> $HGRCPATH $ echo "assume-tty=1" >> $HGRCPATH $ echo "width=60" >> $HGRCPATH test default params, display nothing because of delay $ hg -y loop 3 $ echo "delay=0" >> $HGRCPATH $ echo "refresh=0" >> $HGRCPATH test with delay=0, refresh=0 $ hg -y loop 3 \r (no-eol) (esc) loop [ ] 0/3\r (no-eol) (esc) loop [===============> ] 1/3\r (no-eol) (esc) loop [===============================> ] 2/3\r (no-eol) (esc) \r (no-eol) (esc) no progress with --quiet $ hg -y loop 3 --quiet test nested short-lived topics (which shouldn't display with nestdelay): $ hg -y loop 3 --nested \r (no-eol) (esc) loop [ ] 0/3\r (no-eol) (esc) loop [===============> ] 1/3\r (no-eol) (esc) loop [===============================> ] 2/3\r (no-eol) (esc) \r (no-eol) (esc) Test nested long-lived topic which has the same name as a short-lived peer. We shouldn't get stuck showing the short-lived inner steps, and should go back to skipping the inner steps when the slow nested step finishes. 
$ hg -y loop 7 --nested \r (no-eol) (esc) loop [ ] 0/7\r (no-eol) (esc) loop [=====> ] 1/7\r (no-eol) (esc) loop [============> ] 2/7\r (no-eol) (esc) loop [===================> ] 3/7\r (no-eol) (esc) loop [==========================> ] 4/7\r (no-eol) (esc) nested [==========================> ] 3/5\r (no-eol) (esc) nested [===================================> ] 4/5\r (no-eol) (esc) loop [=================================> ] 5/7\r (no-eol) (esc) loop [========================================> ] 6/7\r (no-eol) (esc) \r (no-eol) (esc) $ hg --config progress.changedelay=0 -y loop 3 --nested \r (no-eol) (esc) loop [ ] 0/3\r (no-eol) (esc) nested [ ] 0/2\r (no-eol) (esc) nested [======================> ] 1/2\r (no-eol) (esc) loop [===============> ] 1/3\r (no-eol) (esc) nested [ ] 0/2\r (no-eol) (esc) nested [======================> ] 1/2\r (no-eol) (esc) loop [===============================> ] 2/3\r (no-eol) (esc) nested [ ] 0/2\r (no-eol) (esc) nested [======================> ] 1/2\r (no-eol) (esc) \r (no-eol) (esc) test two topics being printed in parallel (as when we're doing a local --pull clone, where you get the unbundle and bundle progress at the same time): $ hg loop 3 --parallel \r (no-eol) (esc) loop [ ] 0/3\r (no-eol) (esc) loop [===============> ] 1/3\r (no-eol) (esc) loop [===============================> ] 2/3\r (no-eol) (esc) \r (no-eol) (esc) test refresh is taken in account $ hg -y --config progress.refresh=100 loop 3 test format options 1 $ hg -y --config 'progress.format=number topic item+2' loop 2 \r (no-eol) (esc) 0/2 loop lo\r (no-eol) (esc) 1/2 loop lo\r (no-eol) (esc) \r (no-eol) (esc) test format options 2 $ hg -y --config 'progress.format=number item-3 bar' loop 2 \r (no-eol) (esc) 0/2 p.0 [ ]\r (no-eol) (esc) 1/2 p.1 [=======================> ]\r (no-eol) (esc) \r (no-eol) (esc) test format options and indeterminate progress $ hg -y --config 'progress.format=number item bar' loop -- -2 \r (no-eol) (esc) 0 loop.0 [ <=> ]\r (no-eol) (esc) 1 
loop.1 [ <=> ]\r (no-eol) (esc) \r (no-eol) (esc) make sure things don't fall over if count > total $ hg -y loop --total 4 6 \r (no-eol) (esc) loop [ ] 0/4\r (no-eol) (esc) loop [===========> ] 1/4\r (no-eol) (esc) loop [=======================> ] 2/4\r (no-eol) (esc) loop [===================================> ] 3/4\r (no-eol) (esc) loop [===============================================>] 4/4\r (no-eol) (esc) loop [ <=> ] 5/4\r (no-eol) (esc) \r (no-eol) (esc) test immediate progress completion $ hg -y loop 0 test delay time estimates $ cat > mocktime.py < import os > import time > > class mocktime(object): > def __init__(self, increment): > self.time = 0 > self.increment = increment > def __call__(self): > self.time += self.increment > return self.time > > def uisetup(ui): > time.time = mocktime(int(os.environ.get('MOCKTIME', '11'))) > EOF $ cp $HGRCPATH.orig $HGRCPATH $ echo "[extensions]" >> $HGRCPATH $ echo "mocktime=`pwd`/mocktime.py" >> $HGRCPATH $ echo "progress=" >> $HGRCPATH $ echo "loop=`pwd`/loop.py" >> $HGRCPATH $ echo "[progress]" >> $HGRCPATH $ echo "assume-tty=1" >> $HGRCPATH $ echo "delay=25" >> $HGRCPATH $ echo "width=60" >> $HGRCPATH $ hg -y loop 8 \r (no-eol) (esc) loop [=========> ] 2/8 1m07s\r (no-eol) (esc) loop [===============> ] 3/8 56s\r (no-eol) (esc) loop [=====================> ] 4/8 45s\r (no-eol) (esc) loop [==========================> ] 5/8 34s\r (no-eol) (esc) loop [================================> ] 6/8 23s\r (no-eol) (esc) loop [=====================================> ] 7/8 12s\r (no-eol) (esc) \r (no-eol) (esc) $ MOCKTIME=10000 hg -y loop 4 \r (no-eol) (esc) loop [ ] 0/4\r (no-eol) (esc) loop [=========> ] 1/4 8h21m\r (no-eol) (esc) loop [====================> ] 2/4 5h34m\r (no-eol) (esc) loop [==============================> ] 3/4 2h47m\r (no-eol) (esc) \r (no-eol) (esc) $ MOCKTIME=1000000 hg -y loop 4 \r (no-eol) (esc) loop [ ] 0/4\r (no-eol) (esc) loop [=========> ] 1/4 5w00d\r (no-eol) (esc) loop [====================> ] 2/4 
3w03d\r (no-eol) (esc) loop [=============================> ] 3/4 11d14h\r (no-eol) (esc) \r (no-eol) (esc) $ MOCKTIME=14000000 hg -y loop 4 \r (no-eol) (esc) loop [ ] 0/4\r (no-eol) (esc) loop [=========> ] 1/4 1y18w\r (no-eol) (esc) loop [===================> ] 2/4 46w03d\r (no-eol) (esc) loop [=============================> ] 3/4 23w02d\r (no-eol) (esc) \r (no-eol) (esc) Time estimates should not fail when there's no end point: $ hg -y loop -- -4 \r (no-eol) (esc) loop [ <=> ] 2\r (no-eol) (esc) loop [ <=> ] 3\r (no-eol) (esc) \r (no-eol) (esc) test line trimming by '[progress] width', when progress topic contains multi-byte characters, of which length of byte sequence and columns in display are different from each other. $ cp $HGRCPATH.orig $HGRCPATH $ cat >> $HGRCPATH < [extensions] > progress= > loop=`pwd`/loop.py > [progress] > assume-tty = 1 > delay = 0 > refresh = 0 > EOF $ rm -f loop.pyc $ cat >> loop.py < # use non-ascii characters as topic label of progress > # 2 x 4 = 8 columns, but 3 x 4 = 12 bytes > topiclabel = u'\u3042\u3044\u3046\u3048'.encode('utf-8') > EOF $ cat >> $HGRCPATH < [progress] > format = topic number > width= 12 > EOF $ hg --encoding utf-8 -y loop --total 3 3 \r (no-eol) (esc) \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88 0/3\r (no-eol) (esc) \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88 1/3\r (no-eol) (esc) \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88 2/3\r (no-eol) (esc) \r (no-eol) (esc) test calculation of bar width, when progress topic contains multi-byte characters, of which length of byte sequence and columns in display are different from each other. 
$ cat >> $HGRCPATH < [progress] > format = topic bar > width= 21 > # progwidth should be 9 (= 21 - (8+1) - 3) > EOF $ hg --encoding utf-8 -y loop --total 3 3 \r (no-eol) (esc) \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88 [ ]\r (no-eol) (esc) \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88 [==> ]\r (no-eol) (esc) \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88 [=====> ]\r (no-eol) (esc) \r (no-eol) (esc) test trimming progress items, when they contain multi-byte characters, of which length of byte sequence and columns in display are different from each other. $ rm -f loop.pyc $ cat >> loop.py < # use non-ascii characters as loop items of progress > loopitems = [ > u'\u3042\u3044'.encode('utf-8'), # 2 x 2 = 4 columns > u'\u3042\u3044\u3046'.encode('utf-8'), # 2 x 3 = 6 columns > u'\u3042\u3044\u3046\u3048'.encode('utf-8'), # 2 x 4 = 8 columns > ] > def getloopitem(i): > return loopitems[i % len(loopitems)] > EOF $ cat >> $HGRCPATH < [progress] > # trim at tail side > format = item+6 > EOF $ hg --encoding utf-8 -y loop --total 3 3 \r (no-eol) (esc) \xe3\x81\x82\xe3\x81\x84 \r (no-eol) (esc) \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\r (no-eol) (esc) \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\r (no-eol) (esc) \r (no-eol) (esc) $ cat >> $HGRCPATH < [progress] > # trim at left side > format = item-6 > EOF $ hg --encoding utf-8 -y loop --total 3 3 \r (no-eol) (esc) \xe3\x81\x82\xe3\x81\x84 \r (no-eol) (esc) \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\r (no-eol) (esc) \xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\r (no-eol) (esc) \r (no-eol) (esc) mercurial-3.7.3/tests/test-lock.py0000644000175000017500000002115712676531525016556 0ustar mpmmpm00000000000000from __future__ import absolute_import import copy import os import silenttestrunner import tempfile import types import unittest from mercurial import ( error, lock, scmutil, ) testlockname = 'testlock' # work around http://bugs.python.org/issue1515 if types.MethodType not in copy._deepcopy_dispatch: def _deepcopy_method(x, memo): return 
type(x)(x.im_func, copy.deepcopy(x.im_self, memo), x.im_class) copy._deepcopy_dispatch[types.MethodType] = _deepcopy_method class lockwrapper(lock.lock): def __init__(self, pidoffset, *args, **kwargs): # lock.lock.__init__() calls lock(), so the pidoffset assignment needs # to be earlier self._pidoffset = pidoffset super(lockwrapper, self).__init__(*args, **kwargs) def _getpid(self): return os.getpid() + self._pidoffset class teststate(object): def __init__(self, testcase, dir, pidoffset=0): self._testcase = testcase self._acquirecalled = False self._releasecalled = False self._postreleasecalled = False self.vfs = scmutil.vfs(dir, audit=False) self._pidoffset = pidoffset def makelock(self, *args, **kwargs): l = lockwrapper(self._pidoffset, self.vfs, testlockname, releasefn=self.releasefn, acquirefn=self.acquirefn, *args, **kwargs) l.postrelease.append(self.postreleasefn) return l def acquirefn(self): self._acquirecalled = True def releasefn(self): self._releasecalled = True def postreleasefn(self): self._postreleasecalled = True def assertacquirecalled(self, called): self._testcase.assertEqual( self._acquirecalled, called, 'expected acquire to be %s but was actually %s' % ( self._tocalled(called), self._tocalled(self._acquirecalled), )) def resetacquirefn(self): self._acquirecalled = False def assertreleasecalled(self, called): self._testcase.assertEqual( self._releasecalled, called, 'expected release to be %s but was actually %s' % ( self._tocalled(called), self._tocalled(self._releasecalled), )) def assertpostreleasecalled(self, called): self._testcase.assertEqual( self._postreleasecalled, called, 'expected postrelease to be %s but was actually %s' % ( self._tocalled(called), self._tocalled(self._postreleasecalled), )) def assertlockexists(self, exists): actual = self.vfs.lexists(testlockname) self._testcase.assertEqual( actual, exists, 'expected lock to %s but actually did %s' % ( self._toexists(exists), self._toexists(actual), )) def _tocalled(self, called): if 
called: return 'called' else: return 'not called' def _toexists(self, exists): if exists: return 'exist' else: return 'not exist' class testlock(unittest.TestCase): def testlock(self): state = teststate(self, tempfile.mkdtemp(dir=os.getcwd())) lock = state.makelock() state.assertacquirecalled(True) lock.release() state.assertreleasecalled(True) state.assertpostreleasecalled(True) state.assertlockexists(False) def testrecursivelock(self): state = teststate(self, tempfile.mkdtemp(dir=os.getcwd())) lock = state.makelock() state.assertacquirecalled(True) state.resetacquirefn() lock.lock() # recursive lock should not call acquirefn again state.assertacquirecalled(False) lock.release() # brings lock refcount down from 2 to 1 state.assertreleasecalled(False) state.assertpostreleasecalled(False) state.assertlockexists(True) lock.release() # releases the lock state.assertreleasecalled(True) state.assertpostreleasecalled(True) state.assertlockexists(False) def testlockfork(self): state = teststate(self, tempfile.mkdtemp(dir=os.getcwd())) lock = state.makelock() state.assertacquirecalled(True) # fake a fork forklock = copy.deepcopy(lock) forklock._pidoffset = 1 forklock.release() state.assertreleasecalled(False) state.assertpostreleasecalled(False) state.assertlockexists(True) # release the actual lock lock.release() state.assertreleasecalled(True) state.assertpostreleasecalled(True) state.assertlockexists(False) def testinheritlock(self): d = tempfile.mkdtemp(dir=os.getcwd()) parentstate = teststate(self, d) parentlock = parentstate.makelock() parentstate.assertacquirecalled(True) # set up lock inheritance with parentlock.inherit() as lockname: parentstate.assertreleasecalled(True) parentstate.assertpostreleasecalled(False) parentstate.assertlockexists(True) childstate = teststate(self, d, pidoffset=1) childlock = childstate.makelock(parentlock=lockname) childstate.assertacquirecalled(True) childlock.release() childstate.assertreleasecalled(True) 
childstate.assertpostreleasecalled(False) childstate.assertlockexists(True) parentstate.resetacquirefn() parentstate.assertacquirecalled(True) parentlock.release() parentstate.assertreleasecalled(True) parentstate.assertpostreleasecalled(True) parentstate.assertlockexists(False) def testmultilock(self): d = tempfile.mkdtemp(dir=os.getcwd()) state0 = teststate(self, d) lock0 = state0.makelock() state0.assertacquirecalled(True) with lock0.inherit() as lock0name: state0.assertreleasecalled(True) state0.assertpostreleasecalled(False) state0.assertlockexists(True) state1 = teststate(self, d, pidoffset=1) lock1 = state1.makelock(parentlock=lock0name) state1.assertacquirecalled(True) # from within lock1, acquire another lock with lock1.inherit() as lock1name: # since the file on disk is lock0's this should have the same # name self.assertEqual(lock0name, lock1name) state2 = teststate(self, d, pidoffset=2) lock2 = state2.makelock(parentlock=lock1name) state2.assertacquirecalled(True) lock2.release() state2.assertreleasecalled(True) state2.assertpostreleasecalled(False) state2.assertlockexists(True) state1.resetacquirefn() state1.assertacquirecalled(True) lock1.release() state1.assertreleasecalled(True) state1.assertpostreleasecalled(False) state1.assertlockexists(True) lock0.release() def testinheritlockfork(self): d = tempfile.mkdtemp(dir=os.getcwd()) parentstate = teststate(self, d) parentlock = parentstate.makelock() parentstate.assertacquirecalled(True) # set up lock inheritance with parentlock.inherit() as lockname: childstate = teststate(self, d, pidoffset=1) childlock = childstate.makelock(parentlock=lockname) childstate.assertacquirecalled(True) # fork the child lock forkchildlock = copy.deepcopy(childlock) forkchildlock._pidoffset += 1 forkchildlock.release() childstate.assertreleasecalled(False) childstate.assertpostreleasecalled(False) childstate.assertlockexists(True) # release the child lock childlock.release() childstate.assertreleasecalled(True) 
childstate.assertpostreleasecalled(False) childstate.assertlockexists(True) parentlock.release() def testinheritcheck(self): d = tempfile.mkdtemp(dir=os.getcwd()) state = teststate(self, d) def check(): raise error.LockInheritanceContractViolation('check failed') lock = state.makelock(inheritchecker=check) state.assertacquirecalled(True) def tryinherit(): with lock.inherit(): pass self.assertRaises(error.LockInheritanceContractViolation, tryinherit) lock.release() if __name__ == '__main__': silenttestrunner.main(__name__) mercurial-3.7.3/tests/test-ctxmanager.py0000644000175000017500000000467512676531525017765 0ustar mpmmpm00000000000000from __future__ import absolute_import import silenttestrunner import unittest from mercurial.util import ctxmanager class contextmanager(object): def __init__(self, name, trace): self.name = name self.entered = False self.exited = False self.trace = trace def __enter__(self): self.entered = True self.trace(('enter', self.name)) return self def __exit__(self, exc_type, exc_val, exc_tb): self.exited = exc_type, exc_val, exc_tb self.trace(('exit', self.name)) def __repr__(self): return '' % self.name class ctxerror(Exception): pass class raise_on_enter(contextmanager): def __enter__(self): self.trace(('raise', self.name)) raise ctxerror(self.name) class raise_on_exit(contextmanager): def __exit__(self, exc_type, exc_val, exc_tb): self.trace(('raise', self.name)) raise ctxerror(self.name) def ctxmgr(name, trace): return lambda: contextmanager(name, trace) class test_ctxmanager(unittest.TestCase): def test_basics(self): trace = [] addtrace = trace.append with ctxmanager(ctxmgr('a', addtrace), ctxmgr('b', addtrace)) as c: a, b = c.enter() c.atexit(addtrace, ('atexit', 'x')) c.atexit(addtrace, ('atexit', 'y')) self.assertEqual(trace, [('enter', 'a'), ('enter', 'b'), ('atexit', 'y'), ('atexit', 'x'), ('exit', 'b'), ('exit', 'a')]) def test_raise_on_enter(self): trace = [] addtrace = trace.append def go(): with ctxmanager(ctxmgr('a', 
addtrace), lambda: raise_on_enter('b', addtrace)) as c: c.enter() addtrace('unreachable') self.assertRaises(ctxerror, go) self.assertEqual(trace, [('enter', 'a'), ('raise', 'b'), ('exit', 'a')]) def test_raise_on_exit(self): trace = [] addtrace = trace.append def go(): with ctxmanager(ctxmgr('a', addtrace), lambda: raise_on_exit('b', addtrace)) as c: c.enter() addtrace('running') self.assertRaises(ctxerror, go) self.assertEqual(trace, [('enter', 'a'), ('enter', 'b'), 'running', ('raise', 'b'), ('exit', 'a')]) if __name__ == '__main__': silenttestrunner.main(__name__) mercurial-3.7.3/tests/test-custom-filters.t0000644000175000017500000000304412676531525020414 0ustar mpmmpm00000000000000 $ hg init $ cat > .hg/hgrc < [extensions] > prefixfilter = prefix.py > [encode] > *.txt = stripprefix: Copyright 2046, The Masters > [decode] > *.txt = insertprefix: Copyright 2046, The Masters > EOF $ cat > prefix.py < from mercurial import error > def stripprefix(s, cmd, filename, **kwargs): > header = '%s\n' % cmd > if s[:len(header)] != header: > raise error.Abort('missing header "%s" in %s' % (cmd, filename)) > return s[len(header):] > def insertprefix(s, cmd): > return '%s\n%s' % (cmd, s) > def reposetup(ui, repo): > repo.adddatafilter('stripprefix:', stripprefix) > repo.adddatafilter('insertprefix:', insertprefix) > EOF $ cat > .hgignore < .hgignore > prefix.py > prefix.pyc > EOF $ cat > stuff.txt < Copyright 2046, The Masters > Some stuff to ponder very carefully. > EOF $ hg add stuff.txt $ hg ci -m stuff Repository data: $ hg cat stuff.txt Some stuff to ponder very carefully. Fresh checkout: $ rm stuff.txt $ hg up -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat stuff.txt Copyright 2046, The Masters Some stuff to ponder very carefully. $ echo "Very very carefully." >> stuff.txt $ hg stat M stuff.txt $ echo "Unauthorized material subject to destruction." 
> morestuff.txt Problem encoding: $ hg add morestuff.txt $ hg ci -m morestuff abort: missing header "Copyright 2046, The Masters" in morestuff.txt [255] $ hg stat M stuff.txt A morestuff.txt mercurial-3.7.3/tests/test-branches.t0000644000175000017500000005021312676531525017221 0ustar mpmmpm00000000000000 $ hg init a $ cd a $ echo 'root' >root $ hg add root $ hg commit -d '0 0' -m "Adding root node" $ echo 'a' >a $ hg add a $ hg branch a marked working directory as branch a (branches are permanent and global, did you want a bookmark?) $ hg commit -d '1 0' -m "Adding a branch" $ hg branch q marked working directory as branch q $ echo 'aa' >a $ hg branch -C reset working directory to branch a $ hg commit -d '2 0' -m "Adding to a branch" $ hg update -C 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo 'b' >b $ hg add b $ hg branch b marked working directory as branch b $ hg commit -d '2 0' -m "Adding b branch" $ echo 'bh1' >bh1 $ hg add bh1 $ hg commit -d '3 0' -m "Adding b branch head 1" $ hg update -C 2 1 files updated, 0 files merged, 2 files removed, 0 files unresolved $ echo 'bh2' >bh2 $ hg add bh2 $ hg commit -d '4 0' -m "Adding b branch head 2" $ echo 'c' >c $ hg add c $ hg branch c marked working directory as branch c $ hg commit -d '5 0' -m "Adding c branch" reserved names $ hg branch tip abort: the name 'tip' is reserved [255] $ hg branch null abort: the name 'null' is reserved [255] $ hg branch . abort: the name '.' 
is reserved [255] invalid characters $ hg branch 'foo:bar' abort: ':' cannot be used in a name [255] $ hg branch 'foo > bar' abort: '\n' cannot be used in a name [255] trailing or leading spaces should be stripped before testing duplicates $ hg branch 'b ' abort: a branch of the same name already exists (use 'hg update' to switch to it) [255] $ hg branch ' b' abort: a branch of the same name already exists (use 'hg update' to switch to it) [255] verify update will accept invalid legacy branch names $ hg init test-invalid-branch-name $ cd test-invalid-branch-name $ hg pull -u "$TESTDIR"/bundles/test-invalid-branch-name.hg pulling from *test-invalid-branch-name.hg (glob) requesting all changes adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 2 files 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg update '"colon:test"' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd .. $ echo 'd' >d $ hg add d $ hg branch 'a branch name much longer than the default justification used by branches' marked working directory as branch a branch name much longer than the default justification used by branches $ hg commit -d '6 0' -m "Adding d branch" $ hg branches a branch name much longer than the default justification used by branches 7:10ff5895aa57 b 4:aee39cd168d0 c 6:589736a22561 (inactive) a 5:d8cbc61dbaa6 (inactive) default 0:19709c5a4e75 (inactive) ------- $ hg branches -a a branch name much longer than the default justification used by branches 7:10ff5895aa57 b 4:aee39cd168d0 --- Branch a $ hg log -b a changeset: 5:d8cbc61dbaa6 branch: a parent: 2:881fe2b92ad0 user: test date: Thu Jan 01 00:00:04 1970 +0000 summary: Adding b branch head 2 changeset: 2:881fe2b92ad0 branch: a user: test date: Thu Jan 01 00:00:02 1970 +0000 summary: Adding to a branch changeset: 1:dd6b440dd85a branch: a user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: Adding a branch ---- Branch b $ hg log -b b 
changeset: 4:aee39cd168d0 branch: b user: test date: Thu Jan 01 00:00:03 1970 +0000 summary: Adding b branch head 1 changeset: 3:ac22033332d1 branch: b parent: 0:19709c5a4e75 user: test date: Thu Jan 01 00:00:02 1970 +0000 summary: Adding b branch ---- going to test branch closing $ hg branches a branch name much longer than the default justification used by branches 7:10ff5895aa57 b 4:aee39cd168d0 c 6:589736a22561 (inactive) a 5:d8cbc61dbaa6 (inactive) default 0:19709c5a4e75 (inactive) $ hg up -C b 2 files updated, 0 files merged, 4 files removed, 0 files unresolved $ echo 'xxx1' >> b $ hg commit -d '7 0' -m 'adding cset to branch b' $ hg up -C aee39cd168d0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo 'xxx2' >> b $ hg commit -d '8 0' -m 'adding head to branch b' created new head $ echo 'xxx3' >> b $ hg commit -d '9 0' -m 'adding another cset to branch b' $ hg branches b 10:bfbe841b666e a branch name much longer than the default justification used by branches 7:10ff5895aa57 c 6:589736a22561 (inactive) a 5:d8cbc61dbaa6 (inactive) default 0:19709c5a4e75 (inactive) $ hg heads --closed changeset: 10:bfbe841b666e branch: b tag: tip user: test date: Thu Jan 01 00:00:09 1970 +0000 summary: adding another cset to branch b changeset: 8:eebb944467c9 branch: b parent: 4:aee39cd168d0 user: test date: Thu Jan 01 00:00:07 1970 +0000 summary: adding cset to branch b changeset: 7:10ff5895aa57 branch: a branch name much longer than the default justification used by branches user: test date: Thu Jan 01 00:00:06 1970 +0000 summary: Adding d branch changeset: 6:589736a22561 branch: c user: test date: Thu Jan 01 00:00:05 1970 +0000 summary: Adding c branch changeset: 5:d8cbc61dbaa6 branch: a parent: 2:881fe2b92ad0 user: test date: Thu Jan 01 00:00:04 1970 +0000 summary: Adding b branch head 2 changeset: 0:19709c5a4e75 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Adding root node $ hg heads changeset: 10:bfbe841b666e branch: b tag: tip user: 
test date: Thu Jan 01 00:00:09 1970 +0000 summary: adding another cset to branch b changeset: 8:eebb944467c9 branch: b parent: 4:aee39cd168d0 user: test date: Thu Jan 01 00:00:07 1970 +0000 summary: adding cset to branch b changeset: 7:10ff5895aa57 branch: a branch name much longer than the default justification used by branches user: test date: Thu Jan 01 00:00:06 1970 +0000 summary: Adding d branch changeset: 6:589736a22561 branch: c user: test date: Thu Jan 01 00:00:05 1970 +0000 summary: Adding c branch changeset: 5:d8cbc61dbaa6 branch: a parent: 2:881fe2b92ad0 user: test date: Thu Jan 01 00:00:04 1970 +0000 summary: Adding b branch head 2 changeset: 0:19709c5a4e75 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Adding root node $ hg commit -d '9 0' --close-branch -m 'prune bad branch' $ hg branches -a b 8:eebb944467c9 a branch name much longer than the default justification used by branches 7:10ff5895aa57 $ hg up -C b 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg commit -d '9 0' --close-branch -m 'close this part branch too' $ hg commit -d '9 0' --close-branch -m 're-closing this branch' abort: can only close branch heads [255] $ hg log -r tip --debug changeset: 12:e3d49c0575d8fc2cb1cd6859c747c14f5f6d499f branch: b tag: tip phase: draft parent: 8:eebb944467c9fb9651ed232aeaf31b3c0a7fc6c1 parent: -1:0000000000000000000000000000000000000000 manifest: 8:6f9ed32d2b310e391a4f107d5f0f071df785bfee user: test date: Thu Jan 01 00:00:09 1970 +0000 extra: branch=b extra: close=1 description: close this part branch too --- b branch should be inactive $ hg branches a branch name much longer than the default justification used by branches 7:10ff5895aa57 c 6:589736a22561 (inactive) a 5:d8cbc61dbaa6 (inactive) default 0:19709c5a4e75 (inactive) $ hg branches -c a branch name much longer than the default justification used by branches 7:10ff5895aa57 b 12:e3d49c0575d8 (closed) c 6:589736a22561 (inactive) a 5:d8cbc61dbaa6 (inactive) default 
0:19709c5a4e75 (inactive) $ hg branches -a a branch name much longer than the default justification used by branches 7:10ff5895aa57 $ hg branches -q a branch name much longer than the default justification used by branches c a default $ hg heads b no open branch heads found on branches b [1] $ hg heads --closed b changeset: 12:e3d49c0575d8 branch: b tag: tip parent: 8:eebb944467c9 user: test date: Thu Jan 01 00:00:09 1970 +0000 summary: close this part branch too changeset: 11:d3f163457ebf branch: b user: test date: Thu Jan 01 00:00:09 1970 +0000 summary: prune bad branch $ echo 'xxx4' >> b $ hg commit -d '9 0' -m 'reopen branch with a change' reopening closed branch head 12 --- branch b is back in action $ hg branches -a b 13:e23b5505d1ad a branch name much longer than the default justification used by branches 7:10ff5895aa57 ---- test heads listings $ hg heads changeset: 13:e23b5505d1ad branch: b tag: tip user: test date: Thu Jan 01 00:00:09 1970 +0000 summary: reopen branch with a change changeset: 7:10ff5895aa57 branch: a branch name much longer than the default justification used by branches user: test date: Thu Jan 01 00:00:06 1970 +0000 summary: Adding d branch changeset: 6:589736a22561 branch: c user: test date: Thu Jan 01 00:00:05 1970 +0000 summary: Adding c branch changeset: 5:d8cbc61dbaa6 branch: a parent: 2:881fe2b92ad0 user: test date: Thu Jan 01 00:00:04 1970 +0000 summary: Adding b branch head 2 changeset: 0:19709c5a4e75 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Adding root node branch default $ hg heads default changeset: 0:19709c5a4e75 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Adding root node branch a $ hg heads a changeset: 5:d8cbc61dbaa6 branch: a parent: 2:881fe2b92ad0 user: test date: Thu Jan 01 00:00:04 1970 +0000 summary: Adding b branch head 2 $ hg heads --active a no open branch heads found on branches a [1] branch b $ hg heads b changeset: 13:e23b5505d1ad branch: b tag: tip user: test date: Thu Jan 01 
00:00:09 1970 +0000 summary: reopen branch with a change $ hg heads --closed b changeset: 13:e23b5505d1ad branch: b tag: tip user: test date: Thu Jan 01 00:00:09 1970 +0000 summary: reopen branch with a change changeset: 11:d3f163457ebf branch: b user: test date: Thu Jan 01 00:00:09 1970 +0000 summary: prune bad branch default branch colors: $ cat <> $HGRCPATH > [extensions] > color = > [color] > mode = ansi > EOF $ hg up -C c 3 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg commit -d '9 0' --close-branch -m 'reclosing this branch' $ hg up -C b 2 files updated, 0 files merged, 3 files removed, 0 files unresolved $ hg branches --color=always \x1b[0;32mb\x1b[0m\x1b[0;33m 13:e23b5505d1ad\x1b[0m (esc) \x1b[0;0ma branch name much longer than the default justification used by branches\x1b[0m\x1b[0;33m 7:10ff5895aa57\x1b[0m (esc) \x1b[0;0ma\x1b[0m\x1b[0;33m 5:d8cbc61dbaa6\x1b[0m (inactive) (esc) \x1b[0;0mdefault\x1b[0m\x1b[0;33m 0:19709c5a4e75\x1b[0m (inactive) (esc) default closed branch color: $ hg branches --color=always --closed \x1b[0;32mb\x1b[0m\x1b[0;33m 13:e23b5505d1ad\x1b[0m (esc) \x1b[0;0ma branch name much longer than the default justification used by branches\x1b[0m\x1b[0;33m 7:10ff5895aa57\x1b[0m (esc) \x1b[0;30;1mc\x1b[0m\x1b[0;33m 14:f894c25619d3\x1b[0m (closed) (esc) \x1b[0;0ma\x1b[0m\x1b[0;33m 5:d8cbc61dbaa6\x1b[0m (inactive) (esc) \x1b[0;0mdefault\x1b[0m\x1b[0;33m 0:19709c5a4e75\x1b[0m (inactive) (esc) $ cat <> $HGRCPATH > [extensions] > color = > [color] > branches.active = green > branches.closed = blue > branches.current = red > branches.inactive = magenta > log.changeset = cyan > EOF custom branch colors: $ hg branches --color=always \x1b[0;31mb\x1b[0m\x1b[0;36m 13:e23b5505d1ad\x1b[0m (esc) \x1b[0;32ma branch name much longer than the default justification used by branches\x1b[0m\x1b[0;36m 7:10ff5895aa57\x1b[0m (esc) \x1b[0;35ma\x1b[0m\x1b[0;36m 5:d8cbc61dbaa6\x1b[0m (inactive) (esc) \x1b[0;35mdefault\x1b[0m\x1b[0;36m 
0:19709c5a4e75\x1b[0m (inactive) (esc) custom closed branch color: $ hg branches --color=always --closed \x1b[0;31mb\x1b[0m\x1b[0;36m 13:e23b5505d1ad\x1b[0m (esc) \x1b[0;32ma branch name much longer than the default justification used by branches\x1b[0m\x1b[0;36m 7:10ff5895aa57\x1b[0m (esc) \x1b[0;34mc\x1b[0m\x1b[0;36m 14:f894c25619d3\x1b[0m (closed) (esc) \x1b[0;35ma\x1b[0m\x1b[0;36m 5:d8cbc61dbaa6\x1b[0m (inactive) (esc) \x1b[0;35mdefault\x1b[0m\x1b[0;36m 0:19709c5a4e75\x1b[0m (inactive) (esc) template output: $ hg branches -Tjson --closed [ { "active": true, "branch": "b", "closed": false, "current": true, "node": "e23b5505d1ad24aab6f84fd8c7cb8cd8e5e93be0", "rev": 13 }, { "active": true, "branch": "a branch name much longer than the default justification used by branches", "closed": false, "current": false, "node": "10ff5895aa5793bd378da574af8cec8ea408d831", "rev": 7 }, { "active": false, "branch": "c", "closed": true, "current": false, "node": "f894c25619d3f1484639d81be950e0a07bc6f1f6", "rev": 14 }, { "active": false, "branch": "a", "closed": false, "current": false, "node": "d8cbc61dbaa6dc817175d1e301eecb863f280832", "rev": 5 }, { "active": false, "branch": "default", "closed": false, "current": false, "node": "19709c5a4e75bf938f8e349aff97438539bb729e", "rev": 0 } ] Tests of revision branch name caching We rev branch cache is updated automatically. In these tests we use a trick to trigger rebuilds. We remove the branch head cache and run 'hg head' to cause a rebuild that also will populate the rev branch cache. 
revision branch cache is created when building the branch head cache $ rm -rf .hg/cache; hg head a -T '{rev}\n' 5 $ f --hexdump --size .hg/cache/rbc-* .hg/cache/rbc-names-v1: size=87 0000: 64 65 66 61 75 6c 74 00 61 00 62 00 63 00 61 20 |default.a.b.c.a | 0010: 62 72 61 6e 63 68 20 6e 61 6d 65 20 6d 75 63 68 |branch name much| 0020: 20 6c 6f 6e 67 65 72 20 74 68 61 6e 20 74 68 65 | longer than the| 0030: 20 64 65 66 61 75 6c 74 20 6a 75 73 74 69 66 69 | default justifi| 0040: 63 61 74 69 6f 6e 20 75 73 65 64 20 62 79 20 62 |cation used by b| 0050: 72 61 6e 63 68 65 73 |ranches| .hg/cache/rbc-revs-v1: size=120 0000: 19 70 9c 5a 00 00 00 00 dd 6b 44 0d 00 00 00 01 |.p.Z.....kD.....| 0010: 88 1f e2 b9 00 00 00 01 ac 22 03 33 00 00 00 02 |.........".3....| 0020: ae e3 9c d1 00 00 00 02 d8 cb c6 1d 00 00 00 01 |................| 0030: 58 97 36 a2 00 00 00 03 10 ff 58 95 00 00 00 04 |X.6.......X.....| 0040: ee bb 94 44 00 00 00 02 5f 40 61 bb 00 00 00 02 |...D...._@a.....| 0050: bf be 84 1b 00 00 00 02 d3 f1 63 45 80 00 00 02 |..........cE....| 0060: e3 d4 9c 05 80 00 00 02 e2 3b 55 05 00 00 00 02 |.........;U.....| 0070: f8 94 c2 56 80 00 00 03 |...V....| #if unix-permissions no-root no errors when revbranchcache is not writable $ echo >> .hg/cache/rbc-revs-v1 $ chmod a-w .hg/cache/rbc-revs-v1 $ rm -f .hg/cache/branch* && hg head a -T '{rev}\n' 5 $ chmod a+w .hg/cache/rbc-revs-v1 #endif recovery from invalid cache revs file with trailing data $ echo >> .hg/cache/rbc-revs-v1 $ rm -f .hg/cache/branch* && hg head a -T '{rev}\n' --debug 5 truncating cache/rbc-revs-v1 to 120 $ f --size .hg/cache/rbc-revs* .hg/cache/rbc-revs-v1: size=120 recovery from invalid cache file with partial last record $ mv .hg/cache/rbc-revs-v1 . 
$ f -qDB 119 rbc-revs-v1 > .hg/cache/rbc-revs-v1 $ f --size .hg/cache/rbc-revs* .hg/cache/rbc-revs-v1: size=119 $ rm -f .hg/cache/branch* && hg head a -T '{rev}\n' --debug 5 truncating cache/rbc-revs-v1 to 112 $ f --size .hg/cache/rbc-revs* .hg/cache/rbc-revs-v1: size=120 recovery from invalid cache file with missing record - no truncation $ mv .hg/cache/rbc-revs-v1 . $ f -qDB 112 rbc-revs-v1 > .hg/cache/rbc-revs-v1 $ rm -f .hg/cache/branch* && hg head a -T '{rev}\n' --debug 5 $ f --size .hg/cache/rbc-revs* .hg/cache/rbc-revs-v1: size=120 recovery from invalid cache file with some bad records $ mv .hg/cache/rbc-revs-v1 . $ f -qDB 8 rbc-revs-v1 > .hg/cache/rbc-revs-v1 $ f --size .hg/cache/rbc-revs* .hg/cache/rbc-revs-v1: size=8 $ f -qDB 112 rbc-revs-v1 >> .hg/cache/rbc-revs-v1 $ f --size .hg/cache/rbc-revs* .hg/cache/rbc-revs-v1: size=120 $ hg log -r 'branch(.)' -T '{rev} ' --debug 3 4 8 9 10 11 12 13 truncating cache/rbc-revs-v1 to 8 $ rm -f .hg/cache/branch* && hg head a -T '{rev}\n' --debug 5 truncating cache/rbc-revs-v1 to 104 $ f --size --hexdump --bytes=16 .hg/cache/rbc-revs* .hg/cache/rbc-revs-v1: size=120 0000: 19 70 9c 5a 00 00 00 00 dd 6b 44 0d 00 00 00 01 |.p.Z.....kD.....| cache is updated when committing $ hg branch i-will-regret-this marked working directory as branch i-will-regret-this $ hg ci -m regrets $ f --size .hg/cache/rbc-* .hg/cache/rbc-names-v1: size=106 .hg/cache/rbc-revs-v1: size=128 update after rollback - the cache will be correct but rbc-names will will still contain the branch name even though it no longer is used $ hg up -qr '.^' $ hg rollback -qf $ f --size --hexdump .hg/cache/rbc-* .hg/cache/rbc-names-v1: size=106 0000: 64 65 66 61 75 6c 74 00 61 00 62 00 63 00 61 20 |default.a.b.c.a | 0010: 62 72 61 6e 63 68 20 6e 61 6d 65 20 6d 75 63 68 |branch name much| 0020: 20 6c 6f 6e 67 65 72 20 74 68 61 6e 20 74 68 65 | longer than the| 0030: 20 64 65 66 61 75 6c 74 20 6a 75 73 74 69 66 69 | default justifi| 0040: 63 61 74 69 6f 6e 20 75 73 
65 64 20 62 79 20 62 |cation used by b| 0050: 72 61 6e 63 68 65 73 00 69 2d 77 69 6c 6c 2d 72 |ranches.i-will-r| 0060: 65 67 72 65 74 2d 74 68 69 73 |egret-this| .hg/cache/rbc-revs-v1: size=120 0000: 19 70 9c 5a 00 00 00 00 dd 6b 44 0d 00 00 00 01 |.p.Z.....kD.....| 0010: 88 1f e2 b9 00 00 00 01 ac 22 03 33 00 00 00 02 |.........".3....| 0020: ae e3 9c d1 00 00 00 02 d8 cb c6 1d 00 00 00 01 |................| 0030: 58 97 36 a2 00 00 00 03 10 ff 58 95 00 00 00 04 |X.6.......X.....| 0040: ee bb 94 44 00 00 00 02 5f 40 61 bb 00 00 00 02 |...D...._@a.....| 0050: bf be 84 1b 00 00 00 02 d3 f1 63 45 80 00 00 02 |..........cE....| 0060: e3 d4 9c 05 80 00 00 02 e2 3b 55 05 00 00 00 02 |.........;U.....| 0070: f8 94 c2 56 80 00 00 03 |...V....| cache is updated/truncated when stripping - it is thus very hard to get in a situation where the cache is out of sync and the hash check detects it $ hg --config extensions.strip= strip -r tip --nob $ f --size .hg/cache/rbc-revs* .hg/cache/rbc-revs-v1: size=112 $ cd .. mercurial-3.7.3/tests/test-casefolding.t0000644000175000017500000001117612676531525017717 0ustar mpmmpm00000000000000#require icasefs $ hg debugfs | grep 'case-sensitive:' case-sensitive: no test file addition with bad case $ hg init repo1 $ cd repo1 $ echo a > a $ hg add A adding a $ hg st A a $ hg ci -m adda $ hg manifest a $ cd .. 
test case collision on rename (issue750) $ hg init repo2 $ cd repo2 $ echo a > a $ hg --debug ci -Am adda adding a committing files: a committing manifest committing changelog committed changeset 0:07f4944404050f47db2e5c5071e0e84e7a27bba9 Case-changing renames should work: $ hg mv a A $ hg mv A a $ hg st addremove after case-changing rename has no effect (issue4590) $ hg mv a A $ hg addremove recording removal of a as rename to A (100% similar) $ hg revert --all forgetting A undeleting a test changing case of path components $ mkdir D $ echo b > D/b $ hg ci -Am addb D/b $ hg mv D/b d/b D/b: not overwriting - file exists $ hg mv D/b d/c $ hg st A D/c R D/b $ mv D temp $ mv temp d $ hg st A D/c R D/b $ hg revert -aq $ rm d/c $ echo c > D/c $ hg add D/c $ hg st A D/c $ hg ci -m addc D/c $ hg mv d/b d/e moving D/b to D/e (glob) $ hg st A D/e R D/b $ hg revert -aq $ rm d/e $ hg mv d/b D/B moving D/b to D/B (glob) $ hg st A D/B R D/b $ cd .. test case collision between revisions (issue912) $ hg init repo3 $ cd repo3 $ echo a > a $ hg ci -Am adda adding a $ hg rm a $ hg ci -Am removea $ echo A > A on linux hfs keeps the old case stored, force it $ mv a aa $ mv aa A $ hg ci -Am addA adding A used to fail under case insensitive fs $ hg up -C 0 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg up -C 1 files updated, 0 files merged, 1 files removed, 0 files unresolved no clobbering of untracked files with wrong casing $ hg up -r null 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo gold > a $ hg up A: untracked file differs abort: untracked files in working directory differ from files in requested revision [255] $ cat a gold $ rm a test that normal file in different case on target context is not unlinked by largefiles extension. 
$ cat >> .hg/hgrc < [extensions] > largefiles= > EOF $ hg update -q -C 1 $ hg status -A $ echo 'A as largefiles' > A $ hg add --large A $ hg commit -m '#3' created new head $ hg manifest -r 3 .hglf/A $ hg manifest -r 0 a $ hg update -q -C 0 $ hg status -A C a $ hg update -q -C 3 $ hg update -q 0 $ hg up -C -r 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg mv A a $ hg diff -g > rename.diff $ hg ci -m 'A -> a' $ hg up -q '.^' $ hg import rename.diff -m "import rename A -> a" applying rename.diff $ hg st ? rename.diff $ hg files a $ find * | sort a rename.diff $ rm rename.diff $ cd .. issue 3342: file in nested directory causes unexpected abort $ hg init issue3342 $ cd issue3342 $ mkdir -p a/B/c/D $ echo e > a/B/c/D/e $ hg add a/B/c/D/e $ hg ci -m 'add e' issue 4481: revert across case only renames $ hg mv a/B/c/D/e a/B/c/d/E $ hg ci -m "uppercase E" $ echo 'foo' > a/B/c/D/E $ hg ci -m 'e content change' $ hg revert --all -r 0 removing a/B/c/D/E (glob) adding a/B/c/D/e (glob) $ find * | sort a a/B a/B/c a/B/c/D a/B/c/D/e a/B/c/D/e.orig $ cd .. issue 3340: mq does not handle case changes correctly in addition to reported case, 'hg qrefresh' is also tested against case changes. 
$ echo "[extensions]" >> $HGRCPATH $ echo "mq=" >> $HGRCPATH $ hg init issue3340 $ cd issue3340 $ echo a > mIxEdCaSe $ hg add mIxEdCaSe $ hg commit -m '#0' $ hg rename mIxEdCaSe tmp $ hg rename tmp MiXeDcAsE $ hg status -A A MiXeDcAsE mIxEdCaSe R mIxEdCaSe $ hg qnew changecase $ hg status -A C MiXeDcAsE $ hg qpop -a popping changecase patch queue now empty $ hg qnew refresh-casechange $ hg status -A C mIxEdCaSe $ hg rename mIxEdCaSe tmp $ hg rename tmp MiXeDcAsE $ hg status -A A MiXeDcAsE mIxEdCaSe R mIxEdCaSe $ hg qrefresh $ hg status -A C MiXeDcAsE $ hg qpop -a popping refresh-casechange patch queue now empty $ hg qnew refresh-pattern $ hg status $ echo A > A $ hg add adding A $ hg qrefresh a # issue 3271, qrefresh with file handled case wrong $ hg status # empty status means the qrefresh worked #if osx We assume anyone running the tests on a case-insensitive volume on OS X will be using HFS+. If that's not true, this test will fail. $ rm A >>> open(u'a\u200c'.encode('utf-8'), 'w').write('unicode is fun') $ hg status M A #endif $ cd .. mercurial-3.7.3/tests/test-convert.t0000644000175000017500000005012312676531525017114 0ustar mpmmpm00000000000000 $ cat >> $HGRCPATH < [extensions] > convert= > [convert] > hg.saverev=False > EOF $ hg help convert hg convert [OPTION]... SOURCE [DEST [REVMAP]] convert a foreign SCM repository to a Mercurial one. Accepted source formats [identifiers]: - Mercurial [hg] - CVS [cvs] - Darcs [darcs] - git [git] - Subversion [svn] - Monotone [mtn] - GNU Arch [gnuarch] - Bazaar [bzr] - Perforce [p4] Accepted destination formats [identifiers]: - Mercurial [hg] - Subversion [svn] (history on branches is not preserved) If no revision is given, all revisions will be converted. Otherwise, convert will only import up to the named revision (given in a format understood by the source). If no destination directory name is specified, it defaults to the basename of the source with "-hg" appended. 
If the destination repository doesn't exist, it will be created. By default, all sources except Mercurial will use --branchsort. Mercurial uses --sourcesort to preserve original revision numbers order. Sort modes have the following effects: --branchsort convert from parent to child revision when possible, which means branches are usually converted one after the other. It generates more compact repositories. --datesort sort revisions by date. Converted repositories have good- looking changelogs but are often an order of magnitude larger than the same ones generated by --branchsort. --sourcesort try to preserve source revisions order, only supported by Mercurial sources. --closesort try to move closed revisions as close as possible to parent branches, only supported by Mercurial sources. If "REVMAP" isn't given, it will be put in a default location ("/.hg/shamap" by default). The "REVMAP" is a simple text file that maps each source commit ID to the destination ID for that revision, like so: If the file doesn't exist, it's automatically created. It's updated on each commit copied, so 'hg convert' can be interrupted and can be run repeatedly to copy new commits. The authormap is a simple text file that maps each source commit author to a destination commit author. It is handy for source SCMs that use unix logins to identify authors (e.g.: CVS). One line per author mapping and the line format is: source author = destination author Empty lines and lines starting with a "#" are ignored. The filemap is a file that allows filtering and remapping of files and directories. Each line can contain one of the following directives: include path/to/file-or-dir exclude path/to/file-or-dir rename path/to/source path/to/destination Comment lines start with "#". A specified path matches if it equals the full relative name of a file or one of its parent directories. The "include" or "exclude" directive with the longest matching path applies, so line order does not matter. 
The "include" directive causes a file, or all files under a directory, to be included in the destination repository. The default if there are no "include" statements is to include everything. If there are any "include" statements, nothing else is included. The "exclude" directive causes files or directories to be omitted. The "rename" directive renames a file or directory if it is converted. To rename from a subdirectory into the root of the repository, use "." as the path to rename to. "--full" will make sure the converted changesets contain exactly the right files with the right content. It will make a full conversion of all files, not just the ones that have changed. Files that already are correct will not be changed. This can be used to apply filemap changes when converting incrementally. This is currently only supported for Mercurial and Subversion. The splicemap is a file that allows insertion of synthetic history, letting you specify the parents of a revision. This is useful if you want to e.g. give a Subversion merge two parents, or graft two disconnected series of history together. Each entry contains a key, followed by a space, followed by one or two comma-separated values: key parent1, parent2 The key is the revision ID in the source revision control system whose parents should be modified (same format as a key in .hg/shamap). The values are the revision IDs (in either the source or destination revision control system) that should be used as the new parents for that node. For example, if you have merged "release-1.0" into "trunk", then you should specify the revision on "trunk" as the first parent and the one on the "release-1.0" branch as the second. The branchmap is a file that allows you to rename a branch when it is being brought in from whatever external repository. 
When used in conjunction with a splicemap, it allows for a powerful combination to help fix even the most badly mismanaged repositories and turn them into nicely structured Mercurial repositories. The branchmap contains lines of the form: original_branch_name new_branch_name where "original_branch_name" is the name of the branch in the source repository, and "new_branch_name" is the name of the branch is the destination repository. No whitespace is allowed in the branch names. This can be used to (for instance) move code in one repository from "default" to a named branch. Mercurial Source ################ The Mercurial source recognizes the following configuration options, which you can set on the command line with "--config": convert.hg.ignoreerrors ignore integrity errors when reading. Use it to fix Mercurial repositories with missing revlogs, by converting from and to Mercurial. Default is False. convert.hg.saverev store original revision ID in changeset (forces target IDs to change). It takes a boolean argument and defaults to False. convert.hg.startrev specify the initial Mercurial revision. The default is 0. convert.hg.revs revset specifying the source revisions to convert. CVS Source ########## CVS source will use a sandbox (i.e. a checked-out copy) from CVS to indicate the starting point of what will be converted. Direct access to the repository files is not needed, unless of course the repository is ":local:". The conversion uses the top level directory in the sandbox to find the CVS repository, and then uses CVS rlog commands to find files to convert. This means that unless a filemap is given, all files under the starting directory will be converted, and that any directory reorganization in the CVS sandbox is ignored. The following options can be used with "--config": convert.cvsps.cache Set to False to disable remote log caching, for testing and debugging purposes. Default is True. 
convert.cvsps.fuzz Specify the maximum time (in seconds) that is allowed between commits with identical user and log message in a single changeset. When very large files were checked in as part of a changeset then the default may not be long enough. The default is 60. convert.cvsps.mergeto Specify a regular expression to which commit log messages are matched. If a match occurs, then the conversion process will insert a dummy revision merging the branch on which this log message occurs to the branch indicated in the regex. Default is "{{mergetobranch ([-\w]+)}}" convert.cvsps.mergefrom Specify a regular expression to which commit log messages are matched. If a match occurs, then the conversion process will add the most recent revision on the branch indicated in the regex as the second parent of the changeset. Default is "{{mergefrombranch ([-\w]+)}}" convert.localtimezone use local time (as determined by the TZ environment variable) for changeset date/times. The default is False (use UTC). hooks.cvslog Specify a Python function to be called at the end of gathering the CVS log. The function is passed a list with the log entries, and can modify the entries in-place, or add or delete them. hooks.cvschangesets Specify a Python function to be called after the changesets are calculated from the CVS log. The function is passed a list with the changeset entries, and can modify the changesets in-place, or add or delete them. An additional "debugcvsps" Mercurial command allows the builtin changeset merging code to be run without doing a conversion. Its parameters and output are similar to that of cvsps 2.1. Please see the command help for more details. Subversion Source ################# Subversion source detects classical trunk/branches/tags layouts. By default, the supplied "svn://repo/path/" source URL is converted as a single branch. If "svn://repo/path/trunk" exists it replaces the default branch. 
If "svn://repo/path/branches" exists, its subdirectories are listed as possible branches. If "svn://repo/path/tags" exists, it is looked for tags referencing converted branches. Default "trunk", "branches" and "tags" values can be overridden with following options. Set them to paths relative to the source URL, or leave them blank to disable auto detection. The following options can be set with "--config": convert.svn.branches specify the directory containing branches. The default is "branches". convert.svn.tags specify the directory containing tags. The default is "tags". convert.svn.trunk specify the name of the trunk branch. The default is "trunk". convert.localtimezone use local time (as determined by the TZ environment variable) for changeset date/times. The default is False (use UTC). Source history can be retrieved starting at a specific revision, instead of being integrally converted. Only single branch conversions are supported. convert.svn.startrev specify start Subversion revision number. The default is 0. Git Source ########## The Git importer converts commits from all reachable branches (refs in refs/heads) and remotes (refs in refs/remotes) to Mercurial. Branches are converted to bookmarks with the same name, with the leading 'refs/heads' stripped. Git submodules are converted to Git subrepos in Mercurial. The following options can be set with "--config": convert.git.similarity specify how similar files modified in a commit must be to be imported as renames or copies, as a percentage between "0" (disabled) and "100" (files must be identical). For example, "90" means that a delete/add pair will be imported as a rename if more than 90% of the file hasn't changed. The default is "50". convert.git.findcopiesharder while detecting copies, look at all files in the working copy instead of just changed ones. This is very expensive for large projects, and is only effective when "convert.git.similarity" is greater than 0. The default is False. 
convert.git.remoteprefix remote refs are converted as bookmarks with "convert.git.remoteprefix" as a prefix followed by a /. The default is 'remote'. convert.git.skipsubmodules does not convert root level .gitmodules files or files with 160000 mode indicating a submodule. Default is False. Perforce Source ############### The Perforce (P4) importer can be given a p4 depot path or a client specification as source. It will convert all files in the source to a flat Mercurial repository, ignoring labels, branches and integrations. Note that when a depot path is given you then usually should specify a target directory, because otherwise the target may be named "...-hg". The following options can be set with "--config": convert.p4.encoding specify the encoding to use when decoding standard output of the Perforce command line tool. The default is default system encoding. convert.p4.startrev specify initial Perforce revision (a Perforce changelist number). Mercurial Destination ##################### The Mercurial destination will recognize Mercurial subrepositories in the destination directory, and update the .hgsubstate file automatically if the destination subrepositories contain the //.hg/shamap file. Converting a repository with subrepositories requires converting a single repository at a time, from the bottom up. The following options are supported: convert.hg.clonebranches dispatch source branches in separate clones. The default is False. convert.hg.tagsbranch branch name for tag revisions, defaults to "default". convert.hg.usebranchnames preserve branch names. The default is True. convert.hg.sourcename records the given string as a 'convert_source' extra value on each commit made in the target repository. The default is None. All Destinations ################ All destination types accept the following options: convert.skiptags does not convert tags from the source repo to the target repo. The default is False. 
options ([+] can be repeated): -s --source-type TYPE source repository type -d --dest-type TYPE destination repository type -r --rev REV [+] import up to source revision REV -A --authormap FILE remap usernames using this file --filemap FILE remap file names using contents of file --full apply filemap changes by converting all files again --splicemap FILE splice synthesized history into place --branchmap FILE change branch names while converting --branchsort try to sort changesets by branches --datesort try to sort changesets by date --sourcesort preserve source changesets order --closesort try to reorder closed revisions (some details hidden, use --verbose to show complete help) $ hg init a $ cd a $ echo a > a $ hg ci -d'0 0' -Ama adding a $ hg cp a b $ hg ci -d'1 0' -mb $ hg rm a $ hg ci -d'2 0' -mc $ hg mv b a $ hg ci -d'3 0' -md $ echo a >> a $ hg ci -d'4 0' -me $ cd .. $ hg convert a 2>&1 | grep -v 'subversion python bindings could not be loaded' assuming destination a-hg initializing destination a-hg repository scanning source... sorting... converting... 4 a 3 b 2 c 1 d 0 e $ hg --cwd a-hg pull ../a pulling from ../a searching for changes no changes found conversion to existing file should fail $ touch bogusfile $ hg convert a bogusfile initializing destination bogusfile repository abort: cannot create new bundle repository [255] #if unix-permissions no-root conversion to dir without permissions should fail $ mkdir bogusdir $ chmod 000 bogusdir $ hg convert a bogusdir abort: Permission denied: 'bogusdir' [255] user permissions should succeed $ chmod 700 bogusdir $ hg convert a bogusdir initializing destination bogusdir repository scanning source... sorting... converting... 
4 a 3 b 2 c 1 d 0 e #endif test pre and post conversion actions $ echo 'include b' > filemap $ hg convert --debug --filemap filemap a partialb | \ > grep 'run hg' run hg source pre-conversion action run hg sink pre-conversion action run hg sink post-conversion action run hg source post-conversion action converting empty dir should fail "nicely $ mkdir emptydir override $PATH to ensure p4 not visible; use $PYTHON in case we're running from a devel copy, not a temp installation $ PATH="$BINDIR" $PYTHON "$BINDIR"/hg convert emptydir assuming destination emptydir-hg initializing destination emptydir-hg repository emptydir does not look like a CVS checkout emptydir does not look like a Git repository emptydir does not look like a Subversion repository emptydir is not a local Mercurial repository emptydir does not look like a darcs repository emptydir does not look like a monotone repository emptydir does not look like a GNU Arch repository emptydir does not look like a Bazaar repository cannot find required "p4" tool abort: emptydir: missing or unsupported repository [255] convert with imaginary source type $ hg convert --source-type foo a a-foo initializing destination a-foo repository abort: foo: invalid source repository type [255] convert with imaginary sink type $ hg convert --dest-type foo a a-foo abort: foo: invalid destination repository type [255] testing: convert must not produce duplicate entries in fncache $ hg convert a b initializing destination b repository scanning source... sorting... converting... 4 a 3 b 2 c 1 d 0 e contents of fncache file: $ cat b/.hg/store/fncache | sort data/a.i data/b.i test bogus URL $ hg convert -q bzr+ssh://foobar@selenic.com/baz baz abort: bzr+ssh://foobar@selenic.com/baz: missing or unsupported repository [255] test revset converted() lookup $ hg --config convert.hg.saverev=True convert a c initializing destination c repository scanning source... sorting... converting... 
4 a 3 b 2 c 1 d 0 e $ echo f > c/f $ hg -R c ci -d'0 0' -Amf adding f created new head $ hg -R c log -r "converted(09d945a62ce6)" changeset: 1:98c3dd46a874 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: b $ hg -R c log -r "converted()" changeset: 0:31ed57b2037c user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a changeset: 1:98c3dd46a874 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: b changeset: 2:3b9ca06ef716 user: test date: Thu Jan 01 00:00:02 1970 +0000 summary: c changeset: 3:4e0debd37cf2 user: test date: Thu Jan 01 00:00:03 1970 +0000 summary: d changeset: 4:9de3bc9349c5 user: test date: Thu Jan 01 00:00:04 1970 +0000 summary: e test specifying a sourcename $ echo g > a/g $ hg -R a ci -d'0 0' -Amg adding g $ hg --config convert.hg.sourcename=mysource --config convert.hg.saverev=True convert a c scanning source... sorting... converting... 0 g $ hg -R c log -r tip --template '{extras % "{extra}\n"}' branch=default convert_revision=a3bc6100aa8ec03e00aaf271f1f50046fb432072 convert_source=mysource mercurial-3.7.3/tests/test-status-inprocess.py.out0000644000175000017500000000007612676531525021757 0ustar mpmmpm00000000000000% creating repo % add and commit C test.py % change M test.py mercurial-3.7.3/tests/test-hgwebdirsym.t0000644000175000017500000000316212676531525017761 0ustar mpmmpm00000000000000#require serve symlink Tests whether or not hgwebdir properly handles various symlink topologies. hide outer repo $ hg init $ hg init a $ echo a > a/a $ hg --cwd a ci -Ama -d'1 0' adding a $ mkdir webdir $ cd webdir $ hg init b $ echo b > b/b $ hg --cwd b ci -Amb -d'2 0' adding b $ hg init c $ echo c > c/c $ hg --cwd c ci -Amc -d'3 0' adding c $ ln -s ../a al $ ln -s ../webdir circle $ root=`pwd` $ cd .. 
$ cat > collections.conf < [collections] > $root=$root > EOF $ hg serve -p $HGPORT -d --pid-file=hg.pid --webdir-conf collections.conf \ > -A access-collections.log -E error-collections.log $ cat hg.pid >> $DAEMON_PIDS should succeed $ get-with-headers.py localhost:$HGPORT '?style=raw' 200 Script output follows /al/ /b/ /c/ $ get-with-headers.py localhost:$HGPORT 'al/file/tip/a?style=raw' 200 Script output follows a $ get-with-headers.py localhost:$HGPORT 'b/file/tip/b?style=raw' 200 Script output follows b $ get-with-headers.py localhost:$HGPORT 'c/file/tip/c?style=raw' 200 Script output follows c should fail $ get-with-headers.py localhost:$HGPORT 'circle/al/file/tip/a?style=raw' 404 Not Found error: repository circle/al/file/tip/a not found [1] $ get-with-headers.py localhost:$HGPORT 'circle/b/file/tip/a?style=raw' 404 Not Found error: repository circle/b/file/tip/a not found [1] $ get-with-headers.py localhost:$HGPORT 'circle/c/file/tip/a?style=raw' 404 Not Found error: repository circle/c/file/tip/a not found [1] collections errors $ cat error-collections.log mercurial-3.7.3/tests/test-rollback.t0000644000175000017500000001252212676531525017226 0ustar mpmmpm00000000000000setup repo $ hg init t $ cd t $ echo a > a $ hg commit -Am'add a' adding a $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 1 changesets, 1 total revisions $ hg parents changeset: 0:1f0dee641bb7 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add a rollback to null revision $ hg status $ hg rollback repository tip rolled back to revision -1 (undo commit) working directory now based on revision -1 $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 0 files, 0 changesets, 0 total revisions $ hg parents $ hg status A a Two changesets this time so we rollback to a real changeset $ hg commit -m'add a again' $ echo a >> a $ hg commit -m'modify a' 
Test issue 902 (current branch is preserved) $ hg branch test marked working directory as branch test (branches are permanent and global, did you want a bookmark?) $ hg rollback repository tip rolled back to revision 0 (undo commit) working directory now based on revision 0 $ hg branch default Test issue 1635 (commit message saved) $ cat .hg/last-message.txt ; echo modify a Test rollback of hg before issue 902 was fixed $ hg commit -m "test3" $ hg branch test marked working directory as branch test (branches are permanent and global, did you want a bookmark?) $ rm .hg/undo.branch $ hg rollback repository tip rolled back to revision 0 (undo commit) named branch could not be reset: current branch is still 'test' working directory now based on revision 0 $ hg branch test working dir unaffected by rollback: do not restore dirstate et. al. $ hg log --template '{rev} {branch} {desc|firstline}\n' 0 default add a again $ hg status M a $ hg bookmark foo $ hg commit -m'modify a again' $ echo b > b $ hg bookmark bar -r default #making bar active, before the transaction $ hg commit -Am'add b' adding b $ hg log --template '{rev} {branch} {desc|firstline}\n' 2 test add b 1 test modify a again 0 default add a again $ hg update bar 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (activating bookmark bar) $ cat .hg/undo.branch ; echo test $ hg rollback -f repository tip rolled back to revision 1 (undo commit) $ hg id -n 0 $ hg branch default $ cat .hg/bookmarks.current ; echo bar $ hg bookmark --delete foo bar rollback by pretxncommit saves commit message (issue1635) $ echo a >> a $ hg --config hooks.pretxncommit=false commit -m"precious commit message" transaction abort! 
rollback completed abort: pretxncommit hook exited with status * (glob) [255] $ cat .hg/last-message.txt ; echo precious commit message same thing, but run $EDITOR $ cat > editor.sh << '__EOF__' > echo "another precious commit message" > "$1" > __EOF__ $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg --config hooks.pretxncommit=false commit 2>&1 note: commit message saved in .hg/last-message.txt transaction abort! rollback completed abort: pretxncommit hook exited with status * (glob) [255] $ cat .hg/last-message.txt another precious commit message test rollback on served repository #if serve $ hg commit -m "precious commit message" $ hg serve -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log $ cat hg.pid >> $DAEMON_PIDS $ cd .. $ hg clone http://localhost:$HGPORT u requesting all changes adding changesets adding manifests adding file changes added 3 changesets with 2 changes to 1 files (+1 heads) updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd u $ hg id default 068774709090 now rollback and observe that 'hg serve' reloads the repository and presents the correct tip changeset: $ hg -R ../t rollback repository tip rolled back to revision 1 (undo commit) working directory now based on revision 0 $ hg id default 791dd2169706 #endif update to older changeset and then refuse rollback, because that would lose data (issue2998) $ cd ../t $ hg -q update $ rm `hg status -un` $ template='{rev}:{node|short} [{branch}] {desc|firstline}\n' $ echo 'valuable new file' > b $ echo 'valuable modification' >> a $ hg commit -A -m'a valuable change' adding b $ hg update 0 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg rollback abort: rollback of last commit while not checked out may lose data (use -f to force) [255] $ hg tip -q 2:4d9cd3795eea $ hg rollback -f repository tip rolled back to revision 1 (undo commit) $ hg status $ hg log --removed b # yep, it's gone same again, but emulate an old client that 
doesn't write undo.desc $ hg -q update $ echo 'valuable modification redux' >> a $ hg commit -m'a valuable change redux' $ rm .hg/undo.desc $ hg update 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg rollback rolling back unknown transaction $ cat a a corrupt journal test $ echo "foo" > .hg/store/journal $ hg recover rolling back interrupted transaction couldn't read journal entry 'foo\n'! checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 2 changesets, 2 total revisions mercurial-3.7.3/tests/test-hgignore.t0000644000175000017500000001272012676531525017237 0ustar mpmmpm00000000000000 $ hg init ignorerepo $ cd ignorerepo Issue562: .hgignore requires newline at end: $ touch foo $ touch bar $ touch baz $ cat > makeignore.py < f = open(".hgignore", "w") > f.write("ignore\n") > f.write("foo\n") > # No EOL here > f.write("bar") > f.close() > EOF $ python makeignore.py Should display baz only: $ hg status ? baz $ rm foo bar baz .hgignore makeignore.py $ touch a.o $ touch a.c $ touch syntax $ mkdir dir $ touch dir/a.o $ touch dir/b.o $ touch dir/c.o $ hg add dir/a.o $ hg commit -m 0 $ hg add dir/b.o $ hg status A dir/b.o ? a.c ? a.o ? dir/c.o ? syntax $ echo "*.o" > .hgignore $ hg status abort: $TESTTMP/ignorerepo/.hgignore: invalid pattern (relre): *.o (glob) [255] $ echo ".*\.o" > .hgignore $ hg status A dir/b.o ? .hgignore ? a.c ? syntax Ensure that comments work: $ touch 'foo#bar' 'quux#' #if no-windows $ touch 'baz\#wat' #endif $ cat <<'EOF' >> .hgignore > # full-line comment > # whitespace-only comment line > syntax# pattern, no whitespace, then comment > a.c # pattern, then whitespace, then comment > baz\\# # escaped comment character > foo\#b # escaped comment character > quux\## escaped comment character at end of name > EOF $ hg status A dir/b.o ? 
.hgignore $ rm 'foo#bar' 'quux#' #if no-windows $ rm 'baz\#wat' #endif Check it does not ignore the current directory '.': $ echo "^\." > .hgignore $ hg status A dir/b.o ? a.c ? a.o ? dir/c.o ? syntax Test that patterns from ui.ignore options are read: $ echo > .hgignore $ cat >> $HGRCPATH << EOF > [ui] > ignore.other = $TESTTMP/ignorerepo/.hg/testhgignore > EOF $ echo "glob:**.o" > .hg/testhgignore $ hg status A dir/b.o ? .hgignore ? a.c ? syntax empty out testhgignore $ echo > .hg/testhgignore Test relative ignore path (issue4473): $ cat >> $HGRCPATH << EOF > [ui] > ignore.relative = .hg/testhgignorerel > EOF $ echo "glob:*.o" > .hg/testhgignorerel $ cd dir $ hg status A dir/b.o ? .hgignore ? a.c ? syntax $ cd .. $ echo > .hg/testhgignorerel $ echo "syntax: glob" > .hgignore $ echo "re:.*\.o" >> .hgignore $ hg status A dir/b.o ? .hgignore ? a.c ? syntax $ echo "syntax: invalid" > .hgignore $ hg status $TESTTMP/ignorerepo/.hgignore: ignoring invalid syntax 'invalid' (glob) A dir/b.o ? .hgignore ? a.c ? a.o ? dir/c.o ? syntax $ echo "syntax: glob" > .hgignore $ echo "*.o" >> .hgignore $ hg status A dir/b.o ? .hgignore ? a.c ? syntax $ echo "relglob:syntax*" > .hgignore $ hg status A dir/b.o ? .hgignore ? a.c ? a.o ? dir/c.o $ echo "relglob:*" > .hgignore $ hg status A dir/b.o $ cd dir $ hg status . A b.o $ hg debugignore (?:(?:|.*/)[^/]*(?:/|$)) $ hg debugignore b.o b.o is ignored (ignore rule in $TESTTMP/ignorerepo/.hgignore, line 1: '*') (glob) $ cd .. Check patterns that match only the directory $ echo "^dir\$" > .hgignore $ hg status A dir/b.o ? .hgignore ? a.c ? a.o ? syntax Check recursive glob pattern matches no directories (dir/**/c.o matches dir/c.o) $ echo "syntax: glob" > .hgignore $ echo "dir/**/c.o" >> .hgignore $ touch dir/c.o $ mkdir dir/subdir $ touch dir/subdir/c.o $ hg status A dir/b.o ? .hgignore ? a.c ? a.o ? 
syntax $ hg debugignore a.c a.c is not ignored $ hg debugignore dir/c.o dir/c.o is ignored (ignore rule in $TESTTMP/ignorerepo/.hgignore, line 2: 'dir/**/c.o') (glob) Check using 'include:' in ignore file $ hg purge --all --config extensions.purge= $ touch foo.included $ echo ".*.included" > otherignore $ hg status -I "include:otherignore" ? foo.included $ echo "include:otherignore" >> .hgignore $ hg status A dir/b.o ? .hgignore ? otherignore Check recursive uses of 'include:' $ echo "include:nested/ignore" >> otherignore $ mkdir nested $ echo "glob:*ignore" > nested/ignore $ hg status A dir/b.o $ cp otherignore goodignore $ echo "include:badignore" >> otherignore $ hg status skipping unreadable pattern file 'badignore': No such file or directory A dir/b.o $ mv goodignore otherignore Check using 'include:' while in a non-root directory $ cd .. $ hg -R ignorerepo status A dir/b.o $ cd ignorerepo Check including subincludes $ hg revert -q --all $ hg purge --all --config extensions.purge= $ echo ".hgignore" > .hgignore $ mkdir dir1 dir2 $ touch dir1/file1 dir1/file2 dir2/file1 dir2/file2 $ echo "subinclude:dir2/.hgignore" >> .hgignore $ echo "glob:file*2" > dir2/.hgignore $ hg status ? dir1/file1 ? dir1/file2 ? dir2/file1 Check including subincludes with regexs $ echo "subinclude:dir1/.hgignore" >> .hgignore $ echo "regexp:f.le1" > dir1/.hgignore $ hg status ? dir1/file2 ? dir2/file1 Check multiple levels of sub-ignores $ mkdir dir1/subdir $ touch dir1/subdir/subfile1 dir1/subdir/subfile3 dir1/subdir/subfile4 $ echo "subinclude:subdir/.hgignore" >> dir1/.hgignore $ echo "glob:subfil*3" >> dir1/subdir/.hgignore $ hg status ? dir1/file2 ? dir1/subdir/subfile4 ? 
dir2/file1 Check include subignore at the same level $ mv dir1/subdir/.hgignore dir1/.hgignoretwo $ echo "regexp:f.le1" > dir1/.hgignore $ echo "subinclude:.hgignoretwo" >> dir1/.hgignore $ echo "glob:file*2" > dir1/.hgignoretwo $ hg status | grep file2 [1] $ hg debugignore dir1/file2 dir1/file2 is ignored (ignore rule in dir2/.hgignore, line 1: 'file*2') mercurial-3.7.3/tests/test-empty-dir.t0000644000175000017500000000061512676531525017347 0ustar mpmmpm00000000000000 $ hg init $ echo 123 > a $ hg add a $ hg commit -m "first" a $ mkdir sub $ echo 321 > sub/b $ hg add sub/b $ hg commit -m "second" sub/b $ cat sub/b 321 $ hg co 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ cat sub/b 2>/dev/null || echo "sub/b not present" sub/b not present $ test -d sub || echo "sub not present" sub not present mercurial-3.7.3/tests/test-lock-badness.t0000644000175000017500000000370712676531525020007 0ustar mpmmpm00000000000000#require unix-permissions no-root no-windows Prepare $ hg init a $ echo a > a/a $ hg -R a ci -A -m a adding a $ hg clone a b updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved Test that raising an exception in the release function doesn't cause the lock to choke $ cat > testlock.py << EOF > from mercurial import cmdutil, error, error > > cmdtable = {} > command = cmdutil.command(cmdtable) > > def acquiretestlock(repo, releaseexc): > def unlock(): > if releaseexc: > raise error.Abort('expected release exception') > l = repo._lock(repo.vfs, 'testlock', False, unlock, None, 'test lock') > return l > > @command('testlockexc') > def testlockexc(ui, repo): > testlock = acquiretestlock(repo, True) > try: > testlock.release() > finally: > try: > testlock = acquiretestlock(repo, False) > except error.LockHeld: > raise error.Abort('lockfile on disk even after releasing!') > testlock.release() > EOF $ cat >> $HGRCPATH << EOF > [extensions] > testlock=$TESTTMP/testlock.py > EOF $ hg -R b testlockexc 
abort: expected release exception [255] One process waiting for another $ cat > hooks.py << EOF > import time > def sleepone(**x): time.sleep(1) > def sleephalf(**x): time.sleep(0.5) > EOF $ echo b > b/b $ hg -R b ci -A -m b --config hooks.precommit="python:`pwd`/hooks.py:sleepone" > stdout & $ hg -R b up -q --config hooks.pre-update="python:`pwd`/hooks.py:sleephalf" waiting for lock on working directory of b held by '*:*' (glob) got lock after ? seconds (glob) $ wait $ cat stdout adding b Pushing to a local read-only repo that can't be locked $ chmod 100 a/.hg/store $ hg -R b push a pushing to a searching for changes abort: could not lock repository a: Permission denied [255] $ chmod 700 a/.hg/store mercurial-3.7.3/tests/test-rebase-pull.t0000644000175000017500000000674112676531525017656 0ustar mpmmpm00000000000000 $ cat >> $HGRCPATH < [extensions] > rebase= > > [alias] > tglog = log -G --template "{rev}: '{desc}' {branches}\n" > EOF $ hg init a $ cd a $ echo C1 > C1 $ hg ci -Am C1 adding C1 $ echo C2 > C2 $ hg ci -Am C2 adding C2 $ cd .. 
$ hg clone a b updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg clone a c updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd b $ echo L1 > L1 $ hg ci -Am L1 adding L1 $ cd ../a $ echo R1 > R1 $ hg ci -Am R1 adding R1 $ cd ../b Now b has one revision to be pulled from a: $ hg pull --rebase pulling from $TESTTMP/a (glob) searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) rebasing 2:ff8d69a621f9 "L1" saved backup bundle to $TESTTMP/b/.hg/strip-backup/ff8d69a621f9-160fa373-backup.hg (glob) $ hg tglog @ 3: 'L1' | o 2: 'R1' | o 1: 'C2' | o 0: 'C1' Re-run: $ hg pull --rebase pulling from $TESTTMP/a (glob) searching for changes no changes found Invoke pull --rebase and nothing to rebase: $ cd ../c $ hg book norebase $ hg pull --rebase pulling from $TESTTMP/a (glob) searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files nothing to rebase - working directory parent is already an ancestor of destination 77ae9631bcca 1 files updated, 0 files merged, 0 files removed, 0 files unresolved updating bookmark norebase $ hg tglog -l 1 @ 2: 'R1' | pull --rebase --update should ignore --update: $ hg pull --rebase --update pulling from $TESTTMP/a (glob) searching for changes no changes found pull --rebase doesn't update if nothing has been pulled: $ hg up -q 1 $ hg pull --rebase pulling from $TESTTMP/a (glob) searching for changes no changes found $ hg tglog -l 1 o 2: 'R1' | $ cd .. 
pull --rebase works when a specific revision is pulled (issue3619) $ cd a $ hg tglog @ 2: 'R1' | o 1: 'C2' | o 0: 'C1' $ echo R2 > R2 $ hg ci -Am R2 adding R2 $ echo R3 > R3 $ hg ci -Am R3 adding R3 $ cd ../c $ hg tglog o 2: 'R1' | @ 1: 'C2' | o 0: 'C1' $ echo L1 > L1 $ hg ci -Am L1 adding L1 created new head $ hg pull --rev tip --rebase pulling from $TESTTMP/a (glob) searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files rebasing 3:ff8d69a621f9 "L1" saved backup bundle to $TESTTMP/c/.hg/strip-backup/ff8d69a621f9-160fa373-backup.hg (glob) $ hg tglog @ 5: 'L1' | o 4: 'R3' | o 3: 'R2' | o 2: 'R1' | o 1: 'C2' | o 0: 'C1' pull --rebase works with bundle2 turned on $ cd ../a $ echo R4 > R4 $ hg ci -Am R4 adding R4 $ hg tglog @ 5: 'R4' | o 4: 'R3' | o 3: 'R2' | o 2: 'R1' | o 1: 'C2' | o 0: 'C1' $ cd ../c $ hg pull --rebase pulling from $TESTTMP/a (glob) searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) rebasing 5:518d153c0ba3 "L1" saved backup bundle to $TESTTMP/c/.hg/strip-backup/518d153c0ba3-73407f14-backup.hg (glob) $ hg tglog @ 6: 'L1' | o 5: 'R4' | o 4: 'R3' | o 3: 'R2' | o 2: 'R1' | o 1: 'C2' | o 0: 'C1' mercurial-3.7.3/tests/test-casecollision-merge.t0000644000175000017500000001540012676531525021357 0ustar mpmmpm00000000000000#require icasefs ################################ test for branch merging ################################ test for rename awareness of case-folding collision check: (1) colliding file is one renamed from collided file: this is also case for issue3370. 
$ hg init branch_merge_renaming $ cd branch_merge_renaming $ echo a > a $ hg add a $ echo b > b $ hg add b $ hg commit -m '#0' $ hg tag -l A $ hg rename a tmp $ hg rename tmp A $ hg commit -m '#1' $ hg tag -l B $ hg update -q 0 $ touch x $ hg add x $ hg commit -m '#2' created new head $ hg tag -l C $ hg merge -q $ hg status -A M A R a C b C x $ hg update -q --clean 1 $ hg merge -q $ hg status -A M x C A C b $ hg commit -m '(D)' $ hg tag -l D additional test for issue3452: | this assumes the history below. | | (A) -- (C) -- (E) ------- | \ \ \ | \ \ \ | (B) -- (D) -- (F) -- (G) | | A: add file 'a' | B: rename from 'a' to 'A' | C: add 'x' (or operation other than modification of 'a') | D: merge C into B | E: modify 'a' | F: modify 'A' | G: merge E into F | | issue3452 occurs when (B) is recorded before (C) $ hg update -q --clean C $ echo "modify 'a' at (E)" > a $ echo "modify 'b' at (E)" > b $ hg commit -m '(E)' created new head $ hg tag -l E $ hg update -q --clean D $ echo "modify 'A' at (F)" > A $ hg commit -m '(F)' $ hg tag -l F $ hg merge -q --tool internal:other E $ hg status -A M A a M b C x $ cat A modify 'a' at (E) test also the case that (B) is recorded after (C), to prevent regression by changes in the future. to avoid unexpected (successful) behavior by filelog unification, target file is not 'a'/'A' but 'b'/'B' in this case. $ hg update -q --clean A $ hg rename b tmp $ hg rename tmp B $ hg commit -m '(B1)' created new head $ hg tag -l B1 $ hg merge -q C $ hg status -A M x C B C a $ hg commit -m '(D1)' $ hg tag -l D1 $ echo "modify 'B' at (F1)" > B $ hg commit -m '(F1)' $ hg tag -l F1 $ hg merge -q --tool internal:other E $ hg status -A M B b M a C x $ cat B modify 'b' at (E) $ cd .. 
(2) colliding file is not related to collided file $ hg init branch_merge_collding $ cd branch_merge_collding $ echo a > a $ hg add a $ hg commit -m '#0' $ hg remove a $ hg commit -m '#1' $ echo A > A $ hg add A $ hg commit -m '#2' $ hg update --clean 0 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo x > x $ hg add x $ hg commit -m '#3' created new head $ echo 'modified at #4' > a $ hg commit -m '#4' $ hg merge abort: case-folding collision between a and A [255] $ hg parents --template '{rev}\n' 4 $ hg status -A C a C x $ cat a modified at #4 $ hg update --clean 2 1 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg merge abort: case-folding collision between a and A [255] $ hg parents --template '{rev}\n' 2 $ hg status -A C A $ cat A A test for deletion awareness of case-folding collision check (issue3648): revision '#3' doesn't change 'a', so 'a' should be recognized as safely removed in merging between #2 and #3. $ hg update --clean 3 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge 2 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg status -A M A R a C x $ hg update --clean 2 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge 3 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg status -A M x C A $ cd .. 
Prepare for tests of directory case-folding collisions $ hg init directory-casing $ cd directory-casing $ touch 0 # test: file without directory $ mkdir 0a $ touch 0a/f $ mkdir aA $ touch aA/a $ hg ci -Aqm0 Directory/file case-folding collision: $ hg up -q null $ touch 00 # test: starts as '0' $ mkdir 000 # test: starts as '0' $ touch 000/f $ touch Aa # test: collision with 'aA/a' $ hg ci -Aqm1 $ hg merge 0 abort: case-folding collision between Aa and directory of aA/a [255] (note: no collision between 0 and 00 or 000/f) Directory case-folding collision: $ hg up -qC null $ hg --config extensions.purge= purge $ mkdir 0A0 $ touch 0A0/f # test: starts as '0a' $ mkdir Aa $ touch Aa/b # test: collision with 'aA/a' $ hg ci -Aqm2 $ hg merge 0 3 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cd .. ################################ test for linear updates ################################ test for rename awareness of case-folding collision check: (1) colliding file is one renamed from collided file $ hg init linearupdate_renameaware_1 $ cd linearupdate_renameaware_1 $ echo a > a $ hg add a $ hg commit -m '#0' $ hg rename a tmp $ hg rename tmp A $ hg commit -m '#1' $ hg update 0 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo 'this is added line' >> a $ hg update 1 merging a and A to A 0 files updated, 1 files merged, 0 files removed, 0 files unresolved $ hg status -A M A $ cat A a this is added line $ cd .. 
(2) colliding file is not related to collided file $ hg init linearupdate_renameaware_2 $ cd linearupdate_renameaware_2 $ echo a > a $ hg add a $ hg commit -m '#0' $ hg remove a $ hg commit -m '#1' $ echo A > A $ hg add A $ hg commit -m '#2' $ hg update 0 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg parents --template '{rev}\n' 0 $ hg status -A C a $ cat A a $ hg up -qC 2 $ hg update --check 0 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg parents --template '{rev}\n' 0 $ hg status -A C a $ cat a a $ hg update --clean 2 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg parents --template '{rev}\n' 2 $ hg status -A C A $ cat A A $ cd .. (3) colliding file is not related to collided file: added in working dir $ hg init linearupdate_renameaware_3 $ cd linearupdate_renameaware_3 $ echo a > a $ hg add a $ hg commit -m '#0' $ hg rename a b $ hg commit -m '#1' $ hg update 0 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo B > B $ hg add B $ hg status A B $ hg update abort: case-folding collision between b and B [255] $ hg update --check abort: uncommitted changes [255] $ hg update --clean 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg parents --template '{rev}\n' 1 $ hg status -A C b $ cat b a $ cd .. mercurial-3.7.3/tests/test-commandserver.t0000644000175000017500000005266712676531525020320 0ustar mpmmpm00000000000000#if windows $ PYTHONPATH="$TESTDIR/../contrib;$PYTHONPATH" #else $ PYTHONPATH="$TESTDIR/../contrib:$PYTHONPATH" #endif $ export PYTHONPATH typical client does not want echo-back messages, so test without it: $ grep -v '^promptecho ' < $HGRCPATH >> $HGRCPATH.new $ mv $HGRCPATH.new $HGRCPATH $ hg init repo $ cd repo >>> from hgclient import readchannel, runcommand, check >>> @check ... def hellomessage(server): ... ch, data = readchannel(server) ... print '%c, %r' % (ch, data) ... 
# run an arbitrary command to make sure the next thing the server ... # sends isn't part of the hello message ... runcommand(server, ['id']) o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob) *** runcommand id 000000000000 tip >>> from hgclient import check >>> @check ... def unknowncommand(server): ... server.stdin.write('unknowncommand\n') abort: unknown command unknowncommand >>> from hgclient import readchannel, runcommand, check >>> @check ... def checkruncommand(server): ... # hello block ... readchannel(server) ... ... # no args ... runcommand(server, []) ... ... # global options ... runcommand(server, ['id', '--quiet']) ... ... # make sure global options don't stick through requests ... runcommand(server, ['id']) ... ... # --config ... runcommand(server, ['id', '--config', 'ui.quiet=True']) ... ... # make sure --config doesn't stick ... runcommand(server, ['id']) ... ... # negative return code should be masked ... runcommand(server, ['id', '-runknown']) *** runcommand Mercurial Distributed SCM basic commands: add add the specified files on the next commit annotate show changeset information by line for each file clone make a copy of an existing repository commit commit the specified files or all outstanding changes diff diff repository (or selected files) export dump the header and diffs for one or more changesets forget forget the specified files on the next commit init create a new repository in the given directory log show revision history of entire repository or files merge merge another revision into working directory pull pull changes from the specified source push push changes to the specified destination remove remove the specified files on the next commit serve start stand-alone webserver status show changed files in the working directory summary summarize working directory state update update working directory (or switch revisions) (use "hg help" for the full list of commands or "hg -v" for details) *** runcommand id --quiet 
000000000000 *** runcommand id 000000000000 tip *** runcommand id --config ui.quiet=True 000000000000 *** runcommand id 000000000000 tip *** runcommand id -runknown abort: unknown revision 'unknown'! [255] >>> from hgclient import readchannel, check >>> @check ... def inputeof(server): ... readchannel(server) ... server.stdin.write('runcommand\n') ... # close stdin while server is waiting for input ... server.stdin.close() ... ... # server exits with 1 if the pipe closed while reading the command ... print 'server exit code =', server.wait() server exit code = 1 >>> import cStringIO >>> from hgclient import readchannel, runcommand, check >>> @check ... def serverinput(server): ... readchannel(server) ... ... patch = """ ... # HG changeset patch ... # User test ... # Date 0 0 ... # Node ID c103a3dec114d882c98382d684d8af798d09d857 ... # Parent 0000000000000000000000000000000000000000 ... 1 ... ... diff -r 000000000000 -r c103a3dec114 a ... --- /dev/null Thu Jan 01 00:00:00 1970 +0000 ... +++ b/a Thu Jan 01 00:00:00 1970 +0000 ... @@ -0,0 +1,1 @@ ... +1 ... """ ... ... runcommand(server, ['import', '-'], input=cStringIO.StringIO(patch)) ... runcommand(server, ['log']) *** runcommand import - applying patch from stdin *** runcommand log changeset: 0:eff892de26ec tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1 check that --cwd doesn't persist between requests: $ mkdir foo $ touch foo/bar >>> from hgclient import readchannel, runcommand, check >>> @check ... def cwd(server): ... readchannel(server) ... runcommand(server, ['--cwd', 'foo', 'st', 'bar']) ... runcommand(server, ['st', 'foo/bar']) *** runcommand --cwd foo st bar ? bar *** runcommand st foo/bar ? foo/bar $ rm foo/bar check that local configs for the cached repo aren't inherited when -R is used: $ cat <> .hg/hgrc > [ui] > foo = bar > EOF >>> from hgclient import readchannel, sep, runcommand, check >>> @check ... def localhgrc(server): ... readchannel(server) ... ... 
# the cached repo local hgrc contains ui.foo=bar, so showconfig should ... # show it ... runcommand(server, ['showconfig'], outfilter=sep) ... ... # but not for this repo ... runcommand(server, ['init', 'foo']) ... runcommand(server, ['-R', 'foo', 'showconfig', 'ui', 'defaults']) *** runcommand showconfig bundle.mainreporoot=$TESTTMP/repo defaults.backout=-d "0 0" defaults.commit=-d "0 0" defaults.shelve=--date "0 0" defaults.tag=-d "0 0" devel.all-warnings=true largefiles.usercache=$TESTTMP/.cache/largefiles ui.slash=True ui.interactive=False ui.mergemarkers=detailed ui.foo=bar ui.nontty=true *** runcommand init foo *** runcommand -R foo showconfig ui defaults defaults.backout=-d "0 0" defaults.commit=-d "0 0" defaults.shelve=--date "0 0" defaults.tag=-d "0 0" ui.slash=True ui.interactive=False ui.mergemarkers=detailed ui.nontty=true $ rm -R foo #if windows $ PYTHONPATH="$TESTTMP/repo;$PYTHONPATH" #else $ PYTHONPATH="$TESTTMP/repo:$PYTHONPATH" #endif $ cat < hook.py > import sys > def hook(**args): > print 'hook talking' > print 'now try to read something: %r' % sys.stdin.read() > EOF >>> import cStringIO >>> from hgclient import readchannel, runcommand, check >>> @check ... def hookoutput(server): ... readchannel(server) ... runcommand(server, ['--config', ... 'hooks.pre-identify=python:hook.hook', ... 'id'], ... input=cStringIO.StringIO('some input')) *** runcommand --config hooks.pre-identify=python:hook.hook id hook talking now try to read something: 'some input' eff892de26ec tip $ rm hook.py* $ echo a >> a >>> import os >>> from hgclient import readchannel, runcommand, check >>> @check ... def outsidechanges(server): ... readchannel(server) ... runcommand(server, ['status']) ... os.system('hg ci -Am2') ... runcommand(server, ['tip']) ... 
runcommand(server, ['status']) *** runcommand status M a *** runcommand tip changeset: 1:d3a0a68be6de tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 *** runcommand status >>> import os >>> from hgclient import readchannel, runcommand, check >>> @check ... def bookmarks(server): ... readchannel(server) ... runcommand(server, ['bookmarks']) ... ... # changes .hg/bookmarks ... os.system('hg bookmark -i bm1') ... os.system('hg bookmark -i bm2') ... runcommand(server, ['bookmarks']) ... ... # changes .hg/bookmarks.current ... os.system('hg upd bm1 -q') ... runcommand(server, ['bookmarks']) ... ... runcommand(server, ['bookmarks', 'bm3']) ... f = open('a', 'ab') ... f.write('a\n') ... f.close() ... runcommand(server, ['commit', '-Amm']) ... runcommand(server, ['bookmarks']) *** runcommand bookmarks no bookmarks set *** runcommand bookmarks bm1 1:d3a0a68be6de bm2 1:d3a0a68be6de *** runcommand bookmarks * bm1 1:d3a0a68be6de bm2 1:d3a0a68be6de *** runcommand bookmarks bm3 *** runcommand commit -Amm *** runcommand bookmarks bm1 1:d3a0a68be6de bm2 1:d3a0a68be6de * bm3 2:aef17e88f5f0 >>> import os >>> from hgclient import readchannel, runcommand, check >>> @check ... def tagscache(server): ... readchannel(server) ... runcommand(server, ['id', '-t', '-r', '0']) ... os.system('hg tag -r 0 foo') ... runcommand(server, ['id', '-t', '-r', '0']) *** runcommand id -t -r 0 *** runcommand id -t -r 0 foo >>> import os >>> from hgclient import readchannel, runcommand, check >>> @check ... def setphase(server): ... readchannel(server) ... runcommand(server, ['phase', '-r', '.']) ... os.system('hg phase -r . -p') ... runcommand(server, ['phase', '-r', '.']) *** runcommand phase -r . 3: draft *** runcommand phase -r . 3: public $ echo a >> a >>> from hgclient import readchannel, runcommand, check >>> @check ... def rollback(server): ... readchannel(server) ... runcommand(server, ['phase', '-r', '.', '-p']) ... runcommand(server, ['commit', '-Am.']) ... 
runcommand(server, ['rollback']) ... runcommand(server, ['phase', '-r', '.']) *** runcommand phase -r . -p no phases changed *** runcommand commit -Am. *** runcommand rollback repository tip rolled back to revision 3 (undo commit) working directory now based on revision 3 *** runcommand phase -r . 3: public >>> import os >>> from hgclient import readchannel, runcommand, check >>> @check ... def branch(server): ... readchannel(server) ... runcommand(server, ['branch']) ... os.system('hg branch foo') ... runcommand(server, ['branch']) ... os.system('hg branch default') *** runcommand branch default marked working directory as branch foo (branches are permanent and global, did you want a bookmark?) *** runcommand branch foo marked working directory as branch default (branches are permanent and global, did you want a bookmark?) $ touch .hgignore >>> import os >>> from hgclient import readchannel, runcommand, check >>> @check ... def hgignore(server): ... readchannel(server) ... runcommand(server, ['commit', '-Am.']) ... f = open('ignored-file', 'ab') ... f.write('') ... f.close() ... f = open('.hgignore', 'ab') ... f.write('ignored-file') ... f.close() ... runcommand(server, ['status', '-i', '-u']) *** runcommand commit -Am. adding .hgignore *** runcommand status -i -u I ignored-file cache of non-public revisions should be invalidated on repository change (issue4855): >>> import os >>> from hgclient import readchannel, runcommand, check >>> @check ... def phasesetscacheaftercommit(server): ... readchannel(server) ... # load _phasecache._phaserevs and _phasesets ... runcommand(server, ['log', '-qr', 'draft()']) ... # create draft commits by another process ... for i in xrange(5, 7): ... f = open('a', 'ab') ... f.seek(0, os.SEEK_END) ... f.write('a\n') ... f.close() ... os.system('hg commit -Aqm%d' % i) ... # new commits should be listed as draft revisions ... 
runcommand(server, ['log', '-qr', 'draft()']) *** runcommand log -qr draft() 4:7966c8e3734d *** runcommand log -qr draft() 4:7966c8e3734d 5:41f6602d1c4f 6:10501e202c35 >>> import os >>> from hgclient import readchannel, runcommand, check >>> @check ... def phasesetscacheafterstrip(server): ... readchannel(server) ... # load _phasecache._phaserevs and _phasesets ... runcommand(server, ['log', '-qr', 'draft()']) ... # strip cached revisions by another process ... os.system('hg --config extensions.strip= strip -q 5') ... # shouldn't abort by "unknown revision '6'" ... runcommand(server, ['log', '-qr', 'draft()']) *** runcommand log -qr draft() 4:7966c8e3734d 5:41f6602d1c4f 6:10501e202c35 *** runcommand log -qr draft() 4:7966c8e3734d cache of phase roots should be invalidated on strip (issue3827): >>> import os >>> from hgclient import readchannel, sep, runcommand, check >>> @check ... def phasecacheafterstrip(server): ... readchannel(server) ... ... # create new head, 5:731265503d86 ... runcommand(server, ['update', '-C', '0']) ... f = open('a', 'ab') ... f.write('a\n') ... f.close() ... runcommand(server, ['commit', '-Am.', 'a']) ... runcommand(server, ['log', '-Gq']) ... ... # make it public; draft marker moves to 4:7966c8e3734d ... runcommand(server, ['phase', '-p', '.']) ... # load _phasecache.phaseroots ... runcommand(server, ['phase', '.'], outfilter=sep) ... ... # strip 1::4 outside server ... os.system('hg -q --config extensions.mq= strip 1') ... ... # shouldn't raise "7966c8e3734d: no node!" ... runcommand(server, ['branches']) *** runcommand update -C 0 1 files updated, 0 files merged, 2 files removed, 0 files unresolved (leaving bookmark bm3) *** runcommand commit -Am. a created new head *** runcommand log -Gq @ 5:731265503d86 | | o 4:7966c8e3734d | | | o 3:b9b85890c400 | | | o 2:aef17e88f5f0 | | | o 1:d3a0a68be6de |/ o 0:eff892de26ec *** runcommand phase -p . *** runcommand phase . 
5: public *** runcommand branches default 1:731265503d86 in-memory cache must be reloaded if transaction is aborted. otherwise changelog and manifest would have invalid node: $ echo a >> a >>> from hgclient import readchannel, runcommand, check >>> @check ... def txabort(server): ... readchannel(server) ... runcommand(server, ['commit', '--config', 'hooks.pretxncommit=false', ... '-mfoo']) ... runcommand(server, ['verify']) *** runcommand commit --config hooks.pretxncommit=false -mfoo transaction abort! rollback completed abort: pretxncommit hook exited with status 1 [255] *** runcommand verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 2 changesets, 2 total revisions $ hg revert --no-backup -aq $ cat >> .hg/hgrc << EOF > [experimental] > evolution=createmarkers > EOF >>> import os >>> from hgclient import readchannel, runcommand, check >>> @check ... def obsolete(server): ... readchannel(server) ... ... runcommand(server, ['up', 'null']) ... runcommand(server, ['phase', '-df', 'tip']) ... cmd = 'hg debugobsolete `hg log -r tip --template {node}`' ... if os.name == 'nt': ... cmd = 'sh -c "%s"' % cmd # run in sh, not cmd.exe ... os.system(cmd) ... runcommand(server, ['log', '--hidden']) ... runcommand(server, ['log']) *** runcommand up null 0 files updated, 0 files merged, 1 files removed, 0 files unresolved *** runcommand phase -df tip *** runcommand log --hidden changeset: 1:731265503d86 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: . changeset: 0:eff892de26ec bookmark: bm1 bookmark: bm2 bookmark: bm3 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1 *** runcommand log changeset: 0:eff892de26ec bookmark: bm1 bookmark: bm2 bookmark: bm3 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1 $ cat <> .hg/hgrc > [extensions] > mq = > EOF >>> import os >>> from hgclient import readchannel, runcommand, check >>> @check ... def mqoutsidechanges(server): ... 
readchannel(server) ... ... # load repo.mq ... runcommand(server, ['qapplied']) ... os.system('hg qnew 0.diff') ... # repo.mq should be invalidated ... runcommand(server, ['qapplied']) ... ... runcommand(server, ['qpop', '--all']) ... os.system('hg qqueue --create foo') ... # repo.mq should be recreated to point to new queue ... runcommand(server, ['qqueue', '--active']) *** runcommand qapplied *** runcommand qapplied 0.diff *** runcommand qpop --all popping 0.diff patch queue now empty *** runcommand qqueue --active foo $ cat < dbgui.py > import os, sys > from mercurial import cmdutil, commands > cmdtable = {} > command = cmdutil.command(cmdtable) > @command("debuggetpass", norepo=True) > def debuggetpass(ui): > ui.write("%s\\n" % ui.getpass()) > @command("debugprompt", norepo=True) > def debugprompt(ui): > ui.write("%s\\n" % ui.prompt("prompt:")) > @command("debugreadstdin", norepo=True) > def debugreadstdin(ui): > ui.write("read: %r\n" % sys.stdin.read(1)) > @command("debugwritestdout", norepo=True) > def debugwritestdout(ui): > os.write(1, "low-level stdout fd and\n") > sys.stdout.write("stdout should be redirected to /dev/null\n") > sys.stdout.flush() > EOF $ cat <> .hg/hgrc > [extensions] > dbgui = dbgui.py > EOF >>> import cStringIO >>> from hgclient import readchannel, runcommand, check >>> @check ... def getpass(server): ... readchannel(server) ... runcommand(server, ['debuggetpass', '--config', ... 'ui.interactive=True'], ... input=cStringIO.StringIO('1234\n')) ... runcommand(server, ['debugprompt', '--config', ... 'ui.interactive=True'], ... input=cStringIO.StringIO('5678\n')) ... runcommand(server, ['debugreadstdin']) ... 
runcommand(server, ['debugwritestdout']) *** runcommand debuggetpass --config ui.interactive=True password: 1234 *** runcommand debugprompt --config ui.interactive=True prompt: 5678 *** runcommand debugreadstdin read: '' *** runcommand debugwritestdout run commandserver in commandserver, which is silly but should work: >>> import cStringIO >>> from hgclient import readchannel, runcommand, check >>> @check ... def nested(server): ... print '%c, %r' % readchannel(server) ... class nestedserver(object): ... stdin = cStringIO.StringIO('getencoding\n') ... stdout = cStringIO.StringIO() ... runcommand(server, ['serve', '--cmdserver', 'pipe'], ... output=nestedserver.stdout, input=nestedserver.stdin) ... nestedserver.stdout.seek(0) ... print '%c, %r' % readchannel(nestedserver) # hello ... print '%c, %r' % readchannel(nestedserver) # getencoding o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob) *** runcommand serve --cmdserver pipe o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob) r, '*' (glob) start without repository: $ cd .. >>> from hgclient import readchannel, runcommand, check >>> @check ... def hellomessage(server): ... ch, data = readchannel(server) ... print '%c, %r' % (ch, data) ... # run an arbitrary command to make sure the next thing the server ... # sends isn't part of the hello message ... runcommand(server, ['id']) o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob) *** runcommand id abort: there is no Mercurial repository here (.hg not found) [255] >>> from hgclient import readchannel, runcommand, check >>> @check ... def startwithoutrepo(server): ... readchannel(server) ... runcommand(server, ['init', 'repo2']) ... runcommand(server, ['id', '-R', 'repo2']) *** runcommand init repo2 *** runcommand id -R repo2 000000000000 tip don't fall back to cwd if invalid -R path is specified (issue4805): $ cd repo $ hg serve --cmdserver pipe -R ../nonexistent abort: repository ../nonexistent not found! 
[255] $ cd .. unix domain socket: $ cd repo $ hg update -q #if unix-socket unix-permissions >>> import cStringIO >>> from hgclient import unixserver, readchannel, runcommand, check >>> server = unixserver('.hg/server.sock', '.hg/server.log') >>> def hellomessage(conn): ... ch, data = readchannel(conn) ... print '%c, %r' % (ch, data) ... runcommand(conn, ['id']) >>> check(hellomessage, server.connect) o, 'capabilities: getencoding runcommand\nencoding: *\npid: *' (glob) *** runcommand id eff892de26ec tip bm1/bm2/bm3 >>> def unknowncommand(conn): ... readchannel(conn) ... conn.stdin.write('unknowncommand\n') >>> check(unknowncommand, server.connect) # error sent to server.log >>> def serverinput(conn): ... readchannel(conn) ... patch = """ ... # HG changeset patch ... # User test ... # Date 0 0 ... 2 ... ... diff -r eff892de26ec -r 1ed24be7e7a0 a ... --- a/a ... +++ b/a ... @@ -1,1 +1,2 @@ ... 1 ... +2 ... """ ... runcommand(conn, ['import', '-'], input=cStringIO.StringIO(patch)) ... runcommand(conn, ['log', '-rtip', '-q']) >>> check(serverinput, server.connect) *** runcommand import - applying patch from stdin *** runcommand log -rtip -q 2:1ed24be7e7a0 >>> server.shutdown() $ cat .hg/server.log listening at .hg/server.sock abort: unknown command unknowncommand killed! 
#endif #if no-unix-socket $ hg serve --cmdserver unix -a .hg/server.sock abort: unsupported platform [255] #endif mercurial-3.7.3/tests/test-ui-color.py0000644000175000017500000000157212676531525017356 0ustar mpmmpm00000000000000import os from hgext import color from mercurial import dispatch, ui # ensure errors aren't buffered testui = color.colorui() testui.pushbuffer() testui.write(('buffered\n')) testui.warn(('warning\n')) testui.write_err('error\n') print repr(testui.popbuffer()) # test dispatch.dispatch with the same ui object hgrc = open(os.environ["HGRCPATH"], 'w') hgrc.write('[extensions]\n') hgrc.write('color=\n') hgrc.close() ui_ = ui.ui() ui_.setconfig('ui', 'formatted', 'True') # we're not interested in the output, so write that to devnull ui_.fout = open(os.devnull, 'w') # call some arbitrary command just so we go through # color's wrapped _runcommand twice. def runcmd(): dispatch.dispatch(dispatch.request(['version', '-q'], ui_)) runcmd() print "colored? " + str(issubclass(ui_.__class__, color.colorui)) runcmd() print "colored? 
" + str(issubclass(ui_.__class__, color.colorui)) mercurial-3.7.3/tests/fakedirstatewritetime.py0000644000175000017500000000443112676531525021245 0ustar mpmmpm00000000000000# extension to emulate invoking 'dirstate.write()' at the time # specified by '[fakedirstatewritetime] fakenow', only when # 'dirstate.write()' is invoked via functions below: # # - 'workingctx._checklookup()' (= 'repo.status()') # - 'committablectx.markcommitted()' from __future__ import absolute_import from mercurial import ( context, dirstate, extensions, parsers, util, ) def pack_dirstate(fakenow, orig, dmap, copymap, pl, now): # execute what original parsers.pack_dirstate should do actually # for consistency actualnow = int(now) for f, e in dmap.iteritems(): if e[0] == 'n' and e[3] == actualnow: e = parsers.dirstatetuple(e[0], e[1], e[2], -1) dmap[f] = e return orig(dmap, copymap, pl, fakenow) def fakewrite(ui, func): # fake "now" of 'pack_dirstate' only if it is invoked while 'func' fakenow = ui.config('fakedirstatewritetime', 'fakenow') if not fakenow: # Execute original one, if fakenow isn't configured. This is # useful to prevent subrepos from executing replaced one, # because replacing 'parsers.pack_dirstate' is also effective # in subrepos. 
return func() # parsing 'fakenow' in YYYYmmddHHMM format makes comparison between # 'fakenow' value and 'touch -t YYYYmmddHHMM' argument easy fakenow = util.parsedate(fakenow, ['%Y%m%d%H%M'])[0] orig_pack_dirstate = parsers.pack_dirstate orig_dirstate_getfsnow = dirstate._getfsnow wrapper = lambda *args: pack_dirstate(fakenow, orig_pack_dirstate, *args) parsers.pack_dirstate = wrapper dirstate._getfsnow = lambda *args: fakenow try: return func() finally: parsers.pack_dirstate = orig_pack_dirstate dirstate._getfsnow = orig_dirstate_getfsnow def _checklookup(orig, workingctx, files): ui = workingctx.repo().ui return fakewrite(ui, lambda : orig(workingctx, files)) def markcommitted(orig, committablectx, node): ui = committablectx.repo().ui return fakewrite(ui, lambda : orig(committablectx, node)) def extsetup(ui): extensions.wrapfunction(context.workingctx, '_checklookup', _checklookup) extensions.wrapfunction(context.committablectx, 'markcommitted', markcommitted) mercurial-3.7.3/tests/test-obsolete-divergent.t0000644000175000017500000002247512676531525021246 0ustar mpmmpm00000000000000Test file dedicated to testing the divergent troubles from obsolete changeset. This is the most complex troubles from far so we isolate it in a dedicated file. Enable obsolete $ cat >> $HGRCPATH << EOF > [ui] > logtemplate = {rev}:{node|short} {desc}\n > [experimental] > evolution=createmarkers > [alias] > debugobsolete = debugobsolete -d '0 0' > [phases] > publish=False > EOF $ mkcommit() { > echo "$1" > "$1" > hg add "$1" > hg ci -m "$1" > } $ getid() { > hg log --hidden -r "desc('$1')" -T '{node}\n' > } setup repo $ hg init reference $ cd reference $ mkcommit base $ mkcommit A_0 $ hg up 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ mkcommit A_1 created new head $ hg up 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ mkcommit A_2 created new head $ hg up 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ cd .. 
$ newcase() { > hg clone -u 0 -q reference $1 > cd $1 > } direct divergence ----------------- A_1 have two direct and divergent successors A_1 and A_1 $ newcase direct $ hg debugobsolete `getid A_0` `getid A_1` $ hg debugobsolete `getid A_0` `getid A_2` $ hg log -G --hidden o 3:392fd25390da A_2 | | o 2:82623d38b9ba A_1 |/ | x 1:007dc284c1f8 A_0 |/ @ 0:d20a80d4def3 base $ hg debugsuccessorssets --hidden 'all()' d20a80d4def3 d20a80d4def3 007dc284c1f8 82623d38b9ba 392fd25390da 82623d38b9ba 82623d38b9ba 392fd25390da 392fd25390da $ hg log -r 'divergent()' 2:82623d38b9ba A_1 3:392fd25390da A_2 check that mercurial refuse to push $ hg init ../other $ hg push ../other pushing to ../other searching for changes abort: push includes divergent changeset: 392fd25390da! [255] $ cd .. indirect divergence with known changeset ------------------------------------------- $ newcase indirect_known $ hg debugobsolete `getid A_0` `getid A_1` $ hg debugobsolete `getid A_0` `getid A_2` $ mkcommit A_3 created new head $ hg debugobsolete `getid A_2` `getid A_3` $ hg log -G --hidden @ 4:01f36c5a8fda A_3 | | x 3:392fd25390da A_2 |/ | o 2:82623d38b9ba A_1 |/ | x 1:007dc284c1f8 A_0 |/ o 0:d20a80d4def3 base $ hg debugsuccessorssets --hidden 'all()' d20a80d4def3 d20a80d4def3 007dc284c1f8 82623d38b9ba 01f36c5a8fda 82623d38b9ba 82623d38b9ba 392fd25390da 01f36c5a8fda 01f36c5a8fda 01f36c5a8fda $ hg log -r 'divergent()' 2:82623d38b9ba A_1 4:01f36c5a8fda A_3 $ cd .. 
indirect divergence with known changeset ------------------------------------------- $ newcase indirect_unknown $ hg debugobsolete `getid A_0` aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa `getid A_1` $ hg debugobsolete `getid A_0` `getid A_2` $ hg log -G --hidden o 3:392fd25390da A_2 | | o 2:82623d38b9ba A_1 |/ | x 1:007dc284c1f8 A_0 |/ @ 0:d20a80d4def3 base $ hg debugsuccessorssets --hidden 'all()' d20a80d4def3 d20a80d4def3 007dc284c1f8 82623d38b9ba 392fd25390da 82623d38b9ba 82623d38b9ba 392fd25390da 392fd25390da $ hg log -r 'divergent()' 2:82623d38b9ba A_1 3:392fd25390da A_2 $ cd .. do not take unknown node in account if they are final ----------------------------------------------------- $ newcase final-unknown $ hg debugobsolete `getid A_0` `getid A_1` $ hg debugobsolete `getid A_1` `getid A_2` $ hg debugobsolete `getid A_0` bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb $ hg debugobsolete bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb cccccccccccccccccccccccccccccccccccccccc $ hg debugobsolete `getid A_1` dddddddddddddddddddddddddddddddddddddddd $ hg debugsuccessorssets --hidden 'desc('A_0')' 007dc284c1f8 392fd25390da $ cd .. divergence that converge again is not divergence anymore ----------------------------------------------------- $ newcase converged_divergence $ hg debugobsolete `getid A_0` `getid A_1` $ hg debugobsolete `getid A_0` `getid A_2` $ mkcommit A_3 created new head $ hg debugobsolete `getid A_1` `getid A_3` $ hg debugobsolete `getid A_2` `getid A_3` $ hg log -G --hidden @ 4:01f36c5a8fda A_3 | | x 3:392fd25390da A_2 |/ | x 2:82623d38b9ba A_1 |/ | x 1:007dc284c1f8 A_0 |/ o 0:d20a80d4def3 base $ hg debugsuccessorssets --hidden 'all()' d20a80d4def3 d20a80d4def3 007dc284c1f8 01f36c5a8fda 82623d38b9ba 01f36c5a8fda 392fd25390da 01f36c5a8fda 01f36c5a8fda 01f36c5a8fda $ hg log -r 'divergent()' $ cd .. 
split is not divergences ----------------------------- $ newcase split $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2` $ hg log -G --hidden o 3:392fd25390da A_2 | | o 2:82623d38b9ba A_1 |/ | x 1:007dc284c1f8 A_0 |/ @ 0:d20a80d4def3 base $ hg debugsuccessorssets --hidden 'all()' d20a80d4def3 d20a80d4def3 007dc284c1f8 82623d38b9ba 392fd25390da 82623d38b9ba 82623d38b9ba 392fd25390da 392fd25390da $ hg log -r 'divergent()' Even when subsequent rewriting happen $ mkcommit A_3 created new head $ hg debugobsolete `getid A_1` `getid A_3` $ hg up 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ mkcommit A_4 created new head $ hg debugobsolete `getid A_2` `getid A_4` $ hg up 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ mkcommit A_5 created new head $ hg debugobsolete `getid A_4` `getid A_5` $ hg log -G --hidden @ 6:e442cfc57690 A_5 | | x 5:6a411f0d7a0a A_4 |/ | o 4:01f36c5a8fda A_3 |/ | x 3:392fd25390da A_2 |/ | x 2:82623d38b9ba A_1 |/ | x 1:007dc284c1f8 A_0 |/ o 0:d20a80d4def3 base $ hg debugsuccessorssets --hidden 'all()' d20a80d4def3 d20a80d4def3 007dc284c1f8 01f36c5a8fda e442cfc57690 82623d38b9ba 01f36c5a8fda 392fd25390da e442cfc57690 01f36c5a8fda 01f36c5a8fda 6a411f0d7a0a e442cfc57690 e442cfc57690 e442cfc57690 $ hg log -r 'divergent()' Check more complex obsolescence graft (with divergence) $ mkcommit B_0; hg up 0 0 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg debugobsolete `getid B_0` `getid A_2` $ mkcommit A_7; hg up 0 created new head 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ mkcommit A_8; hg up 0 created new head 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg debugobsolete `getid A_5` `getid A_7` `getid A_8` $ mkcommit A_9; hg up 0 created new head 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg debugobsolete `getid A_5` `getid A_9` $ hg log -G --hidden o 10:bed64f5d2f5a A_9 | | o 9:14608b260df8 A_8 |/ 
| o 8:7ae126973a96 A_7 |/ | x 7:3750ebee865d B_0 | | | x 6:e442cfc57690 A_5 |/ | x 5:6a411f0d7a0a A_4 |/ | o 4:01f36c5a8fda A_3 |/ | x 3:392fd25390da A_2 |/ | x 2:82623d38b9ba A_1 |/ | x 1:007dc284c1f8 A_0 |/ @ 0:d20a80d4def3 base $ hg debugsuccessorssets --hidden 'all()' d20a80d4def3 d20a80d4def3 007dc284c1f8 01f36c5a8fda bed64f5d2f5a 01f36c5a8fda 7ae126973a96 14608b260df8 82623d38b9ba 01f36c5a8fda 392fd25390da bed64f5d2f5a 7ae126973a96 14608b260df8 01f36c5a8fda 01f36c5a8fda 6a411f0d7a0a bed64f5d2f5a 7ae126973a96 14608b260df8 e442cfc57690 bed64f5d2f5a 7ae126973a96 14608b260df8 3750ebee865d bed64f5d2f5a 7ae126973a96 14608b260df8 7ae126973a96 7ae126973a96 14608b260df8 14608b260df8 bed64f5d2f5a bed64f5d2f5a $ hg log -r 'divergent()' 4:01f36c5a8fda A_3 8:7ae126973a96 A_7 9:14608b260df8 A_8 10:bed64f5d2f5a A_9 fix the divergence $ mkcommit A_A; hg up 0 created new head 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg debugobsolete `getid A_9` `getid A_A` $ hg debugobsolete `getid A_7` `getid A_A` $ hg debugobsolete `getid A_8` `getid A_A` $ hg log -G --hidden o 11:a139f71be9da A_A | | x 10:bed64f5d2f5a A_9 |/ | x 9:14608b260df8 A_8 |/ | x 8:7ae126973a96 A_7 |/ | x 7:3750ebee865d B_0 | | | x 6:e442cfc57690 A_5 |/ | x 5:6a411f0d7a0a A_4 |/ | o 4:01f36c5a8fda A_3 |/ | x 3:392fd25390da A_2 |/ | x 2:82623d38b9ba A_1 |/ | x 1:007dc284c1f8 A_0 |/ @ 0:d20a80d4def3 base $ hg debugsuccessorssets --hidden 'all()' d20a80d4def3 d20a80d4def3 007dc284c1f8 01f36c5a8fda a139f71be9da 82623d38b9ba 01f36c5a8fda 392fd25390da a139f71be9da 01f36c5a8fda 01f36c5a8fda 6a411f0d7a0a a139f71be9da e442cfc57690 a139f71be9da 3750ebee865d a139f71be9da 7ae126973a96 a139f71be9da 14608b260df8 a139f71be9da bed64f5d2f5a a139f71be9da a139f71be9da a139f71be9da $ hg log -r 'divergent()' $ cd .. Subset does not diverge ------------------------------ Do not report divergent successors-set if it is a subset of another successors-set. 
(report [A,B] not [A] + [A,B]) $ newcase subset $ hg debugobsolete `getid A_0` `getid A_2` $ hg debugobsolete `getid A_0` `getid A_1` `getid A_2` $ hg debugsuccessorssets --hidden 'desc('A_0')' 007dc284c1f8 82623d38b9ba 392fd25390da $ cd .. mercurial-3.7.3/tests/test-hgweb-json.t0000644000175000017500000006007512676531525017506 0ustar mpmmpm00000000000000#require json #require serve $ request() { > get-with-headers.py --json localhost:$HGPORT "$1" > } $ hg init test $ cd test $ mkdir da $ echo foo > da/foo $ echo foo > foo $ hg -q ci -A -m initial $ echo bar > foo $ hg ci -m 'modify foo' $ echo bar > da/foo $ hg ci -m 'modify da/foo' $ hg bookmark bookmark1 $ hg up default 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (leaving bookmark bookmark1) $ hg mv foo foo-new $ hg commit -m 'move foo' $ hg tag -m 'create tag' tag1 $ hg phase --public -r . $ echo baz > da/foo $ hg commit -m 'another commit to da/foo' $ hg tag -m 'create tag2' tag2 $ hg bookmark bookmark2 $ hg -q up -r 0 $ hg -q branch test-branch $ echo branch > foo $ hg commit -m 'create test branch' $ echo branch_commit_2 > foo $ hg commit -m 'another commit in test-branch' $ hg -q up default $ hg merge --tool :local test-branch 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg commit -m 'merge test-branch into default' $ hg log -G @ changeset: 9:cc725e08502a |\ tag: tip | | parent: 6:ceed296fe500 | | parent: 8:ed66c30e87eb | | user: test | | date: Thu Jan 01 00:00:00 1970 +0000 | | summary: merge test-branch into default | | | o changeset: 8:ed66c30e87eb | | branch: test-branch | | user: test | | date: Thu Jan 01 00:00:00 1970 +0000 | | summary: another commit in test-branch | | | o changeset: 7:6ab967a8ab34 | | branch: test-branch | | parent: 0:06e557f3edf6 | | user: test | | date: Thu Jan 01 00:00:00 1970 +0000 | | summary: create test branch | | o | changeset: 6:ceed296fe500 | | bookmark: bookmark2 | | user: test | | 
date: Thu Jan 01 00:00:00 1970 +0000 | | summary: create tag2 | | o | changeset: 5:f2890a05fea4 | | tag: tag2 | | user: test | | date: Thu Jan 01 00:00:00 1970 +0000 | | summary: another commit to da/foo | | o | changeset: 4:93a8ce14f891 | | user: test | | date: Thu Jan 01 00:00:00 1970 +0000 | | summary: create tag | | o | changeset: 3:78896eb0e102 | | tag: tag1 | | user: test | | date: Thu Jan 01 00:00:00 1970 +0000 | | summary: move foo | | o | changeset: 2:8d7c456572ac | | bookmark: bookmark1 | | user: test | | date: Thu Jan 01 00:00:00 1970 +0000 | | summary: modify da/foo | | o | changeset: 1:f8bbb9024b10 |/ user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: modify foo | o changeset: 0:06e557f3edf6 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: initial $ hg serve -p $HGPORT -d --pid-file=hg.pid -A access.log -E error.log $ cat hg.pid >> $DAEMON_PIDS (Try to keep these in roughly the order they are defined in webcommands.py) (log is handled by filelog/ and changelog/ - ignore it) (rawfile/ doesn't use templating - nothing to test) file/{revision}/{path} shows file revision $ request json-file/06e557f3edf6/foo 200 Script output follows "not yet implemented" file/{revision} shows root directory info $ request json-file/cc725e08502a 200 Script output follows { "abspath": "/", "bookmarks": [], "directories": [ { "abspath": "/da", "basename": "da", "emptydirs": "" } ], "files": [ { "abspath": ".hgtags", "basename": ".hgtags", "date": [ 0.0, 0 ], "flags": "", "size": 92 }, { "abspath": "foo-new", "basename": "foo-new", "date": [ 0.0, 0 ], "flags": "", "size": 4 } ], "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7", "tags": [ "tip" ] } changelog/ shows information about several changesets $ request json-changelog 200 Script output follows { "changeset_count": 10, "changesets": [ { "bookmarks": [], "date": [ 0.0, 0 ], "desc": "merge test-branch into default", "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7", "tags": [ "tip" ], "user": "test" }, 
{ "bookmarks": [], "date": [ 0.0, 0 ], "desc": "another commit in test-branch", "node": "ed66c30e87eb65337c05a4229efaa5f1d5285a90", "tags": [], "user": "test" }, { "bookmarks": [], "date": [ 0.0, 0 ], "desc": "create test branch", "node": "6ab967a8ab3489227a83f80e920faa039a71819f", "tags": [], "user": "test" }, { "bookmarks": [ "bookmark2" ], "date": [ 0.0, 0 ], "desc": "create tag2", "node": "ceed296fe500c3fac9541e31dad860cb49c89e45", "tags": [], "user": "test" }, { "bookmarks": [], "date": [ 0.0, 0 ], "desc": "another commit to da/foo", "node": "f2890a05fea49bfaf9fb27ed5490894eba32da78", "tags": [ "tag2" ], "user": "test" }, { "bookmarks": [], "date": [ 0.0, 0 ], "desc": "create tag", "node": "93a8ce14f89156426b7fa981af8042da53f03aa0", "tags": [], "user": "test" }, { "bookmarks": [], "date": [ 0.0, 0 ], "desc": "move foo", "node": "78896eb0e102174ce9278438a95e12543e4367a7", "tags": [ "tag1" ], "user": "test" }, { "bookmarks": [ "bookmark1" ], "date": [ 0.0, 0 ], "desc": "modify da/foo", "node": "8d7c456572acf3557e8ed8a07286b10c408bcec5", "tags": [], "user": "test" }, { "bookmarks": [], "date": [ 0.0, 0 ], "desc": "modify foo", "node": "f8bbb9024b10f93cdbb8d940337398291d40dea8", "tags": [], "user": "test" }, { "bookmarks": [], "date": [ 0.0, 0 ], "desc": "initial", "node": "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e", "tags": [], "user": "test" } ], "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7" } changelog/{revision} shows information starting at a specific changeset $ request json-changelog/f8bbb9024b10 200 Script output follows { "changeset_count": 10, "changesets": [ { "bookmarks": [], "date": [ 0.0, 0 ], "desc": "modify foo", "node": "f8bbb9024b10f93cdbb8d940337398291d40dea8", "tags": [], "user": "test" }, { "bookmarks": [], "date": [ 0.0, 0 ], "desc": "initial", "node": "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e", "tags": [], "user": "test" } ], "node": "f8bbb9024b10f93cdbb8d940337398291d40dea8" } shortlog/ shows information about a set of changesets $ 
request json-shortlog 200 Script output follows { "changeset_count": 10, "changesets": [ { "bookmarks": [], "date": [ 0.0, 0 ], "desc": "merge test-branch into default", "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7", "tags": [ "tip" ], "user": "test" }, { "bookmarks": [], "date": [ 0.0, 0 ], "desc": "another commit in test-branch", "node": "ed66c30e87eb65337c05a4229efaa5f1d5285a90", "tags": [], "user": "test" }, { "bookmarks": [], "date": [ 0.0, 0 ], "desc": "create test branch", "node": "6ab967a8ab3489227a83f80e920faa039a71819f", "tags": [], "user": "test" }, { "bookmarks": [ "bookmark2" ], "date": [ 0.0, 0 ], "desc": "create tag2", "node": "ceed296fe500c3fac9541e31dad860cb49c89e45", "tags": [], "user": "test" }, { "bookmarks": [], "date": [ 0.0, 0 ], "desc": "another commit to da/foo", "node": "f2890a05fea49bfaf9fb27ed5490894eba32da78", "tags": [ "tag2" ], "user": "test" }, { "bookmarks": [], "date": [ 0.0, 0 ], "desc": "create tag", "node": "93a8ce14f89156426b7fa981af8042da53f03aa0", "tags": [], "user": "test" }, { "bookmarks": [], "date": [ 0.0, 0 ], "desc": "move foo", "node": "78896eb0e102174ce9278438a95e12543e4367a7", "tags": [ "tag1" ], "user": "test" }, { "bookmarks": [ "bookmark1" ], "date": [ 0.0, 0 ], "desc": "modify da/foo", "node": "8d7c456572acf3557e8ed8a07286b10c408bcec5", "tags": [], "user": "test" }, { "bookmarks": [], "date": [ 0.0, 0 ], "desc": "modify foo", "node": "f8bbb9024b10f93cdbb8d940337398291d40dea8", "tags": [], "user": "test" }, { "bookmarks": [], "date": [ 0.0, 0 ], "desc": "initial", "node": "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e", "tags": [], "user": "test" } ], "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7" } changeset/ renders the tip changeset $ request json-rev 200 Script output follows { "bookmarks": [], "branch": "default", "date": [ 0.0, 0 ], "desc": "merge test-branch into default", "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7", "parents": [ "ceed296fe500c3fac9541e31dad860cb49c89e45", 
"ed66c30e87eb65337c05a4229efaa5f1d5285a90" ], "phase": "draft", "tags": [ "tip" ], "user": "test" } changeset/{revision} shows tags $ request json-rev/78896eb0e102 200 Script output follows { "bookmarks": [], "branch": "default", "date": [ 0.0, 0 ], "desc": "move foo", "node": "78896eb0e102174ce9278438a95e12543e4367a7", "parents": [ "8d7c456572acf3557e8ed8a07286b10c408bcec5" ], "phase": "public", "tags": [ "tag1" ], "user": "test" } changeset/{revision} shows bookmarks $ request json-rev/8d7c456572ac 200 Script output follows { "bookmarks": [ "bookmark1" ], "branch": "default", "date": [ 0.0, 0 ], "desc": "modify da/foo", "node": "8d7c456572acf3557e8ed8a07286b10c408bcec5", "parents": [ "f8bbb9024b10f93cdbb8d940337398291d40dea8" ], "phase": "public", "tags": [], "user": "test" } changeset/{revision} shows branches $ request json-rev/6ab967a8ab34 200 Script output follows { "bookmarks": [], "branch": "test-branch", "date": [ 0.0, 0 ], "desc": "create test branch", "node": "6ab967a8ab3489227a83f80e920faa039a71819f", "parents": [ "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e" ], "phase": "draft", "tags": [], "user": "test" } manifest/{revision}/{path} shows info about a directory at a revision $ request json-manifest/06e557f3edf6/ 200 Script output follows { "abspath": "/", "bookmarks": [], "directories": [ { "abspath": "/da", "basename": "da", "emptydirs": "" } ], "files": [ { "abspath": "foo", "basename": "foo", "date": [ 0.0, 0 ], "flags": "", "size": 4 } ], "node": "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e", "tags": [] } tags/ shows tags info $ request json-tags 200 Script output follows { "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7", "tags": [ { "date": [ 0.0, 0 ], "node": "f2890a05fea49bfaf9fb27ed5490894eba32da78", "tag": "tag2" }, { "date": [ 0.0, 0 ], "node": "78896eb0e102174ce9278438a95e12543e4367a7", "tag": "tag1" } ] } bookmarks/ shows bookmarks info $ request json-bookmarks 200 Script output follows { "bookmarks": [ { "bookmark": "bookmark1", "date": [ 
0.0, 0 ], "node": "8d7c456572acf3557e8ed8a07286b10c408bcec5" }, { "bookmark": "bookmark2", "date": [ 0.0, 0 ], "node": "ceed296fe500c3fac9541e31dad860cb49c89e45" } ], "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7" } branches/ shows branches info $ request json-branches 200 Script output follows { "branches": [ { "branch": "default", "date": [ 0.0, 0 ], "node": "cc725e08502a79dd1eda913760fbe06ed7a9abc7", "status": "open" }, { "branch": "test-branch", "date": [ 0.0, 0 ], "node": "ed66c30e87eb65337c05a4229efaa5f1d5285a90", "status": "inactive" } ] } summary/ shows a summary of repository state $ request json-summary 200 Script output follows "not yet implemented" filediff/{revision}/{path} shows changes to a file in a revision $ request json-diff/f8bbb9024b10/foo 200 Script output follows { "author": "test", "children": [], "date": [ 0.0, 0 ], "desc": "modify foo", "diff": [ { "blockno": 1, "lines": [ { "l": "--- a/foo\tThu Jan 01 00:00:00 1970 +0000\n", "n": 1, "t": "-" }, { "l": "+++ b/foo\tThu Jan 01 00:00:00 1970 +0000\n", "n": 2, "t": "+" }, { "l": "@@ -1,1 +1,1 @@\n", "n": 3, "t": "@" }, { "l": "-foo\n", "n": 4, "t": "-" }, { "l": "+bar\n", "n": 5, "t": "+" } ] } ], "node": "f8bbb9024b10f93cdbb8d940337398291d40dea8", "parents": [ "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e" ], "path": "foo" } comparison/{revision}/{path} shows information about before and after for a file $ request json-comparison/f8bbb9024b10/foo 200 Script output follows { "author": "test", "children": [], "comparison": [ { "lines": [ { "ll": "foo", "ln": 1, "rl": "bar", "rn": 1, "t": "replace" } ] } ], "date": [ 0.0, 0 ], "desc": "modify foo", "leftnode": "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e", "node": "f8bbb9024b10f93cdbb8d940337398291d40dea8", "parents": [ "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e" ], "path": "foo", "rightnode": "f8bbb9024b10f93cdbb8d940337398291d40dea8" } annotate/{revision}/{path} shows annotations for each line $ request json-annotate/f8bbb9024b10/foo 200 Script 
output follows { "abspath": "foo", "annotate": [ { "abspath": "foo", "author": "test", "desc": "modify foo", "line": "bar\n", "lineno": 1, "node": "f8bbb9024b10f93cdbb8d940337398291d40dea8", "revdate": [ 0.0, 0 ], "targetline": 1 } ], "author": "test", "children": [], "date": [ 0.0, 0 ], "desc": "modify foo", "node": "f8bbb9024b10f93cdbb8d940337398291d40dea8", "parents": [ "06e557f3edf66faa1ccaba5dd8c203c21cc79f1e" ], "permissions": "" } filelog/{revision}/{path} shows history of a single file $ request json-filelog/f8bbb9024b10/foo 200 Script output follows "not yet implemented" (archive/ doesn't use templating, so ignore it) (static/ doesn't use templating, so ignore it) graph/ shows information that can be used to render a graph of the DAG $ request json-graph 200 Script output follows "not yet implemented" help/ shows help topics $ request json-help 200 Script output follows { "earlycommands": [ { "summary": "add the specified files on the next commit", "topic": "add" }, { "summary": "show changeset information by line for each file", "topic": "annotate" }, { "summary": "make a copy of an existing repository", "topic": "clone" }, { "summary": "commit the specified files or all outstanding changes", "topic": "commit" }, { "summary": "diff repository (or selected files)", "topic": "diff" }, { "summary": "dump the header and diffs for one or more changesets", "topic": "export" }, { "summary": "forget the specified files on the next commit", "topic": "forget" }, { "summary": "create a new repository in the given directory", "topic": "init" }, { "summary": "show revision history of entire repository or files", "topic": "log" }, { "summary": "merge another revision into working directory", "topic": "merge" }, { "summary": "pull changes from the specified source", "topic": "pull" }, { "summary": "push changes to the specified destination", "topic": "push" }, { "summary": "remove the specified files on the next commit", "topic": "remove" }, { "summary": "start 
stand-alone webserver", "topic": "serve" }, { "summary": "show changed files in the working directory", "topic": "status" }, { "summary": "summarize working directory state", "topic": "summary" }, { "summary": "update working directory (or switch revisions)", "topic": "update" } ], "othercommands": [ { "summary": "add all new files, delete all missing files", "topic": "addremove" }, { "summary": "create an unversioned archive of a repository revision", "topic": "archive" }, { "summary": "reverse effect of earlier changeset", "topic": "backout" }, { "summary": "subdivision search of changesets", "topic": "bisect" }, { "summary": "create a new bookmark or list existing bookmarks", "topic": "bookmarks" }, { "summary": "set or show the current branch name", "topic": "branch" }, { "summary": "list repository named branches", "topic": "branches" }, { "summary": "create a changegroup file", "topic": "bundle" }, { "summary": "output the current or given revision of files", "topic": "cat" }, { "summary": "show combined config settings from all hgrc files", "topic": "config" }, { "summary": "mark files as copied for the next commit", "topic": "copy" }, { "summary": "list tracked files", "topic": "files" }, { "summary": "copy changes from other branches onto the current branch", "topic": "graft" }, { "summary": "search for a pattern in specified files and revisions", "topic": "grep" }, { "summary": "show branch heads", "topic": "heads" }, { "summary": "show help for a given topic or a help overview", "topic": "help" }, { "summary": "identify the working directory or specified revision", "topic": "identify" }, { "summary": "import an ordered set of patches", "topic": "import" }, { "summary": "show new changesets found in source", "topic": "incoming" }, { "summary": "output the current or given revision of the project manifest", "topic": "manifest" }, { "summary": "show changesets not found in the destination", "topic": "outgoing" }, { "summary": "show aliases for remote 
repositories", "topic": "paths" }, { "summary": "set or show the current phase name", "topic": "phase" }, { "summary": "roll back an interrupted transaction", "topic": "recover" }, { "summary": "rename files; equivalent of copy + remove", "topic": "rename" }, { "summary": "redo merges or set/view the merge status of files", "topic": "resolve" }, { "summary": "restore files to their checkout state", "topic": "revert" }, { "summary": "print the root (top) of the current working directory", "topic": "root" }, { "summary": "add one or more tags for the current or given revision", "topic": "tag" }, { "summary": "list repository tags", "topic": "tags" }, { "summary": "apply one or more changegroup files", "topic": "unbundle" }, { "summary": "verify the integrity of the repository", "topic": "verify" }, { "summary": "output version and copyright information", "topic": "version" } ], "topics": [ { "summary": "Configuration Files", "topic": "config" }, { "summary": "Date Formats", "topic": "dates" }, { "summary": "Diff Formats", "topic": "diffs" }, { "summary": "Environment Variables", "topic": "environment" }, { "summary": "Using Additional Features", "topic": "extensions" }, { "summary": "Specifying File Sets", "topic": "filesets" }, { "summary": "Glossary", "topic": "glossary" }, { "summary": "Syntax for Mercurial Ignore Files", "topic": "hgignore" }, { "summary": "Configuring hgweb", "topic": "hgweb" }, { "summary": "Technical implementation topics", "topic": "internals" }, { "summary": "Merge Tools", "topic": "merge-tools" }, { "summary": "Specifying Multiple Revisions", "topic": "multirevs" }, { "summary": "File Name Patterns", "topic": "patterns" }, { "summary": "Working with Phases", "topic": "phases" }, { "summary": "Specifying Single Revisions", "topic": "revisions" }, { "summary": "Specifying Revision Sets", "topic": "revsets" }, { "summary": "Using Mercurial from scripts and automation", "topic": "scripting" }, { "summary": "Subrepositories", "topic": "subrepos" 
}, { "summary": "Template Usage", "topic": "templating" }, { "summary": "URL Paths", "topic": "urls" } ] } help/{topic} shows an individual help topic $ request json-help/phases 200 Script output follows { "rawdoc": "Working with Phases\n*", (glob) "topic": "phases" } mercurial-3.7.3/tests/test-convert-hg-svn.t0000644000175000017500000000620712676531525020320 0ustar mpmmpm00000000000000#require svn svn-bindings $ cat <> $HGRCPATH > [extensions] > convert = > mq = > EOF $ SVNREPOPATH=`pwd`/svn-repo #if windows $ SVNREPOURL=file:///`$PYTHON -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` #else $ SVNREPOURL=file://`$PYTHON -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` #endif $ svnadmin create "$SVNREPOPATH" $ cat > "$SVNREPOPATH"/hooks/pre-revprop-change < #!/bin/sh > > REPOS="$1" > REV="$2" > USER="$3" > PROPNAME="$4" > ACTION="$5" > > if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi > if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi > if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi > > echo "Changing prohibited revision property" >&2 > exit 1 > EOF $ chmod +x "$SVNREPOPATH"/hooks/pre-revprop-change $ svn co "$SVNREPOURL" "$SVNREPOPATH"-wc Checked out revision 0. $ cd "$SVNREPOPATH"-wc $ echo a > a $ svn add a A a $ svn ci -m'added a' a Adding a Transmitting file data . Committed revision 1. $ cd .. initial roundtrip $ hg convert -s svn -d hg "$SVNREPOPATH"-wc "$SVNREPOPATH"-hg | grep -v initializing scanning source... sorting... converting... 0 added a $ hg convert -s hg -d svn "$SVNREPOPATH"-hg "$SVNREPOPATH"-wc scanning source... sorting... converting... second roundtrip should do nothing $ hg convert -s svn -d hg "$SVNREPOPATH"-wc "$SVNREPOPATH"-hg scanning source... sorting... converting... $ hg convert -s hg -d svn "$SVNREPOPATH"-hg "$SVNREPOPATH"-wc scanning source... sorting... converting... 
new hg rev $ hg clone "$SVNREPOPATH"-hg "$SVNREPOPATH"-work updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd "$SVNREPOPATH"-work $ echo b > b $ hg add b $ hg ci -mb adding an empty revision $ hg qnew -m emtpy empty $ hg qfinish -a $ cd .. echo hg to svn $ hg --cwd "$SVNREPOPATH"-hg pull -q "$SVNREPOPATH"-work $ hg convert -s hg -d svn "$SVNREPOPATH"-hg "$SVNREPOPATH"-wc scanning source... sorting... converting... 1 b 0 emtpy svn back to hg should do nothing $ hg convert -s svn -d hg "$SVNREPOPATH"-wc "$SVNREPOPATH"-hg scanning source... sorting... converting... hg back to svn should do nothing $ hg convert -s hg -d svn "$SVNREPOPATH"-hg "$SVNREPOPATH"-wc scanning source... sorting... converting... verify which shamap format we are storing and must be able to handle $ cat svn-repo-hg/.hg/shamap svn:????????-????-????-????-????????????@1 ???????????????????????????????????????? (glob) svn:????????-????-????-????-????????????@2 ???????????????????????????????????????? (glob) svn:????????-????-????-????-????????????@2 ???????????????????????????????????????? (glob) $ cat svn-repo-wc/.svn/hg-shamap ???????????????????????????????????????? 1 (glob) ???????????????????????????????????????? svn:????????-????-????-????-????????????@2 (glob) ???????????????????????????????????????? 
svn:????????-????-????-????-????????????@2 (glob) mercurial-3.7.3/tests/test-convert-tagsbranch-topology.t0000644000175000017500000000443712676531525023107 0ustar mpmmpm00000000000000#require git $ echo "[core]" >> $HOME/.gitconfig $ echo "autocrlf = false" >> $HOME/.gitconfig $ echo "[core]" >> $HOME/.gitconfig $ echo "autocrlf = false" >> $HOME/.gitconfig $ cat <> $HGRCPATH > [extensions] > convert = > [convert] > hg.usebranchnames = True > hg.tagsbranch = tags-update > EOF $ GIT_AUTHOR_NAME='test'; export GIT_AUTHOR_NAME $ GIT_AUTHOR_EMAIL='test@example.org'; export GIT_AUTHOR_EMAIL $ GIT_AUTHOR_DATE="2007-01-01 00:00:00 +0000"; export GIT_AUTHOR_DATE $ GIT_COMMITTER_NAME="$GIT_AUTHOR_NAME"; export GIT_COMMITTER_NAME $ GIT_COMMITTER_EMAIL="$GIT_AUTHOR_EMAIL"; export GIT_COMMITTER_EMAIL $ GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE"; export GIT_COMMITTER_DATE $ count=10 $ action() > { > GIT_AUTHOR_DATE="2007-01-01 00:00:$count +0000" > GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE" > git "$@" >/dev/null 2>/dev/null || echo "git command error" > count=`expr $count + 1` > } $ glog() > { > hg log -G --template '{rev} "{desc|firstline}" files: {files}\n' "$@" > } $ convertrepo() > { > hg convert --datesort git-repo hg-repo > } Build a GIT repo with at least 1 tag $ mkdir git-repo $ cd git-repo $ git init >/dev/null 2>&1 $ echo a > a $ git add a $ action commit -m "rev1" $ action tag -m "tag1" tag1 $ cd .. Convert without tags $ hg convert git-repo hg-repo --config convert.skiptags=True initializing destination hg-repo repository scanning source... sorting... converting... 0 rev1 updating bookmarks $ hg -R hg-repo tags tip 0:d98c8ad3a4cf $ rm -rf hg-repo Do a first conversion $ convertrepo initializing destination hg-repo repository scanning source... sorting... converting... 0 rev1 updating tags updating bookmarks Simulate upstream updates after first conversion $ cd git-repo $ echo b > a $ git add a $ action commit -m "rev2" $ action tag -m "tag2" tag2 $ cd .. 
Perform an incremental conversion $ convertrepo scanning source... sorting... converting... 0 rev2 updating tags updating bookmarks Print the log $ cd hg-repo $ glog o 3 "update tags" files: .hgtags | | o 2 "rev2" files: a | | o | 1 "update tags" files: .hgtags / o 0 "rev1" files: a $ cd .. mercurial-3.7.3/tests/test-largefiles.t0000644000175000017500000014443712676531525017565 0ustar mpmmpm00000000000000This file used to contains all largefile tests. Do not add any new tests in this file as it his already far too long to run. It contains all the testing of the basic concepts of large file in a single block. $ USERCACHE="$TESTTMP/cache"; export USERCACHE $ mkdir "${USERCACHE}" $ cat >> $HGRCPATH < [extensions] > largefiles= > purge= > rebase= > transplant= > [phases] > publish=False > [largefiles] > minsize=2 > patterns=glob:**.dat > usercache=${USERCACHE} > [hooks] > precommit=sh -c "echo \\"Invoking status precommit hook\\"; hg status" > [experimental] > # drop me once bundle2 is the default, > # added to get test change early. > bundle2-exp = True > EOF Create the repo with a couple of revisions of both large and normal files. Test status and dirstate of largefiles and that summary output is correct. 
$ hg init a $ cd a $ mkdir sub $ echo normal1 > normal1 $ echo normal2 > sub/normal2 $ echo large1 > large1 $ echo large2 > sub/large2 $ hg add normal1 sub/normal2 $ hg add --large large1 sub/large2 $ hg commit -m "add files" Invoking status precommit hook A large1 A normal1 A sub/large2 A sub/normal2 $ touch large1 sub/large2 $ sleep 1 $ hg st $ hg debugstate --nodates n 644 41 set .hglf/large1 n 644 41 set .hglf/sub/large2 n 644 8 set normal1 n 644 8 set sub/normal2 $ hg debugstate --large --nodates n 644 7 set large1 n 644 7 set sub/large2 $ echo normal11 > normal1 $ echo normal22 > sub/normal2 $ echo large11 > large1 $ echo large22 > sub/large2 $ hg commit -m "edit files" Invoking status precommit hook M large1 M normal1 M sub/large2 M sub/normal2 $ hg sum --large parent: 1:ce8896473775 tip edit files branch: default commit: (clean) update: (current) phases: 2 draft largefiles: (no remote repo) Commit preserved largefile contents. $ cat normal1 normal11 $ cat large1 large11 $ cat sub/normal2 normal22 $ cat sub/large2 large22 Test status, subdir and unknown files $ echo unknown > sub/unknown $ hg st --all ? sub/unknown C large1 C normal1 C sub/large2 C sub/normal2 $ hg st --all sub ? sub/unknown C sub/large2 C sub/normal2 $ rm sub/unknown Test messages and exit codes for remove warning cases $ hg remove -A large1 not removing large1: file still exists [1] $ echo 'modified' > large1 $ hg remove large1 not removing large1: file is modified (use -f to force removal) [1] $ echo 'new' > normalnew $ hg add normalnew $ echo 'new' > largenew $ hg add --large normalnew normalnew already tracked! $ hg remove normalnew largenew not removing largenew: file is untracked not removing normalnew: file has been marked for add (use forget to undo) [1] $ rm normalnew largenew $ hg up -Cq Remove both largefiles and normal files. 
$ hg remove normal1 large1 $ hg status large1 R large1 $ hg commit -m "remove files" Invoking status precommit hook R large1 R normal1 $ ls sub $ echo "testlargefile" > large1-test $ hg add --large large1-test $ hg st A large1-test $ hg rm large1-test not removing large1-test: file has been marked for add (use forget to undo) [1] $ hg st A large1-test $ hg forget large1-test $ hg st ? large1-test $ hg remove large1-test not removing large1-test: file is untracked [1] $ hg forget large1-test not removing large1-test: file is already untracked [1] $ rm large1-test Copy both largefiles and normal files (testing that status output is correct). $ hg cp sub/normal2 normal1 $ hg cp sub/large2 large1 $ hg commit -m "copy files" Invoking status precommit hook A large1 A normal1 $ cat normal1 normal22 $ cat large1 large22 Test moving largefiles and verify that normal files are also unaffected. $ hg mv normal1 normal3 $ hg mv large1 large3 $ hg mv sub/normal2 sub/normal4 $ hg mv sub/large2 sub/large4 $ hg commit -m "move files" Invoking status precommit hook A large3 A normal3 A sub/large4 A sub/normal4 R large1 R normal1 R sub/large2 R sub/normal2 $ cat normal3 normal22 $ cat large3 large22 $ cat sub/normal4 normal22 $ cat sub/large4 large22 #if serve Test display of largefiles in hgweb $ hg serve -d -p $HGPORT --pid-file ../hg.pid $ cat ../hg.pid >> $DAEMON_PIDS $ get-with-headers.py 127.0.0.1:$HGPORT 'file/tip/?style=raw' 200 Script output follows drwxr-xr-x sub -rw-r--r-- 41 large3 -rw-r--r-- 9 normal3 $ get-with-headers.py 127.0.0.1:$HGPORT 'file/tip/sub/?style=raw' 200 Script output follows -rw-r--r-- 41 large4 -rw-r--r-- 9 normal4 $ killdaemons.py #endif Test archiving the various revisions. These hit corner cases known with archiving. 
$ hg archive -r 0 ../archive0 $ hg archive -r 1 ../archive1 $ hg archive -r 2 ../archive2 $ hg archive -r 3 ../archive3 $ hg archive -r 4 ../archive4 $ cd ../archive0 $ cat normal1 normal1 $ cat large1 large1 $ cat sub/normal2 normal2 $ cat sub/large2 large2 $ cd ../archive1 $ cat normal1 normal11 $ cat large1 large11 $ cat sub/normal2 normal22 $ cat sub/large2 large22 $ cd ../archive2 $ ls sub $ cat sub/normal2 normal22 $ cat sub/large2 large22 $ cd ../archive3 $ cat normal1 normal22 $ cat large1 large22 $ cat sub/normal2 normal22 $ cat sub/large2 large22 $ cd ../archive4 $ cat normal3 normal22 $ cat large3 large22 $ cat sub/normal4 normal22 $ cat sub/large4 large22 Commit corner case: specify files to commit. $ cd ../a $ echo normal3 > normal3 $ echo large3 > large3 $ echo normal4 > sub/normal4 $ echo large4 > sub/large4 $ hg commit normal3 large3 sub/normal4 sub/large4 -m "edit files again" Invoking status precommit hook M large3 M normal3 M sub/large4 M sub/normal4 $ cat normal3 normal3 $ cat large3 large3 $ cat sub/normal4 normal4 $ cat sub/large4 large4 One more commit corner case: commit from a subdirectory. $ cd ../a $ echo normal33 > normal3 $ echo large33 > large3 $ echo normal44 > sub/normal4 $ echo large44 > sub/large4 $ cd sub $ hg commit -m "edit files yet again" Invoking status precommit hook M large3 M normal3 M sub/large4 M sub/normal4 $ cat ../normal3 normal33 $ cat ../large3 large33 $ cat normal4 normal44 $ cat large4 large44 Committing standins is not allowed. $ cd .. $ echo large3 > large3 $ hg commit .hglf/large3 -m "try to commit standin" abort: file ".hglf/large3" is a largefile standin (commit the largefile itself instead) [255] Corner cases for adding largefiles. 
$ echo large5 > large5 $ hg add --large large5 $ hg add --large large5 large5 already a largefile $ mkdir sub2 $ echo large6 > sub2/large6 $ echo large7 > sub2/large7 $ hg add --large sub2 adding sub2/large6 as a largefile (glob) adding sub2/large7 as a largefile (glob) $ hg st M large3 A large5 A sub2/large6 A sub2/large7 Committing directories containing only largefiles. $ mkdir -p z/y/x/m $ touch z/y/x/m/large1 $ touch z/y/x/large2 $ hg add --large z/y/x/m/large1 z/y/x/large2 $ hg commit -m "Subdir with directory only containing largefiles" z Invoking status precommit hook M large3 A large5 A sub2/large6 A sub2/large7 A z/y/x/large2 A z/y/x/m/large1 (and a bit of log testing) $ hg log -T '{rev}\n' z/y/x/m/large1 7 $ hg log -T '{rev}\n' z/y/x/m # with only a largefile 7 $ hg rollback --quiet $ touch z/y/x/m/normal $ hg add z/y/x/m/normal $ hg commit -m "Subdir with mixed contents" z Invoking status precommit hook M large3 A large5 A sub2/large6 A sub2/large7 A z/y/x/large2 A z/y/x/m/large1 A z/y/x/m/normal $ hg st M large3 A large5 A sub2/large6 A sub2/large7 $ hg rollback --quiet $ hg revert z/y/x/large2 z/y/x/m/large1 $ rm z/y/x/large2 z/y/x/m/large1 $ hg commit -m "Subdir with normal contents" z Invoking status precommit hook M large3 A large5 A sub2/large6 A sub2/large7 A z/y/x/m/normal $ hg st M large3 A large5 A sub2/large6 A sub2/large7 $ hg rollback --quiet $ hg revert --quiet z $ hg commit -m "Empty subdir" z abort: z: no match under directory! [255] $ rm -rf z $ hg ci -m "standin" .hglf abort: file ".hglf" is a largefile standin (commit the largefile itself instead) [255] Test "hg status" with combination of 'file pattern' and 'directory pattern' for largefiles: $ hg status sub2/large6 sub2 A sub2/large6 A sub2/large7 Config settings (pattern **.dat, minsize 2 MB) are respected. 
$ echo testdata > test.dat $ dd bs=1k count=2k if=/dev/zero of=reallylarge > /dev/null 2> /dev/null $ hg add adding reallylarge as a largefile adding test.dat as a largefile Test that minsize and --lfsize handle float values; also tests that --lfsize overrides largefiles.minsize. (0.250 MB = 256 kB = 262144 B) $ dd if=/dev/zero of=ratherlarge bs=1024 count=256 > /dev/null 2> /dev/null $ dd if=/dev/zero of=medium bs=1024 count=128 > /dev/null 2> /dev/null $ hg --config largefiles.minsize=.25 add adding ratherlarge as a largefile adding medium $ hg forget medium $ hg --config largefiles.minsize=.25 add --lfsize=.125 adding medium as a largefile $ dd if=/dev/zero of=notlarge bs=1024 count=127 > /dev/null 2> /dev/null $ hg --config largefiles.minsize=.25 add --lfsize=.125 adding notlarge $ hg forget notlarge Test forget on largefiles. $ hg forget large3 large5 test.dat reallylarge ratherlarge medium $ hg commit -m "add/edit more largefiles" Invoking status precommit hook A sub2/large6 A sub2/large7 R large3 ? large5 ? medium ? notlarge ? ratherlarge ? reallylarge ? test.dat $ hg st ? large3 ? large5 ? medium ? notlarge ? ratherlarge ? reallylarge ? test.dat Purge with largefiles: verify that largefiles are still in the working dir after a purge. $ hg purge --all $ cat sub/large4 large44 $ cat sub2/large6 large6 $ cat sub2/large7 large7 Test addremove: verify that files that should be added as largefiles are added as such and that already-existing largefiles are not added as normal files by accident. 
$ rm normal3 $ rm sub/large4 $ echo "testing addremove with patterns" > testaddremove.dat $ echo "normaladdremove" > normaladdremove $ hg addremove removing sub/large4 adding testaddremove.dat as a largefile removing normal3 adding normaladdremove Test addremove with -R $ hg up -C getting changed largefiles 1 largefiles updated, 0 removed 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm normal3 $ rm sub/large4 $ echo "testing addremove with patterns" > testaddremove.dat $ echo "normaladdremove" > normaladdremove $ cd .. $ hg -R a -v addremove removing sub/large4 adding testaddremove.dat as a largefile removing normal3 adding normaladdremove $ cd a Test 3364 $ hg clone . ../addrm updating to branch default getting changed largefiles 3 largefiles updated, 0 removed 5 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd ../addrm $ cat >> .hg/hgrc < [hooks] > post-commit.stat=sh -c "echo \\"Invoking status postcommit hook\\"; hg status -A" > EOF $ touch foo $ hg add --large foo $ hg ci -m "add foo" Invoking status precommit hook A foo Invoking status postcommit hook C foo C normal3 C sub/large4 C sub/normal4 C sub2/large6 C sub2/large7 $ rm foo $ hg st ! foo hmm.. no precommit invoked, but there is a postcommit?? $ hg ci -m "will not checkin" nothing changed (1 missing files, see 'hg status') Invoking status postcommit hook ! 
foo C normal3 C sub/large4 C sub/normal4 C sub2/large6 C sub2/large7 [1] $ hg addremove removing foo $ hg st R foo $ hg ci -m "used to say nothing changed" Invoking status precommit hook R foo Invoking status postcommit hook C normal3 C sub/large4 C sub/normal4 C sub2/large6 C sub2/large7 $ hg st Test 3507 (both normal files and largefiles were a problem) $ touch normal $ touch large $ hg add normal $ hg add --large large $ hg ci -m "added" Invoking status precommit hook A large A normal Invoking status postcommit hook C large C normal C normal3 C sub/large4 C sub/normal4 C sub2/large6 C sub2/large7 $ hg remove normal $ hg addremove --traceback $ hg ci -m "addremoved normal" Invoking status precommit hook R normal Invoking status postcommit hook C large C normal3 C sub/large4 C sub/normal4 C sub2/large6 C sub2/large7 $ hg up -C '.^' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg remove large $ hg addremove --traceback $ hg ci -m "removed large" Invoking status precommit hook R large created new head Invoking status postcommit hook C normal C normal3 C sub/large4 C sub/normal4 C sub2/large6 C sub2/large7 Test commit -A (issue3542) $ echo large8 > large8 $ hg add --large large8 $ hg ci -Am 'this used to add large8 as normal and commit both' Invoking status precommit hook A large8 Invoking status postcommit hook C large8 C normal C normal3 C sub/large4 C sub/normal4 C sub2/large6 C sub2/large7 $ rm large8 $ hg ci -Am 'this used to not notice the rm' removing large8 Invoking status precommit hook R large8 Invoking status postcommit hook C normal C normal3 C sub/large4 C sub/normal4 C sub2/large6 C sub2/large7 Test that a standin can't be added as a large file $ touch large $ hg add --large large $ hg ci -m "add" Invoking status precommit hook A large Invoking status postcommit hook C large C normal C normal3 C sub/large4 C sub/normal4 C sub2/large6 C sub2/large7 $ hg remove large $ touch large $ hg addremove --config 
largefiles.patterns=**large --traceback adding large as a largefile Test that outgoing --large works (with revsets too) $ hg outgoing --rev '.^' --large comparing with $TESTTMP/a (glob) searching for changes changeset: 8:c02fd3b77ec4 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add foo changeset: 9:289dd08c9bbb user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: used to say nothing changed changeset: 10:34f23ac6ac12 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: added changeset: 12:710c1b2f523c parent: 10:34f23ac6ac12 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: removed large changeset: 13:0a3e75774479 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: this used to add large8 as normal and commit both changeset: 14:84f3d378175c user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: this used to not notice the rm largefiles to upload (1 entities): large8 $ cd ../a Clone a largefiles repo. $ hg clone . ../b updating to branch default getting changed largefiles 3 largefiles updated, 0 removed 5 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd ../b $ hg log --template '{rev}:{node|short} {desc|firstline}\n' 7:daea875e9014 add/edit more largefiles 6:4355d653f84f edit files yet again 5:9d5af5072dbd edit files again 4:74c02385b94c move files 3:9e8fbc4bce62 copy files 2:51a0ae4d5864 remove files 1:ce8896473775 edit files 0:30d30fe6a5be add files $ cat normal3 normal33 Test graph log $ hg log -G --template '{rev}:{node|short} {desc|firstline}\n' @ 7:daea875e9014 add/edit more largefiles | o 6:4355d653f84f edit files yet again | o 5:9d5af5072dbd edit files again | o 4:74c02385b94c move files | o 3:9e8fbc4bce62 copy files | o 2:51a0ae4d5864 remove files | o 1:ce8896473775 edit files | o 0:30d30fe6a5be add files Test log with --patch $ hg log --patch -r 6::7 changeset: 6:4355d653f84f user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: edit files yet again diff -r 9d5af5072dbd -r 4355d653f84f 
.hglf/large3 --- a/.hglf/large3 Thu Jan 01 00:00:00 1970 +0000 +++ b/.hglf/large3 Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,1 @@ -baaf12afde9d8d67f25dab6dced0d2bf77dba47c +7838695e10da2bb75ac1156565f40a2595fa2fa0 diff -r 9d5af5072dbd -r 4355d653f84f .hglf/sub/large4 --- a/.hglf/sub/large4 Thu Jan 01 00:00:00 1970 +0000 +++ b/.hglf/sub/large4 Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,1 @@ -aeb2210d19f02886dde00dac279729a48471e2f9 +971fb41e78fea4f8e0ba5244784239371cb00591 diff -r 9d5af5072dbd -r 4355d653f84f normal3 --- a/normal3 Thu Jan 01 00:00:00 1970 +0000 +++ b/normal3 Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,1 @@ -normal3 +normal33 diff -r 9d5af5072dbd -r 4355d653f84f sub/normal4 --- a/sub/normal4 Thu Jan 01 00:00:00 1970 +0000 +++ b/sub/normal4 Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,1 @@ -normal4 +normal44 changeset: 7:daea875e9014 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add/edit more largefiles diff -r 4355d653f84f -r daea875e9014 .hglf/large3 --- a/.hglf/large3 Thu Jan 01 00:00:00 1970 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -7838695e10da2bb75ac1156565f40a2595fa2fa0 diff -r 4355d653f84f -r daea875e9014 .hglf/sub2/large6 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/.hglf/sub2/large6 Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +0d6d75887db61b2c7e6c74b5dd8fc6ad50c0cc30 diff -r 4355d653f84f -r daea875e9014 .hglf/sub2/large7 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/.hglf/sub2/large7 Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +bb3151689acb10f0c3125c560d5e63df914bc1af $ hg log --patch -r 6::7 sub/ changeset: 6:4355d653f84f user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: edit files yet again diff -r 9d5af5072dbd -r 4355d653f84f .hglf/sub/large4 --- a/.hglf/sub/large4 Thu Jan 01 00:00:00 1970 +0000 +++ b/.hglf/sub/large4 Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,1 @@ -aeb2210d19f02886dde00dac279729a48471e2f9 +971fb41e78fea4f8e0ba5244784239371cb00591 diff -r 9d5af5072dbd -r 
4355d653f84f sub/normal4 --- a/sub/normal4 Thu Jan 01 00:00:00 1970 +0000 +++ b/sub/normal4 Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,1 @@ -normal4 +normal44 log with both --follow and --patch $ hg log --follow --patch --limit 2 changeset: 7:daea875e9014 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add/edit more largefiles diff -r 4355d653f84f -r daea875e9014 .hglf/large3 --- a/.hglf/large3 Thu Jan 01 00:00:00 1970 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -7838695e10da2bb75ac1156565f40a2595fa2fa0 diff -r 4355d653f84f -r daea875e9014 .hglf/sub2/large6 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/.hglf/sub2/large6 Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +0d6d75887db61b2c7e6c74b5dd8fc6ad50c0cc30 diff -r 4355d653f84f -r daea875e9014 .hglf/sub2/large7 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/.hglf/sub2/large7 Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +bb3151689acb10f0c3125c560d5e63df914bc1af changeset: 6:4355d653f84f user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: edit files yet again diff -r 9d5af5072dbd -r 4355d653f84f .hglf/large3 --- a/.hglf/large3 Thu Jan 01 00:00:00 1970 +0000 +++ b/.hglf/large3 Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,1 @@ -baaf12afde9d8d67f25dab6dced0d2bf77dba47c +7838695e10da2bb75ac1156565f40a2595fa2fa0 diff -r 9d5af5072dbd -r 4355d653f84f .hglf/sub/large4 --- a/.hglf/sub/large4 Thu Jan 01 00:00:00 1970 +0000 +++ b/.hglf/sub/large4 Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,1 @@ -aeb2210d19f02886dde00dac279729a48471e2f9 +971fb41e78fea4f8e0ba5244784239371cb00591 diff -r 9d5af5072dbd -r 4355d653f84f normal3 --- a/normal3 Thu Jan 01 00:00:00 1970 +0000 +++ b/normal3 Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,1 @@ -normal3 +normal33 diff -r 9d5af5072dbd -r 4355d653f84f sub/normal4 --- a/sub/normal4 Thu Jan 01 00:00:00 1970 +0000 +++ b/sub/normal4 Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,1 @@ -normal4 +normal44 $ hg log --follow --patch sub/large4 changeset: 
6:4355d653f84f user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: edit files yet again diff -r 9d5af5072dbd -r 4355d653f84f .hglf/sub/large4 --- a/.hglf/sub/large4 Thu Jan 01 00:00:00 1970 +0000 +++ b/.hglf/sub/large4 Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,1 @@ -aeb2210d19f02886dde00dac279729a48471e2f9 +971fb41e78fea4f8e0ba5244784239371cb00591 changeset: 5:9d5af5072dbd user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: edit files again diff -r 74c02385b94c -r 9d5af5072dbd .hglf/sub/large4 --- a/.hglf/sub/large4 Thu Jan 01 00:00:00 1970 +0000 +++ b/.hglf/sub/large4 Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,1 @@ -eb7338044dc27f9bc59b8dd5a246b065ead7a9c4 +aeb2210d19f02886dde00dac279729a48471e2f9 changeset: 4:74c02385b94c user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: move files diff -r 9e8fbc4bce62 -r 74c02385b94c .hglf/sub/large4 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/.hglf/sub/large4 Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +eb7338044dc27f9bc59b8dd5a246b065ead7a9c4 changeset: 1:ce8896473775 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: edit files diff -r 30d30fe6a5be -r ce8896473775 .hglf/sub/large2 --- a/.hglf/sub/large2 Thu Jan 01 00:00:00 1970 +0000 +++ b/.hglf/sub/large2 Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,1 @@ -1deebade43c8c498a3c8daddac0244dc55d1331d +eb7338044dc27f9bc59b8dd5a246b065ead7a9c4 changeset: 0:30d30fe6a5be user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add files diff -r 000000000000 -r 30d30fe6a5be .hglf/sub/large2 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/.hglf/sub/large2 Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +1deebade43c8c498a3c8daddac0244dc55d1331d $ cat sub/normal4 normal44 $ cat sub/large4 large44 $ cat sub2/large6 large6 $ cat sub2/large7 large7 $ hg log -qf sub2/large7 7:daea875e9014 $ hg log -Gqf sub2/large7 @ 7:daea875e9014 | $ cd .. 
Test log from outside repo $ hg log b/sub -T '{rev}:{node|short} {desc|firstline}\n' 6:4355d653f84f edit files yet again 5:9d5af5072dbd edit files again 4:74c02385b94c move files 1:ce8896473775 edit files 0:30d30fe6a5be add files Test clone at revision $ hg clone a -r 3 c adding changesets adding manifests adding file changes added 4 changesets with 10 changes to 4 files updating to branch default getting changed largefiles 2 largefiles updated, 0 removed 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd c $ hg log --template '{rev}:{node|short} {desc|firstline}\n' 3:9e8fbc4bce62 copy files 2:51a0ae4d5864 remove files 1:ce8896473775 edit files 0:30d30fe6a5be add files $ cat normal1 normal22 $ cat large1 large22 $ cat sub/normal2 normal22 $ cat sub/large2 large22 Old revisions of a clone have correct largefiles content (this also tests update). $ hg update -r 1 getting changed largefiles 1 largefiles updated, 0 removed 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat large1 large11 $ cat sub/large2 large22 $ cd .. 
Test cloning with --all-largefiles flag $ rm "${USERCACHE}"/* $ hg clone --all-largefiles a a-backup updating to branch default getting changed largefiles 3 largefiles updated, 0 removed 5 files updated, 0 files merged, 0 files removed, 0 files unresolved 8 additional largefiles cached $ rm "${USERCACHE}"/* $ hg clone --all-largefiles -u 0 a a-clone0 updating to branch default getting changed largefiles 2 largefiles updated, 0 removed 4 files updated, 0 files merged, 0 files removed, 0 files unresolved 9 additional largefiles cached $ hg -R a-clone0 sum parent: 0:30d30fe6a5be add files branch: default commit: (clean) update: 7 new changesets (update) phases: 8 draft $ rm "${USERCACHE}"/* $ hg clone --all-largefiles -u 1 a a-clone1 updating to branch default getting changed largefiles 2 largefiles updated, 0 removed 4 files updated, 0 files merged, 0 files removed, 0 files unresolved 8 additional largefiles cached $ hg -R a-clone1 verify --large --lfa --lfc checking changesets checking manifests crosschecking files in changesets and manifests checking files 10 files, 8 changesets, 24 total revisions searching 8 changesets for largefiles verified contents of 13 revisions of 6 largefiles $ hg -R a-clone1 sum parent: 1:ce8896473775 edit files branch: default commit: (clean) update: 6 new changesets (update) phases: 8 draft $ rm "${USERCACHE}"/* $ hg clone --all-largefiles -U a a-clone-u 11 additional largefiles cached $ hg -R a-clone-u sum parent: -1:000000000000 (no revision checked out) branch: default commit: (clean) update: 8 new changesets (update) phases: 8 draft Show computed destination directory: $ mkdir xyz $ cd xyz $ hg clone ../a destination directory: a updating to branch default getting changed largefiles 3 largefiles updated, 0 removed 5 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd .. Clone URL without path: $ hg clone file:// abort: repository / not found! 
[255] Ensure base clone command argument validation $ hg clone -U -u 0 a a-clone-failure abort: cannot specify both --noupdate and --updaterev [255] $ hg clone --all-largefiles a ssh://localhost/a abort: --all-largefiles is incompatible with non-local destination ssh://localhost/a [255] Test pulling with --all-largefiles flag. Also test that the largefiles are downloaded from 'default' instead of 'default-push' when no source is specified (issue3584) $ rm -Rf a-backup $ hg clone -r 1 a a-backup adding changesets adding manifests adding file changes added 2 changesets with 8 changes to 4 files updating to branch default getting changed largefiles 2 largefiles updated, 0 removed 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm "${USERCACHE}"/* $ cd a-backup $ hg pull --all-largefiles --config paths.default-push=bogus/path pulling from $TESTTMP/a (glob) searching for changes adding changesets adding manifests adding file changes added 6 changesets with 16 changes to 8 files (run 'hg update' to get a working copy) 6 largefiles cached redo pull with --lfrev and check it pulls largefiles for the right revs $ hg rollback repository tip rolled back to revision 1 (undo pull) $ hg pull -v --lfrev 'heads(pulled())+min(pulled())' pulling from $TESTTMP/a (glob) searching for changes all local heads known remotely 6 changesets found uncompressed size of bundle content: 1333 (changelog) 1599 (manifests) 254 .hglf/large1 564 .hglf/large3 572 .hglf/sub/large4 182 .hglf/sub2/large6 182 .hglf/sub2/large7 212 normal1 457 normal3 465 sub/normal4 adding changesets adding manifests adding file changes added 6 changesets with 16 changes to 8 files calling hook changegroup.lfiles: hgext.largefiles.reposetup.checkrequireslfiles (run 'hg update' to get a working copy) pulling largefiles for revision 7 found 971fb41e78fea4f8e0ba5244784239371cb00591 in store found 0d6d75887db61b2c7e6c74b5dd8fc6ad50c0cc30 in store found bb3151689acb10f0c3125c560d5e63df914bc1af in store 
pulling largefiles for revision 2 found eb7338044dc27f9bc59b8dd5a246b065ead7a9c4 in store 0 largefiles cached lfpull $ hg lfpull -r : --config largefiles.usercache=usercache-lfpull 2 largefiles cached $ hg lfpull -v -r 4+2 --config largefiles.usercache=usercache-lfpull pulling largefiles for revision 4 found eb7338044dc27f9bc59b8dd5a246b065ead7a9c4 in store found eb7338044dc27f9bc59b8dd5a246b065ead7a9c4 in store pulling largefiles for revision 2 found eb7338044dc27f9bc59b8dd5a246b065ead7a9c4 in store 0 largefiles cached $ ls usercache-lfpull/* | sort usercache-lfpull/1deebade43c8c498a3c8daddac0244dc55d1331d usercache-lfpull/4669e532d5b2c093a78eca010077e708a071bb64 $ cd .. Rebasing between two repositories does not revert largefiles to old revisions (this was a very bad bug that took a lot of work to fix). $ hg clone a d updating to branch default getting changed largefiles 3 largefiles updated, 0 removed 5 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd b $ echo large4-modified > sub/large4 $ echo normal3-modified > normal3 $ hg commit -m "modify normal file and largefile in repo b" Invoking status precommit hook M normal3 M sub/large4 $ cd ../d $ echo large6-modified > sub2/large6 $ echo normal4-modified > sub/normal4 $ hg commit -m "modify normal file largefile in repo d" Invoking status precommit hook M sub/normal4 M sub2/large6 $ cd .. $ hg clone d e updating to branch default getting changed largefiles 3 largefiles updated, 0 removed 5 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd d More rebase testing, but also test that the largefiles are downloaded from 'default-push' when no source is specified (issue3584). (The largefile from the pulled revision is however not downloaded but found in the local cache.) Largefiles are fetched for the new pulled revision, not for existing revisions, rebased or not. $ [ ! 
-f .hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928 ] $ hg pull --rebase --all-largefiles --config paths.default-push=bogus/path --config paths.default=../b pulling from $TESTTMP/b (glob) searching for changes adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files (+1 heads) rebasing 8:f574fb32bb45 "modify normal file largefile in repo d" Invoking status precommit hook M sub/normal4 M sub2/large6 saved backup bundle to $TESTTMP/d/.hg/strip-backup/f574fb32bb45-dd1d9f80-backup.hg (glob) 0 largefiles cached $ [ -f .hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928 ] $ hg log --template '{rev}:{node|short} {desc|firstline}\n' 9:598410d3eb9a modify normal file largefile in repo d 8:a381d2c8c80e modify normal file and largefile in repo b 7:daea875e9014 add/edit more largefiles 6:4355d653f84f edit files yet again 5:9d5af5072dbd edit files again 4:74c02385b94c move files 3:9e8fbc4bce62 copy files 2:51a0ae4d5864 remove files 1:ce8896473775 edit files 0:30d30fe6a5be add files $ hg log -G --template '{rev}:{node|short} {desc|firstline}\n' @ 9:598410d3eb9a modify normal file largefile in repo d | o 8:a381d2c8c80e modify normal file and largefile in repo b | o 7:daea875e9014 add/edit more largefiles | o 6:4355d653f84f edit files yet again | o 5:9d5af5072dbd edit files again | o 4:74c02385b94c move files | o 3:9e8fbc4bce62 copy files | o 2:51a0ae4d5864 remove files | o 1:ce8896473775 edit files | o 0:30d30fe6a5be add files $ cat normal3 normal3-modified $ cat sub/normal4 normal4-modified $ cat sub/large4 large4-modified $ cat sub2/large6 large6-modified $ cat sub2/large7 large7 $ cd ../e $ hg pull ../b pulling from ../b searching for changes adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg rebase rebasing 8:f574fb32bb45 "modify normal file largefile in repo d" Invoking status precommit hook M sub/normal4 M 
sub2/large6 saved backup bundle to $TESTTMP/e/.hg/strip-backup/f574fb32bb45-dd1d9f80-backup.hg (glob) $ hg log --template '{rev}:{node|short} {desc|firstline}\n' 9:598410d3eb9a modify normal file largefile in repo d 8:a381d2c8c80e modify normal file and largefile in repo b 7:daea875e9014 add/edit more largefiles 6:4355d653f84f edit files yet again 5:9d5af5072dbd edit files again 4:74c02385b94c move files 3:9e8fbc4bce62 copy files 2:51a0ae4d5864 remove files 1:ce8896473775 edit files 0:30d30fe6a5be add files $ cat normal3 normal3-modified $ cat sub/normal4 normal4-modified $ cat sub/large4 large4-modified $ cat sub2/large6 large6-modified $ cat sub2/large7 large7 Log on largefiles - same output $ hg log --template '{rev}:{node|short} {desc|firstline}\n' .hglf/sub/large4 8:a381d2c8c80e modify normal file and largefile in repo b 6:4355d653f84f edit files yet again 5:9d5af5072dbd edit files again 4:74c02385b94c move files $ hg log -G --template '{rev}:{node|short} {desc|firstline}\n' .hglf/sub/large4 o 8:a381d2c8c80e modify normal file and largefile in repo b | o 6:4355d653f84f edit files yet again | o 5:9d5af5072dbd edit files again | o 4:74c02385b94c move files | $ hg log --template '{rev}:{node|short} {desc|firstline}\n' sub/large4 8:a381d2c8c80e modify normal file and largefile in repo b 6:4355d653f84f edit files yet again 5:9d5af5072dbd edit files again 4:74c02385b94c move files $ hg log -G --template '{rev}:{node|short} {desc|firstline}\n' .hglf/sub/large4 o 8:a381d2c8c80e modify normal file and largefile in repo b | o 6:4355d653f84f edit files yet again | o 5:9d5af5072dbd edit files again | o 4:74c02385b94c move files | - .hglf only matches largefiles, without .hglf it matches 9 bco sub/normal $ hg log --template '{rev}:{node|short} {desc|firstline}\n' .hglf/sub 8:a381d2c8c80e modify normal file and largefile in repo b 6:4355d653f84f edit files yet again 5:9d5af5072dbd edit files again 4:74c02385b94c move files 1:ce8896473775 edit files 0:30d30fe6a5be add files 
$ hg log -G --template '{rev}:{node|short} {desc|firstline}\n' .hglf/sub o 8:a381d2c8c80e modify normal file and largefile in repo b | o 6:4355d653f84f edit files yet again | o 5:9d5af5072dbd edit files again | o 4:74c02385b94c move files | o 1:ce8896473775 edit files | o 0:30d30fe6a5be add files $ hg log --template '{rev}:{node|short} {desc|firstline}\n' sub 9:598410d3eb9a modify normal file largefile in repo d 8:a381d2c8c80e modify normal file and largefile in repo b 6:4355d653f84f edit files yet again 5:9d5af5072dbd edit files again 4:74c02385b94c move files 1:ce8896473775 edit files 0:30d30fe6a5be add files $ hg log -G --template '{rev}:{node|short} {desc|firstline}\n' sub @ 9:598410d3eb9a modify normal file largefile in repo d | o 8:a381d2c8c80e modify normal file and largefile in repo b | o 6:4355d653f84f edit files yet again | o 5:9d5af5072dbd edit files again | o 4:74c02385b94c move files | o 1:ce8896473775 edit files | o 0:30d30fe6a5be add files - globbing gives same result $ hg log --template '{rev}:{node|short} {desc|firstline}\n' 'glob:sub/*' 9:598410d3eb9a modify normal file largefile in repo d 8:a381d2c8c80e modify normal file and largefile in repo b 6:4355d653f84f edit files yet again 5:9d5af5072dbd edit files again 4:74c02385b94c move files 1:ce8896473775 edit files 0:30d30fe6a5be add files $ hg log -G --template '{rev}:{node|short} {desc|firstline}\n' 'glob:sub/*' @ 9:598410d3eb9a modify normal file largefile in repo d | o 8:a381d2c8c80e modify normal file and largefile in repo b | o 6:4355d653f84f edit files yet again | o 5:9d5af5072dbd edit files again | o 4:74c02385b94c move files | o 1:ce8896473775 edit files | o 0:30d30fe6a5be add files Rollback on largefiles. 
$ echo large4-modified-again > sub/large4 $ hg commit -m "Modify large4 again" Invoking status precommit hook M sub/large4 $ hg rollback repository tip rolled back to revision 9 (undo commit) working directory now based on revision 9 $ hg st M sub/large4 $ hg log --template '{rev}:{node|short} {desc|firstline}\n' 9:598410d3eb9a modify normal file largefile in repo d 8:a381d2c8c80e modify normal file and largefile in repo b 7:daea875e9014 add/edit more largefiles 6:4355d653f84f edit files yet again 5:9d5af5072dbd edit files again 4:74c02385b94c move files 3:9e8fbc4bce62 copy files 2:51a0ae4d5864 remove files 1:ce8896473775 edit files 0:30d30fe6a5be add files $ cat sub/large4 large4-modified-again "update --check" refuses to update with uncommitted changes. $ hg update --check 8 abort: uncommitted changes [255] "update --clean" leaves correct largefiles in working copy, even when there is .orig files from revert in .hglf. $ echo mistake > sub2/large7 $ hg revert sub2/large7 $ cat sub2/large7 large7 $ cat sub2/large7.orig mistake $ test ! -f .hglf/sub2/large7.orig $ hg -q update --clean -r null $ hg update --clean getting changed largefiles 3 largefiles updated, 0 removed 5 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat normal3 normal3-modified $ cat sub/normal4 normal4-modified $ cat sub/large4 large4-modified $ cat sub2/large6 large6-modified $ cat sub2/large7 large7 $ cat sub2/large7.orig mistake $ test ! -f .hglf/sub2/large7.orig verify that largefile .orig file no longer is overwritten on every update -C: $ hg update --clean 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat sub2/large7.orig mistake $ rm sub2/large7.orig Now "update check" is happy. 
$ hg update --check 8 getting changed largefiles 1 largefiles updated, 0 removed 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg update --check getting changed largefiles 1 largefiles updated, 0 removed 2 files updated, 0 files merged, 0 files removed, 0 files unresolved Test removing empty largefiles directories on update $ test -d sub2 && echo "sub2 exists" sub2 exists $ hg update -q null $ test -d sub2 && echo "error: sub2 should not exist anymore" [1] $ hg update -q Test hg remove removes empty largefiles directories $ test -d sub2 && echo "sub2 exists" sub2 exists $ hg remove sub2/* $ test -d sub2 && echo "error: sub2 should not exist anymore" [1] $ hg revert sub2/large6 sub2/large7 "revert" works on largefiles (and normal files too). $ echo hack3 >> normal3 $ echo hack4 >> sub/normal4 $ echo hack4 >> sub/large4 $ rm sub2/large6 $ hg revert sub2/large6 $ hg rm sub2/large6 $ echo new >> sub2/large8 $ hg add --large sub2/large8 # XXX we don't really want to report that we're reverting the standin; # that's just an implementation detail. But I don't see an obvious fix. ;-( $ hg revert sub reverting .hglf/sub/large4 (glob) reverting sub/normal4 (glob) $ hg status M normal3 A sub2/large8 R sub2/large6 ? sub/large4.orig ? sub/normal4.orig $ cat sub/normal4 normal4-modified $ cat sub/large4 large4-modified $ hg revert -a --no-backup undeleting .hglf/sub2/large6 (glob) forgetting .hglf/sub2/large8 (glob) reverting normal3 $ hg status ? sub/large4.orig ? sub/normal4.orig ? sub2/large8 $ cat normal3 normal3-modified $ cat sub2/large6 large6-modified $ rm sub/*.orig sub2/large8 revert some files to an older revision $ hg revert --no-backup -r 8 sub2 reverting .hglf/sub2/large6 (glob) $ cat sub2/large6 large6 $ hg revert --no-backup -C -r '.^' sub2 $ hg revert --no-backup sub2 reverting .hglf/sub2/large6 (glob) $ hg status "verify --large" actually verifies largefiles - Where Do We Come From? What Are We? Where Are We Going? 
$ pwd $TESTTMP/e $ hg paths default = $TESTTMP/d (glob) $ hg verify --large checking changesets checking manifests crosschecking files in changesets and manifests checking files 10 files, 10 changesets, 28 total revisions searching 1 changesets for largefiles verified existence of 3 revisions of 3 largefiles - introduce missing blob in local store repo and make sure that this is caught: $ mv $TESTTMP/d/.hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928 . $ hg verify --large checking changesets checking manifests crosschecking files in changesets and manifests checking files 10 files, 10 changesets, 28 total revisions searching 1 changesets for largefiles changeset 9:598410d3eb9a: sub/large4 references missing $TESTTMP/d/.hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928 (glob) verified existence of 3 revisions of 3 largefiles [1] - introduce corruption and make sure that it is caught when checking content: $ echo '5 cents' > $TESTTMP/d/.hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928 $ hg verify -q --large --lfc changeset 9:598410d3eb9a: sub/large4 references corrupted $TESTTMP/d/.hg/largefiles/e166e74c7303192238d60af5a9c4ce9bef0b7928 (glob) [1] - cleanup $ mv e166e74c7303192238d60af5a9c4ce9bef0b7928 $TESTTMP/d/.hg/largefiles/ - verifying all revisions will fail because we didn't clone all largefiles to d: $ echo 'T-shirt' > $TESTTMP/d/.hg/largefiles/eb7338044dc27f9bc59b8dd5a246b065ead7a9c4 $ hg verify -q --lfa --lfc changeset 0:30d30fe6a5be: large1 references missing $TESTTMP/d/.hg/largefiles/4669e532d5b2c093a78eca010077e708a071bb64 (glob) changeset 0:30d30fe6a5be: sub/large2 references missing $TESTTMP/d/.hg/largefiles/1deebade43c8c498a3c8daddac0244dc55d1331d (glob) changeset 1:ce8896473775: large1 references missing $TESTTMP/d/.hg/largefiles/5f78770c0e77ba4287ad6ef3071c9bf9c379742f (glob) changeset 1:ce8896473775: sub/large2 references corrupted $TESTTMP/d/.hg/largefiles/eb7338044dc27f9bc59b8dd5a246b065ead7a9c4 (glob) changeset 3:9e8fbc4bce62: 
large1 references corrupted $TESTTMP/d/.hg/largefiles/eb7338044dc27f9bc59b8dd5a246b065ead7a9c4 (glob) changeset 4:74c02385b94c: large3 references corrupted $TESTTMP/d/.hg/largefiles/eb7338044dc27f9bc59b8dd5a246b065ead7a9c4 (glob) changeset 4:74c02385b94c: sub/large4 references corrupted $TESTTMP/d/.hg/largefiles/eb7338044dc27f9bc59b8dd5a246b065ead7a9c4 (glob) changeset 5:9d5af5072dbd: large3 references missing $TESTTMP/d/.hg/largefiles/baaf12afde9d8d67f25dab6dced0d2bf77dba47c (glob) changeset 5:9d5af5072dbd: sub/large4 references missing $TESTTMP/d/.hg/largefiles/aeb2210d19f02886dde00dac279729a48471e2f9 (glob) changeset 6:4355d653f84f: large3 references missing $TESTTMP/d/.hg/largefiles/7838695e10da2bb75ac1156565f40a2595fa2fa0 (glob) [1] - cleanup $ rm $TESTTMP/d/.hg/largefiles/eb7338044dc27f9bc59b8dd5a246b065ead7a9c4 $ rm -f .hglf/sub/*.orig Update to revision with missing largefile - and make sure it really is missing $ rm ${USERCACHE}/7838695e10da2bb75ac1156565f40a2595fa2fa0 $ hg up -r 6 getting changed largefiles large3: largefile 7838695e10da2bb75ac1156565f40a2595fa2fa0 not available from file:/*/$TESTTMP/d (glob) 1 largefiles updated, 2 removed 4 files updated, 0 files merged, 2 files removed, 0 files unresolved $ rm normal3 $ echo >> sub/normal4 $ hg ci -m 'commit with missing files' Invoking status precommit hook M sub/normal4 ! large3 ! normal3 created new head $ hg st ! large3 ! normal3 $ hg up -r. 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg st ! large3 ! normal3 $ hg up -Cr. getting changed largefiles large3: largefile 7838695e10da2bb75ac1156565f40a2595fa2fa0 not available from file:/*/$TESTTMP/d (glob) 0 largefiles updated, 0 removed 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg st ! large3 $ hg rollback repository tip rolled back to revision 9 (undo commit) working directory now based on revision 6 Merge with revision with missing largefile - and make sure it tries to fetch it. 
$ hg up -Cqr null $ echo f > f $ hg ci -Am branch adding f Invoking status precommit hook A f created new head $ hg merge -r 6 getting changed largefiles large3: largefile 7838695e10da2bb75ac1156565f40a2595fa2fa0 not available from file:/*/$TESTTMP/d (glob) 1 largefiles updated, 0 removed 4 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg rollback -q $ hg up -Cq Pulling 0 revisions with --all-largefiles should not fetch for all revisions $ hg pull --all-largefiles pulling from $TESTTMP/d (glob) searching for changes no changes found Merging does not revert to old versions of largefiles and also check that merging after having pulled from a non-default remote works correctly. $ cd .. $ hg clone -r 7 e temp adding changesets adding manifests adding file changes added 8 changesets with 24 changes to 10 files updating to branch default getting changed largefiles 3 largefiles updated, 0 removed 5 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg clone temp f updating to branch default getting changed largefiles 3 largefiles updated, 0 removed 5 files updated, 0 files merged, 0 files removed, 0 files unresolved # Delete the largefiles in the largefiles system cache so that we have an # opportunity to test that caching after a pull works. 
$ rm "${USERCACHE}"/* $ cd f $ echo "large4-merge-test" > sub/large4 $ hg commit -m "Modify large4 to test merge" Invoking status precommit hook M sub/large4 # Test --cache-largefiles flag $ hg pull --lfrev 'heads(pulled())' ../e pulling from ../e searching for changes adding changesets adding manifests adding file changes added 2 changesets with 4 changes to 4 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) 2 largefiles cached $ hg merge largefile sub/large4 has a merge conflict ancestor was 971fb41e78fea4f8e0ba5244784239371cb00591 keep (l)ocal d846f26643bfa8ec210be40cc93cc6b7ff1128ea or take (o)ther e166e74c7303192238d60af5a9c4ce9bef0b7928? l getting changed largefiles 1 largefiles updated, 0 removed 3 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg commit -m "Merge repos e and f" Invoking status precommit hook M normal3 M sub/normal4 M sub2/large6 $ cat normal3 normal3-modified $ cat sub/normal4 normal4-modified $ cat sub/large4 large4-merge-test $ cat sub2/large6 large6-modified $ cat sub2/large7 large7 Test status after merging with a branch that introduces a new largefile: $ echo large > large $ hg add --large large $ hg commit -m 'add largefile' Invoking status precommit hook A large $ hg update -q ".^" $ echo change >> normal3 $ hg commit -m 'some change' Invoking status precommit hook M normal3 created new head $ hg merge getting changed largefiles 1 largefiles updated, 0 removed 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg status M large - make sure update of merge with removed largefiles fails as expected $ hg rm sub2/large6 $ hg up -r. abort: outstanding uncommitted merge [255] - revert should be able to revert files introduced in a pending merge $ hg revert --all -r . 
removing .hglf/large (glob) undeleting .hglf/sub2/large6 (glob) Test that a normal file and a largefile with the same name and path cannot coexist. $ rm sub2/large7 $ echo "largeasnormal" > sub2/large7 $ hg add sub2/large7 sub2/large7 already a largefile (glob) Test that transplanting a largefile change works correctly. $ cd .. $ hg clone -r 8 d g adding changesets adding manifests adding file changes added 9 changesets with 26 changes to 10 files updating to branch default getting changed largefiles 3 largefiles updated, 0 removed 5 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd g $ hg transplant -s ../d 598410d3eb9a searching for changes searching for changes adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files $ hg log --template '{rev}:{node|short} {desc|firstline}\n' 9:598410d3eb9a modify normal file largefile in repo d 8:a381d2c8c80e modify normal file and largefile in repo b 7:daea875e9014 add/edit more largefiles 6:4355d653f84f edit files yet again 5:9d5af5072dbd edit files again 4:74c02385b94c move files 3:9e8fbc4bce62 copy files 2:51a0ae4d5864 remove files 1:ce8896473775 edit files 0:30d30fe6a5be add files $ cat normal3 normal3-modified $ cat sub/normal4 normal4-modified $ cat sub/large4 large4-modified $ cat sub2/large6 large6-modified $ cat sub2/large7 large7 Cat a largefile $ hg cat normal3 normal3-modified $ hg cat sub/large4 large4-modified $ rm "${USERCACHE}"/* $ hg cat -r a381d2c8c80e -o cat.out sub/large4 $ cat cat.out large4-modified $ rm cat.out $ hg cat -r a381d2c8c80e normal3 normal3-modified $ hg cat -r '.^' normal3 normal3-modified $ hg cat -r '.^' sub/large4 doesntexist large4-modified doesntexist: no such file in rev a381d2c8c80e $ hg --cwd sub cat -r '.^' large4 large4-modified $ hg --cwd sub cat -r '.^' ../normal3 normal3-modified Cat a standin $ hg cat .hglf/sub/large4 e166e74c7303192238d60af5a9c4ce9bef0b7928 $ hg cat .hglf/normal3 .hglf/normal3: no such file in rev 
598410d3eb9a (glob) [1] Test that renaming a largefile results in correct output for status $ hg rename sub/large4 large4-renamed $ hg commit -m "test rename output" Invoking status precommit hook A large4-renamed R sub/large4 $ cat large4-renamed large4-modified $ cd sub2 $ hg rename large6 large6-renamed $ hg st A sub2/large6-renamed R sub2/large6 $ cd .. Test --normal flag $ dd if=/dev/zero bs=2k count=11k > new-largefile 2> /dev/null $ hg add --normal --large new-largefile abort: --normal cannot be used with --large [255] $ hg add --normal new-largefile new-largefile: up to 69 MB of RAM may be required to manage this file (use 'hg revert new-largefile' to cancel the pending addition) Test explicit commit of switch between normal and largefile - make sure both the add and the remove is committed. $ hg up -qC $ hg forget normal3 large4-renamed $ hg add --large normal3 $ hg add large4-renamed $ hg commit -m 'swap' normal3 large4-renamed Invoking status precommit hook A large4-renamed A normal3 ? new-largefile ? sub2/large6-renamed $ hg mani .hglf/normal3 .hglf/sub2/large6 .hglf/sub2/large7 large4-renamed sub/normal4 $ cd .. 
mercurial-3.7.3/tests/test-churn.t0000644000175000017500000001322312676531525016553 0ustar mpmmpm00000000000000 $ echo "[extensions]" >> $HGRCPATH $ echo "churn=" >> $HGRCPATH create test repository $ hg init repo $ cd repo $ echo a > a $ hg ci -Am adda -u user1 -d 6:00 adding a $ echo b >> a $ echo b > b $ hg ci -m changeba -u user2 -d 9:00 a $ hg ci -Am addb -u user2 -d 9:30 adding b $ echo c >> a $ echo c >> b $ echo c > c $ hg ci -m changeca -u user3 -d 12:00 a $ hg ci -m changecb -u user3 -d 12:15 b $ hg ci -Am addc -u user3 -d 12:30 adding c $ mkdir -p d/e $ echo abc > d/e/f1.txt $ hg ci -Am "add d/e/f1.txt" -u user1 -d 12:45 d/e/f1.txt $ mkdir -p d/g $ echo def > d/g/f2.txt $ hg ci -Am "add d/g/f2.txt" -u user1 -d 13:00 d/g/f2.txt churn separate directories $ cd d $ hg churn e user1 1 *************************************************************** churn all $ hg churn user1 3 *************************************************************** user3 3 *************************************************************** user2 2 ****************************************** churn excluding one dir $ hg churn -X e user3 3 *************************************************************** user1 2 ****************************************** user2 2 ****************************************** churn up to rev 2 $ hg churn -r :2 user2 2 *************************************************************** user1 1 ******************************** $ cd .. 
churn with aliases $ cat > ../aliases < user1 alias1 > user3 alias3 > not-an-alias > EOF churn with .hgchurn $ mv ../aliases .hgchurn $ hg churn skipping malformed alias: not-an-alias alias1 3 ************************************************************** alias3 3 ************************************************************** user2 2 ***************************************** $ rm .hgchurn churn with column specifier $ COLUMNS=40 hg churn user1 3 *********************** user3 3 *********************** user2 2 *************** churn by hour $ hg churn -f '%H' -s 06 1 ***************** 09 2 ********************************* 12 4 ****************************************************************** 13 1 ***************** churn with separated added/removed lines $ hg rm d/g/f2.txt $ hg ci -Am "removed d/g/f2.txt" -u user1 -d 14:00 d/g/f2.txt $ hg churn --diffstat user1 +3/-1 +++++++++++++++++++++++++++++++++++++++++-------------- user3 +3/-0 +++++++++++++++++++++++++++++++++++++++++ user2 +2/-0 +++++++++++++++++++++++++++ churn --diffstat with color $ hg --config extensions.color= churn --config color.mode=ansi \ > --diffstat --color=always user1 +3/-1 \x1b[0;32m+++++++++++++++++++++++++++++++++++++++++\x1b[0m\x1b[0;31m--------------\x1b[0m (esc) user3 +3/-0 \x1b[0;32m+++++++++++++++++++++++++++++++++++++++++\x1b[0m (esc) user2 +2/-0 \x1b[0;32m+++++++++++++++++++++++++++\x1b[0m (esc) changeset number churn $ hg churn -c user1 4 *************************************************************** user3 3 *********************************************** user2 2 ******************************** $ echo 'with space = no-space' >> ../aliases $ echo a >> a $ hg commit -m a -u 'with space' -d 15:00 churn with space in alias $ hg churn --aliases ../aliases -r tip no-space 1 ************************************************************ $ cd .. 
Issue833: ZeroDivisionError $ hg init issue-833 $ cd issue-833 $ touch foo $ hg ci -Am foo adding foo this was failing with a ZeroDivisionError $ hg churn test 0 $ cd .. Ignore trailing or leading spaces in emails $ cd repo $ touch bar $ hg ci -Am'bar' -u 'user4 ' adding bar $ touch foo $ hg ci -Am'foo' -u 'user4 < user4@x.com >' adding foo $ hg log -l2 --template '[{author|email}]\n' [ user4@x.com ] [user4@x.com] $ hg churn -c user1 4 ********************************************************* user3 3 ******************************************* user2 2 ***************************** user4@x.com 2 ***************************** with space 1 ************** Test multibyte sequences in names $ echo bar >> bar $ hg --encoding utf-8 ci -m'changed bar' -u 'El Niño ' $ hg --encoding utf-8 churn -ct '{author|person}' user1 4 ********************************************************** user3 3 ******************************************** user2 2 ***************************** user4 2 ***************************** El Ni\xc3\xb1o 1 *************** (esc) with space 1 *************** Test --template argument, with backwards compatibility $ hg churn -t '{author|user}' user1 4 *************************************************************** user3 3 *********************************************** user2 2 ******************************** nino 1 **************** with 1 **************** 0 user4 0 $ hg churn -T '{author|user}' user1 4 *************************************************************** user3 3 *********************************************** user2 2 ******************************** nino 1 **************** with 1 **************** 0 user4 0 $ hg churn -t 'alltogether' alltogether 11 ********************************************************* $ hg churn -T 'alltogether' alltogether 11 ********************************************************* $ cd .. 
mercurial-3.7.3/tests/test-hghave.t0000644000175000017500000000157512676531525016705 0ustar mpmmpm00000000000000Testing that hghave does not crash when checking features $ hghave --test-features 2>/dev/null Testing hghave extensibility for third party tools $ cat > hghaveaddon.py < import hghave > @hghave.check("custom", "custom hghave feature") > def has_custom(): > return True > EOF (invocation via run-tests.py) $ cat > test-hghaveaddon.t < #require custom > $ echo foo > foo > EOF $ run-tests.py $HGTEST_RUN_TESTS_PURE test-hghaveaddon.t . # Ran 1 tests, 0 skipped, 0 warned, 0 failed. (invocation via command line) $ unset TESTDIR $ hghave custom (terminate with exit code 2 at failure of importing hghaveaddon.py) $ rm hghaveaddon.* $ cat > hghaveaddon.py < importing this file should cause syntax error > EOF $ hghave custom failed to import hghaveaddon.py from '.': invalid syntax (hghaveaddon.py, line 1) [2] mercurial-3.7.3/tests/test-merge-changedelete.t0000644000175000017500000007472212676531525021154 0ustar mpmmpm00000000000000Tests for change/delete conflicts, including: b5605d88dc27: Make ui.prompt repeat on "unrecognized response" again (issue897) 840e2b315c1f: Fix misleading error and prompts during update/merge (issue556) Make sure HGMERGE doesn't interfere with the test $ unset HGMERGE $ status() { > echo "--- status ---" > hg st -A file1 file2 file3 > echo "--- resolve --list ---" > hg resolve --list file1 file2 file3 > echo "--- debugmergestate ---" > hg debugmergestate > for file in file1 file2 file3; do > if [ -f $file ]; then > echo "--- $file ---" > cat $file > else > echo "*** $file does not exist" > fi > done > } $ hg init repo $ cd repo $ echo 1 > file1 $ echo 2 > file2 $ echo 3 > file3 $ hg ci -Am 'added files' adding file1 adding file2 adding file3 $ hg rm file1 $ echo changed >> file2 $ echo changed1 >> file3 $ hg ci -m 'removed file1, changed file2, changed file3' $ hg co 0 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ 
echo changed >> file1 $ hg rm file2 $ echo changed2 >> file3 $ hg ci -m 'changed file1, removed file2, changed file3' created new head Non-interactive merge: $ hg merge -y local changed file1 which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? u remote changed file2 which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u merging file3 warning: conflicts while merging file3! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 3 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ status --- status --- M file2 M file3 C file1 --- resolve --list --- U file1 U file2 U file3 --- debugmergestate --- * version 2 records local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4 other: 10f9a0a634e82080907e62f075ab119cbc565ea6 file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) file: file2 (record type "C", state "u", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) file: file3 (record type "F", state "u", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11) local path: file3 (flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) --- file1 --- 1 changed --- file2 --- 2 changed --- file3 --- 3 <<<<<<< local: 13910f48cf7b - test: changed file1, removed file2, changed file3 changed2 ======= changed1 >>>>>>> other: 10f9a0a634e8 - test: removed file1, changed file2, changed file3 Interactive merge: $ hg co -C 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge --config ui.interactive=true < c > d > EOF local changed file1 which remote deleted use (c)hanged 
version, (d)elete, or leave (u)nresolved? c remote changed file2 which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? d merging file3 warning: conflicts while merging file3! (edit, then use 'hg resolve --mark') 0 files updated, 2 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ status --- status --- file2: * (glob) M file3 C file1 --- resolve --list --- R file1 R file2 U file3 --- debugmergestate --- * version 2 records local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4 other: 10f9a0a634e82080907e62f075ab119cbc565ea6 file: file1 (record type "C", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) file: file2 (record type "C", state "r", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) file: file3 (record type "F", state "u", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11) local path: file3 (flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) --- file1 --- 1 changed *** file2 does not exist --- file3 --- 3 <<<<<<< local: 13910f48cf7b - test: changed file1, removed file2, changed file3 changed2 ======= changed1 >>>>>>> other: 10f9a0a634e8 - test: removed file1, changed file2, changed file3 Interactive merge with bad input: $ hg co -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge --config ui.interactive=true < foo > bar > d > baz > c > EOF local changed file1 which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? foo unrecognized response local changed file1 which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? 
bar unrecognized response local changed file1 which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? d remote changed file2 which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? baz unrecognized response remote changed file2 which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? c merging file3 warning: conflicts while merging file3! (edit, then use 'hg resolve --mark') 0 files updated, 1 files merged, 1 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ status --- status --- M file2 M file3 R file1 --- resolve --list --- R file1 R file2 U file3 --- debugmergestate --- * version 2 records local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4 other: 10f9a0a634e82080907e62f075ab119cbc565ea6 file: file1 (record type "C", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) file: file2 (record type "C", state "r", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) file: file3 (record type "F", state "u", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11) local path: file3 (flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) *** file1 does not exist --- file2 --- 2 changed --- file3 --- 3 <<<<<<< local: 13910f48cf7b - test: changed file1, removed file2, changed file3 changed2 ======= changed1 >>>>>>> other: 10f9a0a634e8 - test: removed file1, changed file2, changed file3 Interactive merge with not enough input: $ hg co -C 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge --config ui.interactive=true < d > EOF local changed file1 which remote deleted use 
(c)hanged version, (d)elete, or leave (u)nresolved? d remote changed file2 which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? merging file3 warning: conflicts while merging file3! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 1 files removed, 2 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ status --- status --- M file2 M file3 R file1 --- resolve --list --- R file1 U file2 U file3 --- debugmergestate --- * version 2 records local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4 other: 10f9a0a634e82080907e62f075ab119cbc565ea6 file: file1 (record type "C", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) file: file2 (record type "C", state "u", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) file: file3 (record type "F", state "u", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11) local path: file3 (flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) *** file1 does not exist --- file2 --- 2 changed --- file3 --- 3 <<<<<<< local: 13910f48cf7b - test: changed file1, removed file2, changed file3 changed2 ======= changed1 >>>>>>> other: 10f9a0a634e8 - test: removed file1, changed file2, changed file3 Choose local versions of files $ hg co -C 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge --tool :local 0 files updated, 3 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ status 2>&1 | tee $TESTTMP/local.status --- status --- file2: * (glob) M file3 C file1 --- resolve --list --- R file1 R file2 R file3 --- debugmergestate --- * version 2 records 
local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4 other: 10f9a0a634e82080907e62f075ab119cbc565ea6 file: file1 (record type "C", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) file: file2 (record type "C", state "r", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) file: file3 (record type "F", state "r", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11) local path: file3 (flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) --- file1 --- 1 changed *** file2 does not exist --- file3 --- 3 changed2 Choose other versions of files $ hg co -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge --tool :other 0 files updated, 2 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) $ status 2>&1 | tee $TESTTMP/other.status --- status --- M file2 M file3 R file1 --- resolve --list --- R file1 R file2 R file3 --- debugmergestate --- * version 2 records local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4 other: 10f9a0a634e82080907e62f075ab119cbc565ea6 file: file1 (record type "C", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) file: file2 (record type "C", state "r", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) file: file3 (record type "F", state "r", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11) local path: file3 (flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: 
file3 (node a2644c43e210356772c7772a8674544a62e06beb) *** file1 does not exist --- file2 --- 2 changed --- file3 --- 3 changed1 Fail $ hg co -C 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge --tool :fail 0 files updated, 0 files merged, 0 files removed, 3 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ status 2>&1 | tee $TESTTMP/fail.status --- status --- M file2 M file3 C file1 --- resolve --list --- U file1 U file2 U file3 --- debugmergestate --- * version 2 records local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4 other: 10f9a0a634e82080907e62f075ab119cbc565ea6 file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) file: file2 (record type "C", state "u", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) file: file3 (record type "F", state "u", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11) local path: file3 (flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) --- file1 --- 1 changed --- file2 --- 2 changed --- file3 --- 3 changed2 Force prompts with no input (should be similar to :fail) $ hg co -C 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge --config ui.interactive=True --tool :prompt local changed file1 which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? remote changed file2 which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? no tool found to merge file3 keep (l)ocal, take (o)ther, or leave (u)nresolved? 
0 files updated, 0 files merged, 0 files removed, 3 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ status 2>&1 | tee $TESTTMP/prompt.status --- status --- M file2 M file3 C file1 --- resolve --list --- U file1 U file2 U file3 --- debugmergestate --- * version 2 records local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4 other: 10f9a0a634e82080907e62f075ab119cbc565ea6 file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) file: file2 (record type "C", state "u", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) file: file3 (record type "F", state "u", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11) local path: file3 (flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) --- file1 --- 1 changed --- file2 --- 2 changed --- file3 --- 3 changed2 $ cmp $TESTTMP/fail.status $TESTTMP/prompt.status || diff -U8 $TESTTMP/fail.status $TESTTMP/prompt.status Force prompts $ hg co -C 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge --tool :prompt local changed file1 which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? u remote changed file2 which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u no tool found to merge file3 keep (l)ocal, take (o)ther, or leave (u)nresolved? u 0 files updated, 0 files merged, 0 files removed, 3 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' 
to abandon [1] $ status --- status --- M file2 M file3 C file1 --- resolve --list --- U file1 U file2 U file3 --- debugmergestate --- * version 2 records local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4 other: 10f9a0a634e82080907e62f075ab119cbc565ea6 file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) file: file2 (record type "C", state "u", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) file: file3 (record type "F", state "u", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11) local path: file3 (flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) --- file1 --- 1 changed --- file2 --- 2 changed --- file3 --- 3 changed2 Choose to merge all files $ hg co -C 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge --tool :merge3 local changed file1 which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? u remote changed file2 which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u merging file3 warning: conflicts while merging file3! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 3 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' 
to abandon [1] $ status --- status --- M file2 M file3 C file1 --- resolve --list --- U file1 U file2 U file3 --- debugmergestate --- * version 2 records local: 13910f48cf7bdb2a0ba6e24b4900e4fdd5739dd4 other: 10f9a0a634e82080907e62f075ab119cbc565ea6 file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) file: file2 (record type "C", state "u", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) file: file3 (record type "F", state "u", hash d5b0a58bc47161b1b8a831084b366f757c4f0b11) local path: file3 (flags "") ancestor path: file3 (node 2661d26c649684b482d10f91960cc3db683c38b4) other path: file3 (node a2644c43e210356772c7772a8674544a62e06beb) --- file1 --- 1 changed --- file2 --- 2 changed --- file3 --- 3 <<<<<<< local: 13910f48cf7b - test: changed file1, removed file2, changed file3 changed2 ||||||| base ======= changed1 >>>>>>> other: 10f9a0a634e8 - test: removed file1, changed file2, changed file3 Exercise transitions between local, other, fail and prompt, and make sure the dirstate stays consistent. (Compare with each other and to the above invocations.) 
$ testtransitions() { > # this traversal order covers every transition > tools="local other prompt local fail other local prompt other fail prompt fail local" > lasttool="merge3" > for tool in $tools; do > echo "=== :$lasttool -> :$tool ===" > ref="$TESTTMP/$tool.status" > hg resolve --unmark --all > hg resolve --tool ":$tool" --all --config ui.interactive=True > status > "$TESTTMP/compare.status" 2>&1 > echo '--- diff of status ---' > if cmp "$TESTTMP/$tool.status" "$TESTTMP/compare.status" || diff -U8 "$TESTTMP/$tool.status" "$TESTTMP/compare.status"; then > echo '(status identical)' > fi > lasttool="$tool" > echo > done > } $ testtransitions === :merge3 -> :local === (no more unresolved files) --- diff of status --- (status identical) === :local -> :other === (no more unresolved files) --- diff of status --- (status identical) === :other -> :prompt === local changed file1 which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? remote changed file2 which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? no tool found to merge file3 keep (l)ocal, take (o)ther, or leave (u)nresolved? --- diff of status --- (status identical) === :prompt -> :local === (no more unresolved files) --- diff of status --- (status identical) === :local -> :fail === --- diff of status --- (status identical) === :fail -> :other === (no more unresolved files) --- diff of status --- (status identical) === :other -> :local === (no more unresolved files) --- diff of status --- (status identical) === :local -> :prompt === local changed file1 which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? remote changed file2 which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? no tool found to merge file3 keep (l)ocal, take (o)ther, or leave (u)nresolved? 
--- diff of status --- (status identical) === :prompt -> :other === (no more unresolved files) --- diff of status --- (status identical) === :other -> :fail === --- diff of status --- (status identical) === :fail -> :prompt === local changed file1 which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? remote changed file2 which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? no tool found to merge file3 keep (l)ocal, take (o)ther, or leave (u)nresolved? --- diff of status --- (status identical) === :prompt -> :fail === --- diff of status --- (status identical) === :fail -> :local === (no more unresolved files) --- diff of status --- (status identical) Non-interactive linear update $ hg co -C 0 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo changed >> file1 $ hg rm file2 $ hg update 1 -y local changed file1 which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? u remote changed file2 which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? 
u 1 files updated, 0 files merged, 0 files removed, 2 files unresolved use 'hg resolve' to retry unresolved file merges [1] $ status --- status --- A file1 C file2 C file3 --- resolve --list --- U file1 U file2 --- debugmergestate --- * version 2 records local: ab57bf49aa276a22d35a473592d4c34b5abc3eff other: 10f9a0a634e82080907e62f075ab119cbc565ea6 file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) file: file2 (record type "C", state "u", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) --- file1 --- 1 changed --- file2 --- 2 changed --- file3 --- 3 changed1 Choose local versions of files $ hg co -C 0 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo changed >> file1 $ hg rm file2 $ hg update 1 --tool :local 1 files updated, 2 files merged, 0 files removed, 0 files unresolved $ status 2>&1 | tee $TESTTMP/local.status --- status --- file2: * (glob) A file1 C file3 --- resolve --list --- R file1 R file2 --- debugmergestate --- * version 2 records local: ab57bf49aa276a22d35a473592d4c34b5abc3eff other: 10f9a0a634e82080907e62f075ab119cbc565ea6 file: file1 (record type "C", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) file: file2 (record type "C", state "r", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) --- file1 --- 1 changed *** file2 does not exist --- file3 --- 3 changed1 Choose other versions of files $ hg co -C 0 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo 
changed >> file1 $ hg rm file2 $ hg update 1 --tool :other 1 files updated, 1 files merged, 1 files removed, 0 files unresolved $ status 2>&1 | tee $TESTTMP/other.status --- status --- file1: * (glob) C file2 C file3 --- resolve --list --- R file1 R file2 --- debugmergestate --- * version 2 records local: ab57bf49aa276a22d35a473592d4c34b5abc3eff other: 10f9a0a634e82080907e62f075ab119cbc565ea6 file: file1 (record type "C", state "r", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) file: file2 (record type "C", state "r", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) *** file1 does not exist --- file2 --- 2 changed --- file3 --- 3 changed1 Fail $ hg co -C 0 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo changed >> file1 $ hg rm file2 $ hg update 1 --tool :fail 1 files updated, 0 files merged, 0 files removed, 2 files unresolved use 'hg resolve' to retry unresolved file merges [1] $ status 2>&1 | tee $TESTTMP/fail.status --- status --- A file1 C file2 C file3 --- resolve --list --- U file1 U file2 --- debugmergestate --- * version 2 records local: ab57bf49aa276a22d35a473592d4c34b5abc3eff other: 10f9a0a634e82080907e62f075ab119cbc565ea6 file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) file: file2 (record type "C", state "u", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) --- file1 --- 1 changed --- file2 --- 2 changed --- file3 --- 3 changed1 Force prompts with no input $ hg co -C 0 3 files updated, 0 
files merged, 0 files removed, 0 files unresolved $ echo changed >> file1 $ hg rm file2 $ hg update 1 --config ui.interactive=True --tool :prompt local changed file1 which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? remote changed file2 which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? 1 files updated, 0 files merged, 0 files removed, 2 files unresolved use 'hg resolve' to retry unresolved file merges [1] $ status 2>&1 | tee $TESTTMP/prompt.status --- status --- A file1 C file2 C file3 --- resolve --list --- U file1 U file2 --- debugmergestate --- * version 2 records local: ab57bf49aa276a22d35a473592d4c34b5abc3eff other: 10f9a0a634e82080907e62f075ab119cbc565ea6 file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) file: file2 (record type "C", state "u", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) --- file1 --- 1 changed --- file2 --- 2 changed --- file3 --- 3 changed1 $ cmp $TESTTMP/fail.status $TESTTMP/prompt.status || diff -U8 $TESTTMP/fail.status $TESTTMP/prompt.status Choose to merge all files $ hg co -C 0 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo changed >> file1 $ hg rm file2 $ hg update 1 --tool :merge3 local changed file1 which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? u remote changed file2 which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? 
u 1 files updated, 0 files merged, 0 files removed, 2 files unresolved use 'hg resolve' to retry unresolved file merges [1] $ status --- status --- A file1 C file2 C file3 --- resolve --list --- U file1 U file2 --- debugmergestate --- * version 2 records local: ab57bf49aa276a22d35a473592d4c34b5abc3eff other: 10f9a0a634e82080907e62f075ab119cbc565ea6 file: file1 (record type "C", state "u", hash 60b27f004e454aca81b0480209cce5081ec52390) local path: file1 (flags "") ancestor path: file1 (node b8e02f6433738021a065f94175c7cd23db5f05be) other path: file1 (node null) file: file2 (record type "C", state "u", hash null) local path: file2 (flags "") ancestor path: file2 (node 5d9299349fc01ddd25d0070d149b124d8f10411e) other path: file2 (node e7c1328648519852e723de86c0c0525acd779257) --- file1 --- 1 changed --- file2 --- 2 changed --- file3 --- 3 changed1 Test transitions between different merge tools $ testtransitions === :merge3 -> :local === (no more unresolved files) --- diff of status --- (status identical) === :local -> :other === (no more unresolved files) --- diff of status --- (status identical) === :other -> :prompt === local changed file1 which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? remote changed file2 which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? --- diff of status --- (status identical) === :prompt -> :local === (no more unresolved files) --- diff of status --- (status identical) === :local -> :fail === --- diff of status --- (status identical) === :fail -> :other === (no more unresolved files) --- diff of status --- (status identical) === :other -> :local === (no more unresolved files) --- diff of status --- (status identical) === :local -> :prompt === local changed file1 which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? remote changed file2 which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? 
--- diff of status --- (status identical) === :prompt -> :other === (no more unresolved files) --- diff of status --- (status identical) === :other -> :fail === --- diff of status --- (status identical) === :fail -> :prompt === local changed file1 which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? remote changed file2 which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? --- diff of status --- (status identical) === :prompt -> :fail === --- diff of status --- (status identical) === :fail -> :local === (no more unresolved files) --- diff of status --- (status identical) mercurial-3.7.3/tests/test-mq-header-from.t0000644000175000017500000004125612676531525020247 0ustar mpmmpm00000000000000 $ echo "[extensions]" >> $HGRCPATH $ echo "mq=" >> $HGRCPATH $ echo "[diff]" >> $HGRCPATH $ echo "nodates=true" >> $HGRCPATH $ catlog() { > cat .hg/patches/$1.patch | sed -e "s/^diff \-r [0-9a-f]* /diff -r ... /" \ > -e "s/^\(# Parent \).*/\1/" > hg log --template "{rev}: {desc} - {author}\n" > } $ runtest() { > echo ==== init > hg init a > cd a > hg qinit > > > echo ==== qnew -U > hg qnew -U 1.patch > catlog 1 > > echo ==== qref > echo "1" >1 > hg add > hg qref > catlog 1 > > echo ==== qref -u > hg qref -u mary > catlog 1 > > echo ==== qnew > hg qnew 2.patch > echo "2" >2 > hg add > hg qref > catlog 2 > > echo ==== qref -u > hg qref -u jane > catlog 2 > > > echo ==== qnew -U -m > hg qnew -U -m "Three" 3.patch > catlog 3 > > echo ==== qref > echo "3" >3 > hg add > hg qref > catlog 3 > > echo ==== qref -m > hg qref -m "Drei" > catlog 3 > > echo ==== qref -u > hg qref -u mary > catlog 3 > > echo ==== qref -u -m > hg qref -u maria -m "Three (again)" > catlog 3 > > echo ==== qnew -m > hg qnew -m "Four" 4.patch > echo "4" >4of t > hg add > hg qref > catlog 4 > > echo ==== qref -u > hg qref -u jane > catlog 4 > > > echo ==== qnew with HG header > hg qnew --config 'mq.plain=true' 5.patch > hg qpop > echo "# HG changeset patch" 
>>.hg/patches/5.patch > echo "# User johndoe" >>.hg/patches/5.patch > hg qpush 2>&1 | grep 'now at' > catlog 5 > > echo ==== hg qref > echo "5" >5 > hg add > hg qref > catlog 5 > > echo ==== hg qref -U > hg qref -U > catlog 5 > > echo ==== hg qref -u > hg qref -u johndeere > catlog 5 > > > echo ==== qnew with plain header > hg qnew --config 'mq.plain=true' -U 6.patch > hg qpop > hg qpush 2>&1 | grep 'now at' > catlog 6 > > echo ==== hg qref > echo "6" >6 > hg add > hg qref > catlog 6 > > echo ==== hg qref -U > hg qref -U > catlog 6 > > echo ==== hg qref -u > hg qref -u johndeere > catlog 6 > > > echo ==== "qpop -a / qpush -a" > hg qpop -a > hg qpush -a > hg log --template "{rev}: {desc} - {author}\n" > } ======= plain headers $ echo "[mq]" >> $HGRCPATH $ echo "plain=true" >> $HGRCPATH $ mkdir sandbox $ (cd sandbox ; runtest) ==== init ==== qnew -U From: test 0: [mq]: 1.patch - test ==== qref adding 1 From: test diff -r ... 1 --- /dev/null +++ b/1 @@ -0,0 +1,1 @@ +1 0: [mq]: 1.patch - test ==== qref -u From: mary diff -r ... 1 --- /dev/null +++ b/1 @@ -0,0 +1,1 @@ +1 0: [mq]: 1.patch - mary ==== qnew adding 2 diff -r ... 2 --- /dev/null +++ b/2 @@ -0,0 +1,1 @@ +2 1: [mq]: 2.patch - test 0: [mq]: 1.patch - mary ==== qref -u From: jane diff -r ... 2 --- /dev/null +++ b/2 @@ -0,0 +1,1 @@ +2 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qnew -U -m From: test Three 2: Three - test 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qref adding 3 From: test Three diff -r ... 3 --- /dev/null +++ b/3 @@ -0,0 +1,1 @@ +3 2: Three - test 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qref -m From: test Drei diff -r ... 3 --- /dev/null +++ b/3 @@ -0,0 +1,1 @@ +3 2: Drei - test 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qref -u From: mary Drei diff -r ... 3 --- /dev/null +++ b/3 @@ -0,0 +1,1 @@ +3 2: Drei - mary 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qref -u -m From: maria Three (again) diff -r ... 
3 --- /dev/null +++ b/3 @@ -0,0 +1,1 @@ +3 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qnew -m adding 4of Four diff -r ... 4of --- /dev/null +++ b/4of @@ -0,0 +1,1 @@ +4 t 3: Four - test 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qref -u From: jane Four diff -r ... 4of --- /dev/null +++ b/4of @@ -0,0 +1,1 @@ +4 t 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qnew with HG header popping 5.patch now at: 4.patch now at: 5.patch # HG changeset patch # User johndoe 4: imported patch 5.patch - johndoe 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== hg qref adding 5 # HG changeset patch # User johndoe # Parent diff -r ... 5 --- /dev/null +++ b/5 @@ -0,0 +1,1 @@ +5 4: [mq]: 5.patch - johndoe 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== hg qref -U # HG changeset patch # User test # Parent diff -r ... 5 --- /dev/null +++ b/5 @@ -0,0 +1,1 @@ +5 4: [mq]: 5.patch - test 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== hg qref -u # HG changeset patch # User johndeere # Parent diff -r ... 5 --- /dev/null +++ b/5 @@ -0,0 +1,1 @@ +5 4: [mq]: 5.patch - johndeere 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qnew with plain header popping 6.patch now at: 5.patch now at: 6.patch From: test 5: imported patch 6.patch - test 4: [mq]: 5.patch - johndeere 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== hg qref adding 6 From: test diff -r ... 6 --- /dev/null +++ b/6 @@ -0,0 +1,1 @@ +6 5: [mq]: 6.patch - test 4: [mq]: 5.patch - johndeere 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== hg qref -U From: test diff -r ... 
6 --- /dev/null +++ b/6 @@ -0,0 +1,1 @@ +6 5: [mq]: 6.patch - test 4: [mq]: 5.patch - johndeere 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== hg qref -u From: johndeere diff -r ... 6 --- /dev/null +++ b/6 @@ -0,0 +1,1 @@ +6 5: [mq]: 6.patch - johndeere 4: [mq]: 5.patch - johndeere 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qpop -a / qpush -a popping 6.patch popping 5.patch popping 4.patch popping 3.patch popping 2.patch popping 1.patch patch queue now empty applying 1.patch applying 2.patch applying 3.patch applying 4.patch applying 5.patch applying 6.patch now at: 6.patch 5: imported patch 6.patch - johndeere 4: imported patch 5.patch - johndeere 3: Four - jane 2: Three (again) - maria 1: imported patch 2.patch - jane 0: imported patch 1.patch - mary $ rm -r sandbox ======= hg headers $ echo "plain=false" >> $HGRCPATH $ mkdir sandbox $ (cd sandbox ; runtest) ==== init ==== qnew -U # HG changeset patch # User test # Parent 0: [mq]: 1.patch - test ==== qref adding 1 # HG changeset patch # User test # Parent diff -r ... 1 --- /dev/null +++ b/1 @@ -0,0 +1,1 @@ +1 0: [mq]: 1.patch - test ==== qref -u # HG changeset patch # User mary # Parent diff -r ... 1 --- /dev/null +++ b/1 @@ -0,0 +1,1 @@ +1 0: [mq]: 1.patch - mary ==== qnew adding 2 # HG changeset patch # Parent diff -r ... 2 --- /dev/null +++ b/2 @@ -0,0 +1,1 @@ +2 1: [mq]: 2.patch - test 0: [mq]: 1.patch - mary ==== qref -u # HG changeset patch # User jane # Parent diff -r ... 2 --- /dev/null +++ b/2 @@ -0,0 +1,1 @@ +2 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qnew -U -m # HG changeset patch # User test # Parent Three 2: Three - test 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qref adding 3 # HG changeset patch # User test # Parent Three diff -r ... 
3 --- /dev/null +++ b/3 @@ -0,0 +1,1 @@ +3 2: Three - test 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qref -m # HG changeset patch # User test # Parent Drei diff -r ... 3 --- /dev/null +++ b/3 @@ -0,0 +1,1 @@ +3 2: Drei - test 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qref -u # HG changeset patch # User mary # Parent Drei diff -r ... 3 --- /dev/null +++ b/3 @@ -0,0 +1,1 @@ +3 2: Drei - mary 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qref -u -m # HG changeset patch # User maria # Parent Three (again) diff -r ... 3 --- /dev/null +++ b/3 @@ -0,0 +1,1 @@ +3 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qnew -m adding 4of # HG changeset patch # Parent Four diff -r ... 4of --- /dev/null +++ b/4of @@ -0,0 +1,1 @@ +4 t 3: Four - test 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qref -u # HG changeset patch # User jane # Parent Four diff -r ... 4of --- /dev/null +++ b/4of @@ -0,0 +1,1 @@ +4 t 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qnew with HG header popping 5.patch now at: 4.patch now at: 5.patch # HG changeset patch # User johndoe 4: imported patch 5.patch - johndoe 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== hg qref adding 5 # HG changeset patch # User johndoe # Parent diff -r ... 5 --- /dev/null +++ b/5 @@ -0,0 +1,1 @@ +5 4: [mq]: 5.patch - johndoe 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== hg qref -U # HG changeset patch # User test # Parent diff -r ... 5 --- /dev/null +++ b/5 @@ -0,0 +1,1 @@ +5 4: [mq]: 5.patch - test 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== hg qref -u # HG changeset patch # User johndeere # Parent diff -r ... 
5 --- /dev/null +++ b/5 @@ -0,0 +1,1 @@ +5 4: [mq]: 5.patch - johndeere 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qnew with plain header popping 6.patch now at: 5.patch now at: 6.patch From: test 5: imported patch 6.patch - test 4: [mq]: 5.patch - johndeere 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== hg qref adding 6 From: test diff -r ... 6 --- /dev/null +++ b/6 @@ -0,0 +1,1 @@ +6 5: [mq]: 6.patch - test 4: [mq]: 5.patch - johndeere 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== hg qref -U From: test diff -r ... 6 --- /dev/null +++ b/6 @@ -0,0 +1,1 @@ +6 5: [mq]: 6.patch - test 4: [mq]: 5.patch - johndeere 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== hg qref -u From: johndeere diff -r ... 6 --- /dev/null +++ b/6 @@ -0,0 +1,1 @@ +6 5: [mq]: 6.patch - johndeere 4: [mq]: 5.patch - johndeere 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qpop -a / qpush -a popping 6.patch popping 5.patch popping 4.patch popping 3.patch popping 2.patch popping 1.patch patch queue now empty applying 1.patch applying 2.patch applying 3.patch applying 4.patch applying 5.patch applying 6.patch now at: 6.patch 5: imported patch 6.patch - johndeere 4: imported patch 5.patch - johndeere 3: Four - jane 2: Three (again) - maria 1: imported patch 2.patch - jane 0: imported patch 1.patch - mary $ rm -r sandbox $ runtest ==== init ==== qnew -U # HG changeset patch # User test # Parent 0: [mq]: 1.patch - test ==== qref adding 1 # HG changeset patch # User test # Parent diff -r ... 1 --- /dev/null +++ b/1 @@ -0,0 +1,1 @@ +1 0: [mq]: 1.patch - test ==== qref -u # HG changeset patch # User mary # Parent diff -r ... 1 --- /dev/null +++ b/1 @@ -0,0 +1,1 @@ +1 0: [mq]: 1.patch - mary ==== qnew adding 2 # HG changeset patch # Parent diff -r ... 
2 --- /dev/null +++ b/2 @@ -0,0 +1,1 @@ +2 1: [mq]: 2.patch - test 0: [mq]: 1.patch - mary ==== qref -u # HG changeset patch # User jane # Parent diff -r ... 2 --- /dev/null +++ b/2 @@ -0,0 +1,1 @@ +2 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qnew -U -m # HG changeset patch # User test # Parent Three 2: Three - test 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qref adding 3 # HG changeset patch # User test # Parent Three diff -r ... 3 --- /dev/null +++ b/3 @@ -0,0 +1,1 @@ +3 2: Three - test 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qref -m # HG changeset patch # User test # Parent Drei diff -r ... 3 --- /dev/null +++ b/3 @@ -0,0 +1,1 @@ +3 2: Drei - test 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qref -u # HG changeset patch # User mary # Parent Drei diff -r ... 3 --- /dev/null +++ b/3 @@ -0,0 +1,1 @@ +3 2: Drei - mary 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qref -u -m # HG changeset patch # User maria # Parent Three (again) diff -r ... 3 --- /dev/null +++ b/3 @@ -0,0 +1,1 @@ +3 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qnew -m adding 4of # HG changeset patch # Parent Four diff -r ... 4of --- /dev/null +++ b/4of @@ -0,0 +1,1 @@ +4 t 3: Four - test 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qref -u # HG changeset patch # User jane # Parent Four diff -r ... 4of --- /dev/null +++ b/4of @@ -0,0 +1,1 @@ +4 t 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qnew with HG header popping 5.patch now at: 4.patch now at: 5.patch # HG changeset patch # User johndoe 4: imported patch 5.patch - johndoe 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== hg qref adding 5 # HG changeset patch # User johndoe # Parent diff -r ... 
5 --- /dev/null +++ b/5 @@ -0,0 +1,1 @@ +5 4: [mq]: 5.patch - johndoe 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== hg qref -U # HG changeset patch # User test # Parent diff -r ... 5 --- /dev/null +++ b/5 @@ -0,0 +1,1 @@ +5 4: [mq]: 5.patch - test 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== hg qref -u # HG changeset patch # User johndeere # Parent diff -r ... 5 --- /dev/null +++ b/5 @@ -0,0 +1,1 @@ +5 4: [mq]: 5.patch - johndeere 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qnew with plain header popping 6.patch now at: 5.patch now at: 6.patch From: test 5: imported patch 6.patch - test 4: [mq]: 5.patch - johndeere 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== hg qref adding 6 From: test diff -r ... 6 --- /dev/null +++ b/6 @@ -0,0 +1,1 @@ +6 5: [mq]: 6.patch - test 4: [mq]: 5.patch - johndeere 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== hg qref -U From: test diff -r ... 6 --- /dev/null +++ b/6 @@ -0,0 +1,1 @@ +6 5: [mq]: 6.patch - test 4: [mq]: 5.patch - johndeere 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== hg qref -u From: johndeere diff -r ... 6 --- /dev/null +++ b/6 @@ -0,0 +1,1 @@ +6 5: [mq]: 6.patch - johndeere 4: [mq]: 5.patch - johndeere 3: Four - jane 2: Three (again) - maria 1: [mq]: 2.patch - jane 0: [mq]: 1.patch - mary ==== qpop -a / qpush -a popping 6.patch popping 5.patch popping 4.patch popping 3.patch popping 2.patch popping 1.patch patch queue now empty applying 1.patch applying 2.patch applying 3.patch applying 4.patch applying 5.patch applying 6.patch now at: 6.patch 5: imported patch 6.patch - johndeere 4: imported patch 5.patch - johndeere 3: Four - jane 2: Three (again) - maria 1: imported patch 2.patch - jane 0: imported patch 1.patch - mary $ cd .. 
mercurial-3.7.3/tests/test-revlog-packentry.t0000644000175000017500000000111712676531525020727 0ustar mpmmpm00000000000000 $ hg init repo $ cd repo $ touch foo $ hg ci -Am 'add foo' adding foo $ hg up -C null 0 files updated, 0 files merged, 1 files removed, 0 files unresolved this should be stored as a delta against rev 0 $ echo foo bar baz > foo $ hg ci -Am 'add foo again' adding foo created new head $ hg debugindex foo rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 0 ..... 0 b80de5d13875 000000000000 000000000000 (re) 1 0 13 ..... 1 0376abec49b8 000000000000 000000000000 (re) $ cd .. mercurial-3.7.3/tests/test-histedit-drop.t0000644000175000017500000001026512676531525020216 0ustar mpmmpm00000000000000 $ . "$TESTDIR/histedit-helpers.sh" $ cat >> $HGRCPATH < [extensions] > histedit= > EOF $ initrepo () > { > hg init r > cd r > for x in a b c d e f ; do > echo $x > $x > hg add $x > hg ci -m $x > done > } $ initrepo log before edit $ hg log --graph @ changeset: 5:652413bf663e | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: f | o changeset: 4:e860deea161a | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: e | o changeset: 3:055a42cdd887 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: d | o changeset: 2:177f92b77385 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: c | o changeset: 1:d2ae7f538514 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 0:cb9a9f314b8b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a edit the history $ hg histedit 177f92b77385 --commands - 2>&1 << EOF | fixbundle > drop 177f92b77385 c > pick e860deea161a e > pick 652413bf663e f > pick 055a42cdd887 d > EOF 0 files updated, 0 files merged, 4 files removed, 0 files unresolved log after edit $ hg log --graph @ changeset: 4:f518305ce889 | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: d | o changeset: 3:a4f7421b80f7 | user: test | date: Thu Jan 01 00:00:00 1970 
+0000 | summary: f | o changeset: 2:ee283cb5f2d5 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: e | o changeset: 1:d2ae7f538514 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 0:cb9a9f314b8b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a Check histedit_source $ hg log --debug --rev f518305ce889 changeset: 4:f518305ce889c07cb5bd05522176d75590ef3324 tag: tip phase: draft parent: 3:a4f7421b80f79fcc59fff01bcbf4a53d127dd6d3 parent: -1:0000000000000000000000000000000000000000 manifest: 4:d3d4f51c157ff242c32ff745d4799aaa26ccda44 user: test date: Thu Jan 01 00:00:00 1970 +0000 files+: d extra: branch=default extra: histedit_source=055a42cdd88768532f9cf79daa407fc8d138de9b description: d manifest after edit $ hg manifest a b d e f Drop the last changeset $ hg histedit ee283cb5f2d5 --commands - 2>&1 << EOF | fixbundle > pick ee283cb5f2d5 e > pick a4f7421b80f7 f > drop f518305ce889 d > EOF 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg log --graph @ changeset: 3:a4f7421b80f7 | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: f | o changeset: 2:ee283cb5f2d5 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: e | o changeset: 1:d2ae7f538514 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 0:cb9a9f314b8b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a $ hg histedit cb9a9f314b8b --commands - 2>&1 << EOF | fixbundle > pick cb9a9f314b8b a > pick ee283cb5f2d5 e > EOF hg: parse error: missing rules for changeset a4f7421b80f7 (use "drop a4f7421b80f7" to discard, see also: "hg help -e histedit.config") $ hg --config histedit.dropmissing=True histedit cb9a9f314b8b --commands - 2>&1 << EOF | fixbundle > pick cb9a9f314b8b a > pick ee283cb5f2d5 e > EOF 0 files updated, 0 files merged, 3 files removed, 0 files unresolved $ hg log --graph @ changeset: 1:e99c679bf03e | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | 
summary: e | o changeset: 0:cb9a9f314b8b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a mercurial-3.7.3/tests/test-gendoc.t0000644000175000017500000000401612676531525016673 0ustar mpmmpm00000000000000#require docutils #require gettext Test document extraction $ HGENCODING=UTF-8 $ export HGENCODING $ { echo C; ls "$TESTDIR/../i18n"/*.po | sort; } | while read PO; do > LOCALE=`basename "$PO" .po` > echo > echo "% extracting documentation from $LOCALE" > echo ".. -*- coding: utf-8 -*-" > gendoc-$LOCALE.txt > echo "" >> gendoc-$LOCALE.txt > LANGUAGE=$LOCALE python "$TESTDIR/../doc/gendoc.py" >> gendoc-$LOCALE.txt 2> /dev/null || exit > > if [ $LOCALE != C ]; then > cmp -s gendoc-C.txt gendoc-$LOCALE.txt && echo '** NOTHING TRANSLATED **' > fi > > echo "checking for parse errors" > python "$TESTDIR/../doc/docchecker" gendoc-$LOCALE.txt > # We call runrst without adding "--halt warning" to make it report > # all errors instead of stopping on the first one. > python "$TESTDIR/../doc/runrst" html gendoc-$LOCALE.txt /dev/null > done % extracting documentation from C checking for parse errors % extracting documentation from da checking for parse errors % extracting documentation from de checking for parse errors Die Dateien werden dem Projektarchiv beim n\xc3\xa4chsten \xc3\x9cbernehmen (commit) hinzugef\xc3\xbcgt. Um dies vorher r\xc3\xbcckg\xc3\xa4ngig zu machen, siehe:hg:`forget`. 
(esc) warning: please have a space before :hg: % extracting documentation from el checking for parse errors % extracting documentation from fr checking for parse errors % extracting documentation from it checking for parse errors % extracting documentation from ja checking for parse errors % extracting documentation from pt_BR checking for parse errors % extracting documentation from ro checking for parse errors % extracting documentation from ru checking for parse errors % extracting documentation from sv checking for parse errors % extracting documentation from zh_CN checking for parse errors % extracting documentation from zh_TW checking for parse errors mercurial-3.7.3/tests/test-rebase-bookmarks.t0000644000175000017500000001062512676531525020666 0ustar mpmmpm00000000000000 $ cat >> $HGRCPATH < [extensions] > rebase= > > [phases] > publish=False > > [alias] > tglog = log -G --template "{rev}: '{desc}' bookmarks: {bookmarks}\n" > EOF Create a repo with several bookmarks $ hg init a $ cd a $ echo a > a $ hg ci -Am A adding a $ echo b > b $ hg ci -Am B adding b $ hg book 'X' $ hg book 'Y' $ echo c > c $ hg ci -Am C adding c $ hg book 'Z' $ hg up -q 0 $ echo d > d $ hg ci -Am D adding d created new head $ hg book W $ hg tglog @ 3: 'D' bookmarks: W | | o 2: 'C' bookmarks: Y Z | | | o 1: 'B' bookmarks: X |/ o 0: 'A' bookmarks: Move only rebased bookmarks $ cd .. $ hg clone -q a a1 $ cd a1 $ hg up -q Z Test deleting divergent bookmarks from dest (issue3685) $ hg book -r 3 Z@diverge ... 
and also test that bookmarks not on dest or not being moved aren't deleted $ hg book -r 3 X@diverge $ hg book -r 0 Y@diverge $ hg tglog o 3: 'D' bookmarks: W X@diverge Z@diverge | | @ 2: 'C' bookmarks: Y Z | | | o 1: 'B' bookmarks: X |/ o 0: 'A' bookmarks: Y@diverge $ hg rebase -s Y -d 3 rebasing 2:49cb3485fa0c "C" (Y Z) saved backup bundle to $TESTTMP/a1/.hg/strip-backup/49cb3485fa0c-126f3e97-backup.hg (glob) $ hg tglog @ 3: 'C' bookmarks: Y Z | o 2: 'D' bookmarks: W X@diverge | | o 1: 'B' bookmarks: X |/ o 0: 'A' bookmarks: Y@diverge Do not try to keep active but deleted divergent bookmark $ cd .. $ hg clone -q a a4 $ cd a4 $ hg up -q 2 $ hg book W@diverge $ hg rebase -s W -d . rebasing 3:41acb9dca9eb "D" (tip W) saved backup bundle to $TESTTMP/a4/.hg/strip-backup/41acb9dca9eb-b35a6a63-backup.hg (glob) $ hg bookmarks W 3:0d3554f74897 X 1:6c81ed0049f8 Y 2:49cb3485fa0c Z 2:49cb3485fa0c Keep bookmarks to the correct rebased changeset $ cd .. $ hg clone -q a a2 $ cd a2 $ hg up -q Z $ hg rebase -s 1 -d 3 rebasing 1:6c81ed0049f8 "B" (X) rebasing 2:49cb3485fa0c "C" (Y Z) saved backup bundle to $TESTTMP/a2/.hg/strip-backup/6c81ed0049f8-a687065f-backup.hg (glob) $ hg tglog @ 3: 'C' bookmarks: Y Z | o 2: 'B' bookmarks: X | o 1: 'D' bookmarks: W | o 0: 'A' bookmarks: Keep active bookmark on the correct changeset $ cd .. 
$ hg clone -q a a3 $ cd a3 $ hg up -q X $ hg rebase -d W rebasing 1:6c81ed0049f8 "B" (X) rebasing 2:49cb3485fa0c "C" (Y Z) saved backup bundle to $TESTTMP/a3/.hg/strip-backup/6c81ed0049f8-a687065f-backup.hg (glob) $ hg tglog o 3: 'C' bookmarks: Y Z | @ 2: 'B' bookmarks: X | o 1: 'D' bookmarks: W | o 0: 'A' bookmarks: $ hg bookmarks W 1:41acb9dca9eb * X 2:e926fccfa8ec Y 3:3d5fa227f4b5 Z 3:3d5fa227f4b5 rebase --continue with bookmarks present (issue3802) $ hg up 2 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (leaving bookmark X) $ echo 'C' > c $ hg add c $ hg ci -m 'other C' created new head $ hg up 3 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg rebase rebasing 3:3d5fa227f4b5 "C" (Y Z) merging c warning: conflicts while merging c! (edit, then use 'hg resolve --mark') unresolved conflicts (see hg resolve, then hg rebase --continue) [1] $ echo 'c' > c $ hg resolve --mark c (no more unresolved files) continue: hg rebase --continue $ hg rebase --continue rebasing 3:3d5fa227f4b5 "C" (Y Z) saved backup bundle to $TESTTMP/a3/.hg/strip-backup/3d5fa227f4b5-c6ea2371-backup.hg (glob) $ hg tglog @ 4: 'C' bookmarks: Y Z | o 3: 'other C' bookmarks: | o 2: 'B' bookmarks: X | o 1: 'D' bookmarks: W | o 0: 'A' bookmarks: ensure that bookmarks given the names of revset functions can be used as --rev arguments (issue3950) $ hg update -q 3 $ echo bimble > bimble $ hg add bimble $ hg commit -q -m 'bisect' $ echo e >> bimble $ hg ci -m bisect2 $ echo e >> bimble $ hg ci -m bisect3 $ hg book bisect $ hg update -q Y $ hg rebase -r '"bisect"^^::"bisect"^' -r bisect -d Z rebasing 5:345c90f326a4 "bisect" rebasing 6:f677a2907404 "bisect2" rebasing 7:325c16001345 "bisect3" (tip bisect) saved backup bundle to $TESTTMP/a3/.hg/strip-backup/345c90f326a4-b4840586-backup.hg (glob) mercurial-3.7.3/tests/test-rebase-mq.t0000644000175000017500000001560012676531525017311 0ustar mpmmpm00000000000000 $ cat >> $HGRCPATH < [extensions] > rebase= > mq= > > 
[mq] > plain=true > > [alias] > tglog = log -G --template "{rev}: '{desc}' tags: {tags}\n" > EOF $ hg init a $ cd a $ hg qinit -c $ echo c1 > f $ hg add f $ hg ci -m C1 $ echo r1 > f $ hg ci -m R1 $ hg up -q 0 $ hg qnew f.patch -d '1 0' $ echo mq1 > f $ hg qref -m P0 $ hg qnew f2.patch $ echo mq2 > f $ hg qref -m P1 -d '2 0' $ hg tglog @ 3: 'P1' tags: f2.patch qtip tip | o 2: 'P0' tags: f.patch qbase | | o 1: 'R1' tags: |/ o 0: 'C1' tags: qparent Rebase - try to rebase on an applied mq patch: $ hg rebase -s 1 -d 3 abort: cannot rebase onto an applied mq patch [255] Rebase - same thing, but mq patch is default dest: $ hg up -q 1 $ hg rebase abort: cannot rebase onto an applied mq patch [255] $ hg up -q qtip Rebase - generate a conflict: $ hg rebase -s 2 -d 1 rebasing 2:3504f44bffc0 "P0" (f.patch qbase) merging f warning: conflicts while merging f! (edit, then use 'hg resolve --mark') unresolved conflicts (see hg resolve, then hg rebase --continue) [1] Fix the 1st conflict: $ echo mq1r1 > f $ hg resolve -m f (no more unresolved files) continue: hg rebase --continue $ hg rebase -c rebasing 2:3504f44bffc0 "P0" (f.patch qbase) rebasing 3:929394423cd3 "P1" (f2.patch qtip tip) merging f warning: conflicts while merging f! 
(edit, then use 'hg resolve --mark') unresolved conflicts (see hg resolve, then hg rebase --continue) [1] Fix the 2nd conflict: $ echo mq1r1mq2 > f $ hg resolve -m f (no more unresolved files) continue: hg rebase --continue $ hg rebase -c already rebased 2:3504f44bffc0 "P0" (f.patch qbase) as ebe9914c0d1c rebasing 3:929394423cd3 "P1" (f2.patch qtip) saved backup bundle to $TESTTMP/a/.hg/strip-backup/3504f44bffc0-30595b40-backup.hg (glob) $ hg tglog @ 3: 'P1' tags: f2.patch qtip tip | o 2: 'P0' tags: f.patch qbase | o 1: 'R1' tags: qparent | o 0: 'C1' tags: $ hg up -q qbase $ cat f mq1r1 $ cat .hg/patches/f.patch # HG changeset patch # User test # Date 1 0 # Thu Jan 01 00:00:01 1970 +0000 # Node ID ebe9914c0d1c3f60096e952fa4dbb3d377dea3ab # Parent bac9ed9960d8992bcad75864a879fa76cadaf1b0 P0 diff -r bac9ed9960d8 -r ebe9914c0d1c f --- a/f Thu Jan 01 00:00:00 1970 +0000 +++ b/f Thu Jan 01 00:00:01 1970 +0000 @@ -1,1 +1,1 @@ -r1 +mq1r1 Update to qtip: $ hg up -q qtip $ cat f mq1r1mq2 $ cat .hg/patches/f2.patch # HG changeset patch # User test # Date 2 0 # Thu Jan 01 00:00:02 1970 +0000 # Node ID 462012cf340c97d44d62377c985a423f6bb82f07 # Parent ebe9914c0d1c3f60096e952fa4dbb3d377dea3ab P1 diff -r ebe9914c0d1c -r 462012cf340c f --- a/f Thu Jan 01 00:00:01 1970 +0000 +++ b/f Thu Jan 01 00:00:02 1970 +0000 @@ -1,1 +1,1 @@ -mq1r1 +mq1r1mq2 Adding one git-style patch and one normal: $ hg qpop -a popping f2.patch popping f.patch patch queue now empty $ rm -fr .hg/patches $ hg qinit -c $ hg up -q 0 $ hg qnew --git f_git.patch -d '3 0' $ echo mq1 > p $ hg add p $ hg qref --git -m 'P0 (git)' $ hg qnew f.patch -d '4 0' $ echo mq2 > p $ hg qref -m P1 $ hg qci -m 'save patch state' $ hg qseries -s f_git.patch: P0 (git) f.patch: P1 $ hg -R .hg/patches manifest .hgignore f.patch f_git.patch series $ cat .hg/patches/f_git.patch Date: 3 0 P0 (git) diff --git a/p b/p new file mode 100644 --- /dev/null +++ b/p @@ -0,0 +1,1 @@ +mq1 $ cat .hg/patches/f.patch Date: 4 0 P1 diff -r 
???????????? p (glob) --- a/p ??? ??? ?? ??:??:?? ???? ????? (glob) +++ b/p ??? ??? ?? ??:??:?? ???? ????? (glob) @@ -1,1 +1,1 @@ -mq1 +mq2 Rebase the applied mq patches: $ hg rebase -s 2 -d 1 rebasing 2:0c587ffcb480 "P0 (git)" (f_git.patch qbase) rebasing 3:c7f18665e4bc "P1" (f.patch qtip tip) saved backup bundle to $TESTTMP/a/.hg/strip-backup/0c587ffcb480-0ea5695f-backup.hg (glob) $ hg qci -m 'save patch state' $ hg qseries -s f_git.patch: P0 (git) f.patch: P1 $ hg -R .hg/patches manifest .hgignore f.patch f_git.patch series $ cat .hg/patches/f_git.patch # HG changeset patch # User test # Date 3 0 # Thu Jan 01 00:00:03 1970 +0000 # Node ID 12d9f6a3bbe560dee50c7c454d434add7fb8e837 # Parent bac9ed9960d8992bcad75864a879fa76cadaf1b0 P0 (git) diff --git a/p b/p new file mode 100644 --- /dev/null +++ b/p @@ -0,0 +1,1 @@ +mq1 $ cat .hg/patches/f.patch # HG changeset patch # User test # Date 4 0 # Thu Jan 01 00:00:04 1970 +0000 # Node ID c77a2661c64c60d82f63c4f7aefd95b3a948a557 # Parent 12d9f6a3bbe560dee50c7c454d434add7fb8e837 P1 diff -r 12d9f6a3bbe5 -r c77a2661c64c p --- a/p Thu Jan 01 00:00:03 1970 +0000 +++ b/p Thu Jan 01 00:00:04 1970 +0000 @@ -1,1 +1,1 @@ -mq1 +mq2 $ cd .. 
Rebase with guards $ hg init foo $ cd foo $ echo a > a $ hg ci -Am a adding a Create mq repo with guarded patches foo and bar and empty patch: $ hg qinit $ echo guarded > guarded $ hg add guarded $ hg qnew guarded $ hg qnew empty-important -m 'important commit message' -d '1 0' $ echo bar > bar $ hg add bar $ hg qnew bar -d '2 0' $ echo foo > foo $ hg add foo $ hg qnew foo $ hg qpop -a popping foo popping bar popping empty-important popping guarded patch queue now empty $ hg qguard guarded +guarded $ hg qguard bar +baz $ hg qguard foo +baz $ hg qselect baz number of unguarded, unapplied patches has changed from 1 to 3 $ hg qpush bar applying empty-important patch empty-important is empty applying bar now at: bar $ hg qguard -l guarded: +guarded empty-important: unguarded bar: +baz foo: +baz $ hg tglog @ 2: 'imported patch bar' tags: bar qtip tip | o 1: 'important commit message' tags: empty-important qbase | o 0: 'a' tags: qparent Create new head to rebase bar onto: $ hg up -C 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo b > b $ hg add b $ hg ci -m b created new head $ hg up -C 2 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo a >> a $ hg qref $ hg tglog @ 3: '[mq]: bar' tags: bar qtip tip | | o 2: 'b' tags: | | o | 1: 'important commit message' tags: empty-important qbase |/ o 0: 'a' tags: qparent Rebase bar (make sure series order is preserved and empty-important also is removed from the series): $ hg qseries guarded empty-important bar foo $ [ -f .hg/patches/empty-important ] $ hg -q rebase -d 2 note: rebase of 1:0aaf4c3af7eb created no changes to commit $ hg qseries guarded bar foo $ [ -f .hg/patches/empty-important ] [1] $ hg qguard -l guarded: +guarded bar: +baz foo: +baz $ hg tglog @ 2: '[mq]: bar' tags: bar qbase qtip tip | o 1: 'b' tags: qparent | o 0: 'a' tags: $ cd .. 
mercurial-3.7.3/tests/test-issue672.t0000644000175000017500000000554212676531525017030 0ustar mpmmpm00000000000000https://bz.mercurial-scm.org/672 # 0-2-4 # \ \ \ # 1-3-5 # # rename in #1, content change in #4. $ hg init $ touch 1 $ touch 2 $ hg commit -Am init # 0 adding 1 adding 2 $ hg rename 1 1a $ hg commit -m rename # 1 $ hg co -C 0 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo unrelated >> 2 $ hg ci -m unrelated1 # 2 created new head $ hg merge --debug 1 searching for copies back to rev 1 unmatched files in other: 1a all copies found (* = to merge, ! = divergent, % = renamed and deleted): src: '1' -> dst: '1a' checking for directory renames resolving manifests branchmerge: True, force: False, partial: False ancestor: 81f4b099af3d, local: c64f439569a9+, remote: c12dcd37c90a 1: other deleted -> r removing 1 1a: remote created -> g getting 1a 2: remote unchanged -> k 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m merge1 # 3 $ hg co -C 2 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo hello >> 1 $ hg ci -m unrelated2 # 4 created new head $ hg co -C 3 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge -y --debug 4 searching for copies back to rev 1 unmatched files in local: 1a all copies found (* = to merge, ! 
= divergent, % = renamed and deleted): src: '1' -> dst: '1a' * checking for directory renames resolving manifests branchmerge: True, force: False, partial: False ancestor: c64f439569a9, local: e327dca35ac8+, remote: 746e9549ea96 preserving 1a for resolve of 1a 1a: local copied/moved from 1 -> m (premerge) picked tool ':merge' for 1a (binary False symlink False changedelete False) merging 1a and 1 to 1a my 1a@e327dca35ac8+ other 1@746e9549ea96 ancestor 1@81f4b099af3d premerge successful 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg co -C 4 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge -y --debug 3 searching for copies back to rev 1 unmatched files in other: 1a all copies found (* = to merge, ! = divergent, % = renamed and deleted): src: '1' -> dst: '1a' * checking for directory renames resolving manifests branchmerge: True, force: False, partial: False ancestor: c64f439569a9, local: 746e9549ea96+, remote: e327dca35ac8 preserving 1 for resolve of 1a removing 1 1a: remote moved from 1 -> m (premerge) picked tool ':merge' for 1a (binary False symlink False changedelete False) merging 1 and 1a to 1a my 1a@746e9549ea96+ other 1a@e327dca35ac8 ancestor 1@81f4b099af3d premerge successful 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) mercurial-3.7.3/tests/test-pull.t0000644000175000017500000000532212676531525016411 0ustar mpmmpm00000000000000#require serve $ hg init test $ cd test $ echo foo>foo $ hg addremove adding foo $ hg commit -m 1 $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 1 changesets, 1 total revisions $ hg serve -p $HGPORT -d --pid-file=hg.pid $ cat hg.pid >> $DAEMON_PIDS $ cd .. 
$ hg clone --pull http://foo:bar@localhost:$HGPORT/ copy requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd copy $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 1 changesets, 1 total revisions $ hg co 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat foo foo $ hg manifest --debug 2ed2a3912a0b24502043eae84ee4b279c18b90dd 644 foo $ hg pull pulling from http://foo@localhost:$HGPORT/ searching for changes no changes found $ hg rollback --dry-run --verbose repository tip rolled back to revision -1 (undo pull: http://foo:***@localhost:$HGPORT/) Test pull of non-existing 20 character revision specification, making sure plain ascii identifiers not are encoded like a node: $ hg pull -r 'xxxxxxxxxxxxxxxxxxxy' pulling from http://foo@localhost:$HGPORT/ abort: unknown revision 'xxxxxxxxxxxxxxxxxxxy'! [255] $ hg pull -r 'xxxxxxxxxxxxxxxxxx y' pulling from http://foo@localhost:$HGPORT/ abort: unknown revision '7878787878787878787878787878787878782079'! [255] Issue622: hg init && hg pull -u URL doesn't checkout default branch $ cd .. $ hg init empty $ cd empty $ hg pull -u ../test pulling from ../test requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files 1 files updated, 0 files merged, 0 files removed, 0 files unresolved Test 'file:' uri handling: $ hg pull -q file://../test-does-not-exist abort: file:// URLs can only refer to localhost [255] $ hg pull -q file://../test abort: file:// URLs can only refer to localhost [255] $ hg pull -q file:../test # no-msys It's tricky to make file:// URLs working on every platform with regular shell commands. 
$ URL=`$PYTHON -c "import os; print 'file://foobar' + ('/' + os.getcwd().replace(os.sep, '/')).replace('//', '/') + '/../test'"` $ hg pull -q "$URL" abort: file:// URLs can only refer to localhost [255] $ URL=`$PYTHON -c "import os; print 'file://localhost' + ('/' + os.getcwd().replace(os.sep, '/')).replace('//', '/') + '/../test'"` $ hg pull -q "$URL" $ cd .. mercurial-3.7.3/tests/test-symlink-placeholder.t0000644000175000017500000000360012676531525021400 0ustar mpmmpm00000000000000#require symlink Create extension that can disable symlink support: $ cat > nolink.py < from mercurial import extensions, util > def setflags(orig, f, l, x): > pass > def checklink(orig, path): > return False > def extsetup(ui): > extensions.wrapfunction(util, 'setflags', setflags) > extensions.wrapfunction(util, 'checklink', checklink) > EOF $ hg init unix-repo $ cd unix-repo $ echo foo > a $ ln -s a b $ hg ci -Am0 adding a adding b $ cd .. Simulate a checkout shared on NFS/Samba: $ hg clone -q unix-repo shared $ cd shared $ rm b $ echo foo > b $ hg --config extensions.n=$TESTTMP/nolink.py status --debug ignoring suspect symlink placeholder "b" Make a clone using placeholders: $ hg --config extensions.n=$TESTTMP/nolink.py clone . 
../win-repo updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd ../win-repo $ cat b a (no-eol) $ hg --config extensions.n=$TESTTMP/nolink.py st --debug Empty placeholder: $ rm b $ touch b $ hg --config extensions.n=$TESTTMP/nolink.py st --debug ignoring suspect symlink placeholder "b" Write binary data to the placeholder: >>> open('b', 'w').write('this is a binary\0') $ hg --config extensions.n=$TESTTMP/nolink.py st --debug ignoring suspect symlink placeholder "b" Write a long string to the placeholder: >>> open('b', 'w').write('this' * 1000) $ hg --config extensions.n=$TESTTMP/nolink.py st --debug ignoring suspect symlink placeholder "b" Commit shouldn't succeed: $ hg --config extensions.n=$TESTTMP/nolink.py ci -m1 nothing changed [1] Write a valid string to the placeholder: >>> open('b', 'w').write('this') $ hg --config extensions.n=$TESTTMP/nolink.py st --debug M b $ hg --config extensions.n=$TESTTMP/nolink.py ci -m1 $ hg manifest tip --verbose 644 a 644 @ b $ cd .. 
mercurial-3.7.3/tests/test-alias.t0000644000175000017500000003213512676531525016530 0ustar mpmmpm00000000000000 $ HGFOO=BAR; export HGFOO $ cat >> $HGRCPATH < [alias] > # should clobber ci but not commit (issue2993) > ci = version > myinit = init > mycommit = commit > optionalrepo = showconfig alias.myinit > cleanstatus = status -c > unknown = bargle > ambiguous = s > recursive = recursive > disabled = email > nodefinition = > noclosingquotation = ' > no--cwd = status --cwd elsewhere > no-R = status -R elsewhere > no--repo = status --repo elsewhere > no--repository = status --repository elsewhere > no--config = status --config a.config=1 > mylog = log > lognull = log -r null > shortlog = log --template '{rev} {node|short} | {date|isodate}\n' > positional = log --template '{\$2} {\$1} | {date|isodate}\n' > dln = lognull --debug > nousage = rollback > put = export -r 0 -o "\$FOO/%R.diff" > blank = !printf '\n' > self = !printf '\$0\n' > echoall = !printf '\$@\n' > echo1 = !printf '\$1\n' > echo2 = !printf '\$2\n' > echo13 = !printf '\$1 \$3\n' > echotokens = !printf "%s\n" "\$@" > count = !hg log -r "\$@" --template=. | wc -c | sed -e 's/ //g' > mcount = !hg log \$@ --template=. 
| wc -c | sed -e 's/ //g' > rt = root > tglog = log -G --template "{rev}:{node|short}: '{desc}' {branches}\n" > idalias = id > idaliaslong = id > idaliasshell = !echo test > parentsshell1 = !echo one > parentsshell2 = !echo two > escaped1 = !printf 'test\$\$test\n' > escaped2 = !sh -c 'echo "HGFOO is \$\$HGFOO"' > escaped3 = !sh -c 'echo "\$1 is \$\$\$1"' > escaped4 = !printf '\$\$0 \$\$@\n' > exit1 = !sh -c 'exit 1' > > [defaults] > mylog = -q > lognull = -q > log = -v > EOF basic $ hg myinit alias unknown $ hg unknown abort: alias 'unknown' resolves to unknown command 'bargle' [255] $ hg help unknown alias 'unknown' resolves to unknown command 'bargle' ambiguous $ hg ambiguous abort: alias 'ambiguous' resolves to ambiguous command 's' [255] $ hg help ambiguous alias 'ambiguous' resolves to ambiguous command 's' recursive $ hg recursive abort: alias 'recursive' resolves to unknown command 'recursive' [255] $ hg help recursive alias 'recursive' resolves to unknown command 'recursive' disabled $ hg disabled abort: alias 'disabled' resolves to unknown command 'email' ('email' is provided by 'patchbomb' extension) [255] $ hg help disabled alias 'disabled' resolves to unknown command 'email' 'email' is provided by the following extension: patchbomb command to send changesets as (a series of) patch emails (use "hg help extensions" for information on enabling extensions) no definition $ hg nodef abort: no definition for alias 'nodefinition' [255] $ hg help nodef no definition for alias 'nodefinition' no closing quotation $ hg noclosing abort: error in definition for alias 'noclosingquotation': No closing quotation [255] $ hg help noclosing error in definition for alias 'noclosingquotation': No closing quotation invalid options $ hg no--cwd abort: error in definition for alias 'no--cwd': --cwd may only be given on the command line [255] $ hg help no--cwd error in definition for alias 'no--cwd': --cwd may only be given on the command line $ hg no-R abort: error in 
definition for alias 'no-R': -R may only be given on the command line [255] $ hg help no-R error in definition for alias 'no-R': -R may only be given on the command line $ hg no--repo abort: error in definition for alias 'no--repo': --repo may only be given on the command line [255] $ hg help no--repo error in definition for alias 'no--repo': --repo may only be given on the command line $ hg no--repository abort: error in definition for alias 'no--repository': --repository may only be given on the command line [255] $ hg help no--repository error in definition for alias 'no--repository': --repository may only be given on the command line $ hg no--config abort: error in definition for alias 'no--config': --config may only be given on the command line [255] optional repository #if no-outer-repo $ hg optionalrepo init #endif $ cd alias $ cat > .hg/hgrc < [alias] > myinit = init -q > EOF $ hg optionalrepo init -q no usage $ hg nousage no rollback information available [1] $ echo foo > foo $ hg commit -Amfoo adding foo infer repository $ cd .. 
#if no-outer-repo $ hg shortlog alias/foo 0 e63c23eaa88a | 1970-01-01 00:00 +0000 #endif $ cd alias with opts $ hg cleanst C foo with opts and whitespace $ hg shortlog 0 e63c23eaa88a | 1970-01-01 00:00 +0000 positional arguments $ hg positional abort: too few arguments for command alias [255] $ hg positional a abort: too few arguments for command alias [255] $ hg positional 'node|short' rev 0 e63c23eaa88a | 1970-01-01 00:00 +0000 interaction with defaults $ hg mylog 0:e63c23eaa88a $ hg lognull -1:000000000000 properly recursive $ hg dln changeset: -1:0000000000000000000000000000000000000000 phase: public parent: -1:0000000000000000000000000000000000000000 parent: -1:0000000000000000000000000000000000000000 manifest: -1:0000000000000000000000000000000000000000 user: date: Thu Jan 01 00:00:00 1970 +0000 extra: branch=default path expanding $ FOO=`pwd` hg put $ cat 0.diff # HG changeset patch # User test # Date 0 0 # Thu Jan 01 00:00:00 1970 +0000 # Node ID e63c23eaa88ae77967edcf4ea194d31167c478b0 # Parent 0000000000000000000000000000000000000000 foo diff -r 000000000000 -r e63c23eaa88a foo --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/foo Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +foo simple shell aliases $ hg blank $ hg blank foo $ hg self self $ hg echoall $ hg echoall foo foo $ hg echoall 'test $2' foo test $2 foo $ hg echoall 'test $@' foo '$@' test $@ foo $@ $ hg echoall 'test "$@"' foo '"$@"' test "$@" foo "$@" $ hg echo1 foo bar baz foo $ hg echo2 foo bar baz bar $ hg echo13 foo bar baz test foo baz $ hg echo2 foo $ hg echotokens $ hg echotokens foo 'bar $1 baz' foo bar $1 baz $ hg echotokens 'test $2' foo test $2 foo $ hg echotokens 'test $@' foo '$@' test $@ foo $@ $ hg echotokens 'test "$@"' foo '"$@"' test "$@" foo "$@" $ echo bar > bar $ hg commit -qA -m bar $ hg count . 
1 $ hg count 'branch(default)' 2 $ hg mcount -r '"branch(default)"' 2 $ hg tglog @ 1:042423737847: 'bar' | o 0:e63c23eaa88a: 'foo' shadowing $ hg i hg: command 'i' is ambiguous: idalias idaliaslong idaliasshell identify import incoming init [255] $ hg id 042423737847 tip $ hg ida hg: command 'ida' is ambiguous: idalias idaliaslong idaliasshell [255] $ hg idalias 042423737847 tip $ hg idaliasl 042423737847 tip $ hg idaliass test $ hg parentsshell hg: command 'parentsshell' is ambiguous: parentsshell1 parentsshell2 [255] $ hg parentsshell1 one $ hg parentsshell2 two shell aliases with global options $ hg init sub $ cd sub $ hg count 'branch(default)' abort: unknown revision 'default'! 0 $ hg -v count 'branch(default)' abort: unknown revision 'default'! 0 $ hg -R .. count 'branch(default)' abort: unknown revision 'default'! 0 $ hg --cwd .. count 'branch(default)' 2 $ hg echoall --cwd .. repo specific shell aliases $ cat >> .hg/hgrc < [alias] > subalias = !echo sub > EOF $ cat >> ../.hg/hgrc < [alias] > mainalias = !echo main > EOF shell alias defined in current repo $ hg subalias sub $ hg --cwd .. subalias > /dev/null hg: unknown command 'subalias' (did you mean idalias?) [255] $ hg -R .. subalias > /dev/null hg: unknown command 'subalias' (did you mean idalias?) [255] shell alias defined in other repo $ hg mainalias > /dev/null hg: unknown command 'mainalias' (did you mean idalias?) [255] $ hg -R .. mainalias main $ hg --cwd .. mainalias main typos get useful suggestions $ hg --cwd .. manalias hg: unknown command 'manalias' (did you mean one of idalias, mainalias, manifest?) [255] shell aliases with escaped $ chars $ hg escaped1 test$test $ hg escaped2 HGFOO is BAR $ hg escaped3 HGFOO HGFOO is BAR $ hg escaped4 test $0 $@ abbreviated name, which matches against both shell alias and the command provided extension, should be aborted. 
$ cat >> .hg/hgrc < [extensions] > hgext.rebase = > EOF #if windows $ cat >> .hg/hgrc < [alias] > rebate = !echo this is %HG_ARGS% > EOF #else $ cat >> .hg/hgrc < [alias] > rebate = !echo this is \$HG_ARGS > EOF #endif $ hg reba hg: command 'reba' is ambiguous: rebase rebate [255] $ hg rebat this is rebate $ hg rebat --foo-bar this is rebate --foo-bar invalid arguments $ hg rt foo hg rt: invalid arguments hg rt alias for: hg root (use "hg rt -h" to show more help) [255] invalid global arguments for normal commands, aliases, and shell aliases $ hg --invalid root hg: option --invalid not recognized Mercurial Distributed SCM basic commands: add add the specified files on the next commit annotate show changeset information by line for each file clone make a copy of an existing repository commit commit the specified files or all outstanding changes diff diff repository (or selected files) export dump the header and diffs for one or more changesets forget forget the specified files on the next commit init create a new repository in the given directory log show revision history of entire repository or files merge merge another revision into working directory pull pull changes from the specified source push push changes to the specified destination remove remove the specified files on the next commit serve start stand-alone webserver status show changed files in the working directory summary summarize working directory state update update working directory (or switch revisions) (use "hg help" for the full list of commands or "hg -v" for details) [255] $ hg --invalid mylog hg: option --invalid not recognized Mercurial Distributed SCM basic commands: add add the specified files on the next commit annotate show changeset information by line for each file clone make a copy of an existing repository commit commit the specified files or all outstanding changes diff diff repository (or selected files) export dump the header and diffs for one or more changesets forget forget the 
specified files on the next commit init create a new repository in the given directory log show revision history of entire repository or files merge merge another revision into working directory pull pull changes from the specified source push push changes to the specified destination remove remove the specified files on the next commit serve start stand-alone webserver status show changed files in the working directory summary summarize working directory state update update working directory (or switch revisions) (use "hg help" for the full list of commands or "hg -v" for details) [255] $ hg --invalid blank hg: option --invalid not recognized Mercurial Distributed SCM basic commands: add add the specified files on the next commit annotate show changeset information by line for each file clone make a copy of an existing repository commit commit the specified files or all outstanding changes diff diff repository (or selected files) export dump the header and diffs for one or more changesets forget forget the specified files on the next commit init create a new repository in the given directory log show revision history of entire repository or files merge merge another revision into working directory pull pull changes from the specified source push push changes to the specified destination remove remove the specified files on the next commit serve start stand-alone webserver status show changed files in the working directory summary summarize working directory state update update working directory (or switch revisions) (use "hg help" for the full list of commands or "hg -v" for details) [255] This should show id: $ hg --config alias.log='id' log 000000000000 tip This shouldn't: $ hg --config alias.log='id' history $ cd ../.. return code of command and shell aliases: $ hg mycommit -R alias nothing changed [1] $ hg exit1 [1] #if no-outer-repo $ hg root abort: no repository found in '$TESTTMP' (.hg not found)! 
[255] $ hg --config alias.hgroot='!hg root' hgroot abort: no repository found in '$TESTTMP' (.hg not found)! [255] #endif mercurial-3.7.3/tests/test-unrelated-pull.t0000644000175000017500000000154612676531525020376 0ustar mpmmpm00000000000000 $ hg init a $ cd a $ echo 123 > a $ hg add a $ hg commit -m "a" -u a $ cd .. $ hg init b $ cd b $ echo 321 > b $ hg add b $ hg commit -m "b" -u b $ hg pull ../a pulling from ../a searching for changes abort: repository is unrelated [255] $ hg pull -f ../a pulling from ../a searching for changes warning: repository is unrelated requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg heads changeset: 1:9a79c33a9db3 tag: tip parent: -1:000000000000 user: a date: Thu Jan 01 00:00:00 1970 +0000 summary: a changeset: 0:01f8062b2de5 user: b date: Thu Jan 01 00:00:00 1970 +0000 summary: b $ cd .. mercurial-3.7.3/tests/test-merge5.t0000644000175000017500000000164012676531525016620 0ustar mpmmpm00000000000000 $ hg init $ echo This is file a1 > a $ echo This is file b1 > b $ hg add a b $ hg commit -m "commit #0" $ echo This is file b22 > b $ hg commit -m "comment #1" $ hg update 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm b $ hg commit -A -m "comment #2" removing b created new head $ hg update 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg update abort: not a linear update (merge or update --check to force update) [255] $ rm b $ hg update -c abort: uncommitted changes [255] $ hg revert b $ hg update -c 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ mv a c Should abort: $ hg update 1 abort: uncommitted changes (commit or update --clean to discard changes) [255] $ mv c a Should succeed: $ hg update 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 
mercurial-3.7.3/tests/test-remove.t0000644000175000017500000001240312676531525016730 0ustar mpmmpm00000000000000 $ remove() { > hg rm $@ > echo "exit code: $?" > hg st > # do not use ls -R, which recurses in .hg subdirs on Mac OS X 10.5 > find . -name .hg -prune -o -type f -print | sort > hg up -C > } $ hg init a $ cd a $ echo a > foo file not managed $ remove foo not removing foo: file is untracked exit code: 1 ? foo ./foo 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg add foo $ hg commit -m1 the table cases 00 state added, options none $ echo b > bar $ hg add bar $ remove bar not removing bar: file has been marked for add (use forget to undo) exit code: 1 A bar ./bar ./foo 0 files updated, 0 files merged, 0 files removed, 0 files unresolved 01 state clean, options none $ remove foo exit code: 0 R foo ? bar ./bar 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 02 state modified, options none $ echo b >> foo $ remove foo not removing foo: file is modified (use -f to force removal) exit code: 1 M foo ? bar ./bar ./foo 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 03 state missing, options none $ rm foo $ remove foo exit code: 0 R foo ? bar ./bar 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 10 state added, options -f $ echo b > bar $ hg add bar $ remove -f bar exit code: 0 ? 
bar ./bar ./foo 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm bar 11 state clean, options -f $ remove -f foo exit code: 0 R foo 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 12 state modified, options -f $ echo b >> foo $ remove -f foo exit code: 0 R foo 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 13 state missing, options -f $ rm foo $ remove -f foo exit code: 0 R foo 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 20 state added, options -A $ echo b > bar $ hg add bar $ remove -A bar not removing bar: file still exists exit code: 1 A bar ./bar ./foo 0 files updated, 0 files merged, 0 files removed, 0 files unresolved 21 state clean, options -A $ remove -A foo not removing foo: file still exists exit code: 1 ? bar ./bar ./foo 0 files updated, 0 files merged, 0 files removed, 0 files unresolved 22 state modified, options -A $ echo b >> foo $ remove -A foo not removing foo: file still exists exit code: 1 M foo ? bar ./bar ./foo 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 23 state missing, options -A $ rm foo $ remove -A foo exit code: 0 R foo ? bar ./bar 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 30 state added, options -Af $ echo b > bar $ hg add bar $ remove -Af bar exit code: 0 ? 
bar ./bar ./foo 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm bar 31 state clean, options -Af $ remove -Af foo exit code: 0 R foo ./foo 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 32 state modified, options -Af $ echo b >> foo $ remove -Af foo exit code: 0 R foo ./foo 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 33 state missing, options -Af $ rm foo $ remove -Af foo exit code: 0 R foo 1 files updated, 0 files merged, 0 files removed, 0 files unresolved test some directory stuff $ mkdir test $ echo a > test/foo $ echo b > test/bar $ hg ci -Am2 adding test/bar adding test/foo dir, options none $ rm test/bar $ remove test removing test/bar (glob) removing test/foo (glob) exit code: 0 R test/bar R test/foo ./foo 2 files updated, 0 files merged, 0 files removed, 0 files unresolved dir, options -f $ rm test/bar $ remove -f test removing test/bar (glob) removing test/foo (glob) exit code: 0 R test/bar R test/foo ./foo 2 files updated, 0 files merged, 0 files removed, 0 files unresolved dir, options -A $ rm test/bar $ remove -A test not removing test/foo: file still exists (glob) removing test/bar (glob) exit code: 1 R test/bar ./foo ./test/foo 1 files updated, 0 files merged, 0 files removed, 0 files unresolved dir, options -Af $ rm test/bar $ remove -Af test removing test/bar (glob) removing test/foo (glob) exit code: 0 R test/bar R test/foo ./foo ./test/foo 2 files updated, 0 files merged, 0 files removed, 0 files unresolved test remove dropping empty trees (issue1861) $ mkdir -p issue1861/b/c $ echo x > issue1861/x $ echo y > issue1861/b/c/y $ hg ci -Am add adding issue1861/b/c/y adding issue1861/x $ hg rm issue1861/b removing issue1861/b/c/y (glob) $ hg ci -m remove $ ls issue1861 x test that commit does not crash if the user removes a newly added file $ touch f1 $ hg add f1 $ rm f1 $ hg ci -A -mx removing f1 nothing changed [1] handling of untracked directories and missing files $ mkdir d1 
$ echo a > d1/a $ hg rm --after d1 not removing d1: no tracked files [1] $ hg add d1/a $ rm d1/a $ hg rm --after d1 removing d1/a (glob) #if windows $ hg rm --after nosuch nosuch: * (glob) [1] #else $ hg rm --after nosuch nosuch: No such file or directory [1] #endif mercurial-3.7.3/tests/test-symlinks.t0000644000175000017500000001162212676531525017306 0ustar mpmmpm00000000000000#require symlink == tests added in 0.7 == $ hg init test-symlinks-0.7; cd test-symlinks-0.7; $ touch foo; ln -s foo bar; ln -s nonexistent baz import with add and addremove -- symlink walking should _not_ screwup. $ hg add adding bar adding baz adding foo $ hg forget bar baz foo $ hg addremove adding bar adding baz adding foo commit -- the symlink should _not_ appear added to dir state $ hg commit -m 'initial' $ touch bomb again, symlink should _not_ show up on dir state $ hg addremove adding bomb Assert screamed here before, should go by without consequence $ hg commit -m 'is there a bug?' $ cd .. == fifo & ignore == $ hg init test; cd test; $ mkdir dir $ touch a.c dir/a.o dir/b.o test what happens if we want to trick hg $ hg commit -A -m 0 adding a.c adding dir/a.o adding dir/b.o $ echo "relglob:*.o" > .hgignore $ rm a.c $ rm dir/a.o $ rm dir/b.o $ mkdir dir/a.o $ ln -s nonexistent dir/b.o $ mkfifo a.c it should show a.c, dir/a.o and dir/b.o deleted $ hg status M dir/b.o ! a.c ! dir/a.o ? .hgignore $ hg status a.c a.c: unsupported file type (type is fifo) ! a.c $ cd .. == symlinks from outside the tree == test absolute path through symlink outside repo $ p=`pwd` $ hg init x $ ln -s x y $ cd x $ touch f $ hg add f $ hg status "$p"/y/f A f try symlink outside repo to file inside $ ln -s x/f ../z this should fail $ hg status ../z && { echo hg mistakenly exited with status 0; exit 1; } || : abort: ../z not under root '$TESTTMP/x' $ cd .. == cloning symlinks == $ hg init clone; cd clone; try cloning symlink in a subdir 1. 
commit a symlink $ mkdir -p a/b/c $ cd a/b/c $ ln -s /path/to/symlink/source demo $ cd ../../.. $ hg stat ? a/b/c/demo $ hg commit -A -m 'add symlink in a/b/c subdir' adding a/b/c/demo 2. clone it $ cd .. $ hg clone clone clonedest updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved == symlink and git diffs == git symlink diff $ cd clonedest $ hg diff --git -r null:tip diff --git a/a/b/c/demo b/a/b/c/demo new file mode 120000 --- /dev/null +++ b/a/b/c/demo @@ -0,0 +1,1 @@ +/path/to/symlink/source \ No newline at end of file $ hg export --git tip > ../sl.diff import git symlink diff $ hg rm a/b/c/demo $ hg commit -m'remove link' $ hg import ../sl.diff applying ../sl.diff $ hg diff --git -r 1:tip diff --git a/a/b/c/demo b/a/b/c/demo new file mode 120000 --- /dev/null +++ b/a/b/c/demo @@ -0,0 +1,1 @@ +/path/to/symlink/source \ No newline at end of file == symlinks and addremove == directory moved and symlinked $ mkdir foo $ touch foo/a $ hg ci -Ama adding foo/a $ mv foo bar $ ln -s bar foo $ hg status ! foo/a ? bar/a ? foo now addremove should remove old files $ hg addremove adding bar/a adding foo removing foo/a commit and update back $ hg ci -mb $ hg up '.^' 1 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg up tip 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ cd .. == root of repository is symlinked == $ hg init root $ ln -s root link $ cd root $ echo foo > foo $ hg status ? foo $ hg status ../link ? foo $ hg add foo $ hg cp foo "$TESTTMP/link/bar" foo has not been committed yet, so no copy data will be stored for bar. $ cd .. $ hg init b $ cd b $ ln -s nothing dangling $ hg commit -m 'commit symlink without adding' dangling abort: dangling: file not tracked! 
[255] $ hg add dangling $ hg commit -m 'add symlink' $ hg tip -v changeset: 0:cabd88b706fc tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 files: dangling description: add symlink $ hg manifest --debug 2564acbe54bbbedfbf608479340b359f04597f80 644 @ dangling $ readlink.py dangling dangling -> nothing $ rm dangling $ ln -s void dangling $ hg commit -m 'change symlink' $ readlink.py dangling dangling -> void modifying link $ rm dangling $ ln -s empty dangling $ readlink.py dangling dangling -> empty reverting to rev 0: $ hg revert -r 0 -a reverting dangling $ readlink.py dangling dangling -> nothing backups: $ readlink.py *.orig dangling.orig -> empty $ rm *.orig $ hg up -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved copies $ hg cp -v dangling dangling2 copying dangling to dangling2 $ hg st -Cmard A dangling2 dangling $ readlink.py dangling dangling2 dangling -> void dangling2 -> void Issue995: hg copy -A incorrectly handles symbolic links $ hg up -C 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ mkdir dir $ ln -s dir dirlink $ hg ci -qAm 'add dirlink' $ mkdir newdir $ mv dir newdir/dir $ mv dirlink newdir/dirlink $ hg mv -A dirlink newdir/dirlink $ cd .. mercurial-3.7.3/tests/test-unified-test.t0000644000175000017500000000366512676531525020045 0ustar mpmmpm00000000000000Test that the syntax of "unified tests" is properly processed ============================================================== Simple commands: $ echo foo foo $ printf 'oh no' oh no (no-eol) $ printf 'bar\nbaz\n' | cat bar baz Multi-line command: $ foo() { > echo bar > } $ foo bar Return codes before inline python: $ sh -c 'exit 1' [1] Doctest commands: >>> print 'foo' foo $ echo interleaved interleaved >>> for c in 'xyz': ... print c x y z >>> print >>> foo = 'global name' >>> def func(): ... print foo, 'should be visible in func()' >>> func() global name should be visible in func() >>> print '''multiline ... 
string''' multiline string Regular expressions: $ echo foobarbaz foobar.* (re) $ echo barbazquux .*quux.* (re) Globs: $ printf '* \\foobarbaz {10}\n' \* \\fo?bar* {10} (glob) Literal match ending in " (re)": $ echo 'foo (re)' foo (re) Windows: \r\n is handled like \n and can be escaped: #if windows $ printf 'crlf\r\ncr\r\tcrlf\r\ncrlf\r\n' crlf cr\r (no-eol) (esc) \tcrlf (esc) crlf\r (esc) #endif Combining esc with other markups - and handling lines ending with \r instead of \n: $ printf 'foo/bar\r' fo?/bar\r (no-eol) (glob) (esc) #if windows $ printf 'foo\\bar\r' foo/bar\r (no-eol) (glob) (esc) #endif $ printf 'foo/bar\rfoo/bar\r' foo.bar\r \(no-eol\) (re) (esc) foo.bar\r \(no-eol\) (re) testing hghave $ hghave true $ hghave false skipped: missing feature: nail clipper [1] $ hghave no-true skipped: system supports yak shaving [1] $ hghave no-false Conditional sections based on hghave: #if true $ echo tested tested #else $ echo skipped #endif #if false $ echo skipped #else $ echo tested tested #endif #if no-false $ echo tested tested #else $ echo skipped #endif #if no-true $ echo skipped #else $ echo tested tested #endif Exit code: $ (exit 1) [1] mercurial-3.7.3/tests/test-double-merge.t0000644000175000017500000000337212676531525020007 0ustar mpmmpm00000000000000 $ hg init repo $ cd repo $ echo line 1 > foo $ hg ci -qAm 'add foo' copy foo to bar and change both files $ hg cp foo bar $ echo line 2-1 >> foo $ echo line 2-2 >> bar $ hg ci -m 'cp foo bar; change both' in another branch, change foo in a way that doesn't conflict with the other changes $ hg up -qC 0 $ echo line 0 > foo $ hg cat foo >> foo $ hg ci -m 'change foo' created new head we get conflicts that shouldn't be there $ hg merge -P changeset: 1:484bf6903104 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: cp foo bar; change both $ hg merge --debug searching for copies back to rev 1 unmatched files in other: bar all copies found (* = to merge, ! 
= divergent, % = renamed and deleted): src: 'foo' -> dst: 'bar' * checking for directory renames resolving manifests branchmerge: True, force: False, partial: False ancestor: e6dc8efe11cc, local: 6a0df1dad128+, remote: 484bf6903104 preserving foo for resolve of bar preserving foo for resolve of foo bar: remote copied from foo -> m (premerge) picked tool ':merge' for bar (binary False symlink False changedelete False) merging foo and bar to bar my bar@6a0df1dad128+ other bar@484bf6903104 ancestor foo@e6dc8efe11cc premerge successful foo: versions differ -> m (premerge) picked tool ':merge' for foo (binary False symlink False changedelete False) merging foo my foo@6a0df1dad128+ other foo@484bf6903104 ancestor foo@e6dc8efe11cc premerge successful 0 files updated, 2 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) contents of foo $ cat foo line 0 line 1 line 2-1 contents of bar $ cat bar line 0 line 1 line 2-2 $ cd .. mercurial-3.7.3/tests/test-minirst.py0000644000175000017500000001210412676531525017303 0ustar mpmmpm00000000000000from pprint import pprint from mercurial import minirst def debugformat(text, form, **kwargs): if form == 'html': print "html format:" out = minirst.format(text, style=form, **kwargs) else: print "%d column format:" % form out = minirst.format(text, width=form, **kwargs) print "-" * 70 if type(out) == tuple: print out[0][:-1] print "-" * 70 pprint(out[1]) else: print out[:-1] print "-" * 70 print def debugformats(title, text, **kwargs): print "== %s ==" % title debugformat(text, 60, **kwargs) debugformat(text, 30, **kwargs) debugformat(text, 'html', **kwargs) paragraphs = """ This is some text in the first paragraph. A small indented paragraph. It is followed by some lines containing random whitespace. \n \n \nThe third and final paragraph. """ debugformats('paragraphs', paragraphs) definitions = """ A Term Definition. The indented lines make up the definition. Another Term Another definition. 
The final line in the definition determines the indentation, so this will be indented with four spaces. A Nested/Indented Term Definition. """ debugformats('definitions', definitions) literals = r""" The fully minimized form is the most convenient form:: Hello literal world In the partially minimized form a paragraph simply ends with space-double-colon. :: //////////////////////////////////////// long un-wrapped line in a literal block \\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\ :: This literal block is started with '::', the so-called expanded form. The paragraph with '::' disappears in the final output. """ debugformats('literals', literals) lists = """ - This is the first list item. Second paragraph in the first list item. - List items need not be separated by a blank line. - And will be rendered without one in any case. We can have indented lists: - This is an indented list item - Another indented list item:: - A literal block in the middle of an indented list. (The above is not a list item since we are in the literal block.) :: Literal block with no indentation (apart from the two spaces added to all literal blocks). 1. This is an enumerated list (first item). 2. Continuing with the second item. (1) foo (2) bar 1) Another 2) List Line blocks are also a form of list: | This is the first line. The line continues here. | This is the second line. """ debugformats('lists', lists) options = """ There is support for simple option lists, but only with long options: -X, --exclude filter an option with a short and long option with an argument -I, --include an option with both a short option and a long option --all Output all. --both Output both (this description is quite long). --long Output all day long. --par This option has two paragraphs in its description. This is the first. This is the second. Blank lines may be omitted between options (as above) or left in (as here). The next paragraph looks like an option list, but lacks the two-space marker after the option. 
It is treated as a normal paragraph: --foo bar baz """ debugformats('options', options) fields = """ :a: First item. :ab: Second item. Indentation and wrapping is handled automatically. Next list: :small: The larger key below triggers full indentation here. :much too large: This key is big enough to get its own line. """ debugformats('fields', fields) containers = """ Normal output. .. container:: debug Initial debug output. .. container:: verbose Verbose output. .. container:: debug Debug output. """ debugformats('containers (normal)', containers) debugformats('containers (verbose)', containers, keep=['verbose']) debugformats('containers (debug)', containers, keep=['debug']) debugformats('containers (verbose debug)', containers, keep=['verbose', 'debug']) roles = """Please see :hg:`add`.""" debugformats('roles', roles) sections = """ Title ===== Section ------- Subsection '''''''''' Markup: ``foo`` and :hg:`help` ------------------------------ """ debugformats('sections', sections) admonitions = """ .. note:: This is a note - Bullet 1 - Bullet 2 .. warning:: This is a warning Second input line of warning .. danger:: This is danger """ debugformats('admonitions', admonitions) comments = """ Some text. .. A comment .. An indented comment Some indented text. .. 
Empty comment above """ debugformats('comments', comments) data = [['a', 'b', 'c'], ['1', '2', '3'], ['foo', 'bar', 'baz this list is very very very long man']] rst = minirst.maketable(data, 2, True) table = ''.join(rst) print table debugformats('table', table) data = [['s', 'long', 'line\ngoes on here'], ['', 'xy', 'tried to fix here\n by indenting']] rst = minirst.maketable(data, 1, False) table = ''.join(rst) print table debugformats('table+nl', table) mercurial-3.7.3/tests/test-merge-closedheads.t0000644000175000017500000000436412676531525021015 0ustar mpmmpm00000000000000 $ hgcommit() { > hg commit -u user "$@" > } $ hg init clhead $ cd clhead $ touch foo && hg add && hgcommit -m 'foo' adding foo $ touch bar && hg add && hgcommit -m 'bar' adding bar $ touch baz && hg add && hgcommit -m 'baz' adding baz $ echo "flub" > foo $ hgcommit -m "flub" $ echo "nub" > foo $ hgcommit -m "nub" $ hg up -C 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo "c1" > c1 $ hg add c1 $ hgcommit -m "c1" created new head $ echo "c2" > c1 $ hgcommit -m "c2" $ hg up -C 2 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo "d1" > d1 $ hg add d1 $ hgcommit -m "d1" created new head $ echo "d2" > d1 $ hgcommit -m "d2" $ hg tag -l good fail with three heads $ hg up -C good 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge abort: branch 'default' has 3 heads - please merge with an explicit rev (run 'hg heads .' 
to see heads) [255] close one of the heads $ hg up -C 6 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hgcommit -m 'close this head' --close-branch succeed with two open heads $ hg up -C good 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg up -C good 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hgcommit -m 'merged heads' hg update -C 8 $ hg update -C 8 1 files updated, 0 files merged, 0 files removed, 0 files unresolved hg branch some-branch $ hg branch some-branch marked working directory as branch some-branch (branches are permanent and global, did you want a bookmark?) hg commit $ hgcommit -m 'started some-branch' hg commit --close-branch $ hgcommit --close-branch -m 'closed some-branch' hg update default $ hg update default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved hg merge some-branch $ hg merge some-branch 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) hg commit (no reopening of some-branch) $ hgcommit -m 'merge with closed branch' $ cd .. mercurial-3.7.3/tests/test-bisect2.t0000644000175000017500000004666612676531525017010 0ustar mpmmpm00000000000000# The tests in test-bisect are done on a linear history. 
Here the # following repository history is used for testing: # # 17 # | # 18 16 # \ / # 15 # / \ # / \ # 10 13 # / \ | # / \ | 14 # 7 6 9 12 / # \ / \ | |/ # 4 \ | 11 # \ \ | / # 3 5 | / # \ / |/ # 2 8 # \ / # 1 # | # 0 init $ hg init committing changes $ echo > a $ echo '0' >> a $ hg add a $ hg ci -m "0" -d "0 0" $ echo '1' >> a $ hg ci -m "1" -d "1 0" $ echo '2' >> a $ hg ci -m "2" -d "2 0" $ echo '3' >> a $ hg ci -m "3" -d "3 0" $ echo '4' >> a $ hg ci -m "4" -d "4 0" create branch $ hg up -r 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo '5' >> b $ hg add b $ hg ci -m "5" -d "5 0" created new head merge $ hg merge 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m "merge 4,5" -d "6 0" create branch $ hg up -r 4 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo '7' > c $ hg add c $ hg ci -m "7" -d "7 0" created new head create branch $ hg up -r 1 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo '8' > d $ hg add d $ hg ci -m "8" -d "8 0" created new head $ echo '9' >> d $ hg ci -m "9" -d "9 0" merge $ hg merge -r 6 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m "merge 6,9" -d "10 0" create branch $ hg up -r 8 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo '11' > e $ hg add e $ hg ci -m "11" -d "11 0" created new head $ echo '12' >> e $ hg ci -m "12" -d "12 0" $ echo '13' >> e $ hg ci -m "13" -d "13 0" create branch $ hg up -r 11 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo '14' > f $ hg add f $ hg ci -m "14" -d "14 0" created new head merge $ hg up -r 13 -C 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge -r 10 3 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m "merge 10,13" -d "15 0" $ 
echo '16' >> e $ hg ci -m "16" -d "16 0" $ echo '17' >> e $ hg ci -m "17" -d "17 0" create branch $ hg up -r 15 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo '18' >> e $ hg ci -m "18" -d "18 0" created new head log $ hg log changeset: 18:d42e18c7bc9b tag: tip parent: 15:857b178a7cf3 user: test date: Thu Jan 01 00:00:18 1970 +0000 summary: 18 changeset: 17:228c06deef46 user: test date: Thu Jan 01 00:00:17 1970 +0000 summary: 17 changeset: 16:609d82a7ebae user: test date: Thu Jan 01 00:00:16 1970 +0000 summary: 16 changeset: 15:857b178a7cf3 parent: 13:b0a32c86eb31 parent: 10:429fcd26f52d user: test date: Thu Jan 01 00:00:15 1970 +0000 summary: merge 10,13 changeset: 14:faa450606157 parent: 11:82ca6f06eccd user: test date: Thu Jan 01 00:00:14 1970 +0000 summary: 14 changeset: 13:b0a32c86eb31 user: test date: Thu Jan 01 00:00:13 1970 +0000 summary: 13 changeset: 12:9f259202bbe7 user: test date: Thu Jan 01 00:00:12 1970 +0000 summary: 12 changeset: 11:82ca6f06eccd parent: 8:dab8161ac8fc user: test date: Thu Jan 01 00:00:11 1970 +0000 summary: 11 changeset: 10:429fcd26f52d parent: 9:3c77083deb4a parent: 6:a214d5d3811a user: test date: Thu Jan 01 00:00:10 1970 +0000 summary: merge 6,9 changeset: 9:3c77083deb4a user: test date: Thu Jan 01 00:00:09 1970 +0000 summary: 9 changeset: 8:dab8161ac8fc parent: 1:4ca5088da217 user: test date: Thu Jan 01 00:00:08 1970 +0000 summary: 8 changeset: 7:50c76098bbf2 parent: 4:5c668c22234f user: test date: Thu Jan 01 00:00:07 1970 +0000 summary: 7 changeset: 6:a214d5d3811a parent: 5:385a529b6670 parent: 4:5c668c22234f user: test date: Thu Jan 01 00:00:06 1970 +0000 summary: merge 4,5 changeset: 5:385a529b6670 parent: 2:051e12f87bf1 user: test date: Thu Jan 01 00:00:05 1970 +0000 summary: 5 changeset: 4:5c668c22234f user: test date: Thu Jan 01 00:00:04 1970 +0000 summary: 4 changeset: 3:0950834f0a9c user: test date: Thu Jan 01 00:00:03 1970 +0000 summary: 3 changeset: 2:051e12f87bf1 user: test date: Thu Jan 01 
00:00:02 1970 +0000 summary: 2 changeset: 1:4ca5088da217 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: 1 changeset: 0:33b1f9bc8bc5 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0 hg up -C $ hg up -C 0 files updated, 0 files merged, 0 files removed, 0 files unresolved complex bisect test 1 # first bad rev is 9 $ hg bisect -r $ hg bisect -g 0 $ hg bisect -b 17 # -> update to rev 6 Testing changeset 6:a214d5d3811a (15 changesets remaining, ~3 tests) 0 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 17:228c06deef46 $ hg log -q -r 'bisect(untested)' 1:4ca5088da217 2:051e12f87bf1 3:0950834f0a9c 4:5c668c22234f 5:385a529b6670 6:a214d5d3811a 8:dab8161ac8fc 9:3c77083deb4a 10:429fcd26f52d 11:82ca6f06eccd 12:9f259202bbe7 13:b0a32c86eb31 15:857b178a7cf3 16:609d82a7ebae $ hg log -q -r 'bisect(ignored)' $ hg bisect -g # -> update to rev 13 Testing changeset 13:b0a32c86eb31 (9 changesets remaining, ~3 tests) 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -s # -> update to rev 10 Testing changeset 10:429fcd26f52d (9 changesets remaining, ~3 tests) 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -b # -> update to rev 8 Testing changeset 8:dab8161ac8fc (3 changesets remaining, ~1 tests) 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -g # -> update to rev 9 Testing changeset 9:3c77083deb4a (2 changesets remaining, ~1 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -b The first bad revision is: changeset: 9:3c77083deb4a user: test date: Thu Jan 01 00:00:09 1970 +0000 summary: 9 $ hg log -q -r 'bisect(range)' 0:33b1f9bc8bc5 1:4ca5088da217 2:051e12f87bf1 3:0950834f0a9c 4:5c668c22234f 5:385a529b6670 6:a214d5d3811a 8:dab8161ac8fc 9:3c77083deb4a 10:429fcd26f52d 11:82ca6f06eccd 12:9f259202bbe7 13:b0a32c86eb31 15:857b178a7cf3 16:609d82a7ebae 17:228c06deef46 $ hg log 
-q -r 'bisect(pruned)' 0:33b1f9bc8bc5 1:4ca5088da217 2:051e12f87bf1 3:0950834f0a9c 4:5c668c22234f 5:385a529b6670 6:a214d5d3811a 8:dab8161ac8fc 9:3c77083deb4a 10:429fcd26f52d 13:b0a32c86eb31 15:857b178a7cf3 16:609d82a7ebae 17:228c06deef46 18:d42e18c7bc9b $ hg log -q -r 'bisect(untested)' 11:82ca6f06eccd 12:9f259202bbe7 $ hg log -q -r 'bisect(goods)' 0:33b1f9bc8bc5 1:4ca5088da217 2:051e12f87bf1 3:0950834f0a9c 4:5c668c22234f 5:385a529b6670 6:a214d5d3811a 8:dab8161ac8fc $ hg log -q -r 'bisect(bads)' 9:3c77083deb4a 10:429fcd26f52d 15:857b178a7cf3 16:609d82a7ebae 17:228c06deef46 18:d42e18c7bc9b complex bisect test 2 # first good rev is 13 $ hg bisect -r $ hg bisect -g 18 $ hg bisect -b 1 # -> update to rev 6 Testing changeset 6:a214d5d3811a (13 changesets remaining, ~3 tests) 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -s # -> update to rev 10 Testing changeset 10:429fcd26f52d (13 changesets remaining, ~3 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 1:4ca5088da217 6:a214d5d3811a 18:d42e18c7bc9b $ hg bisect -b # -> update to rev 12 Testing changeset 12:9f259202bbe7 (5 changesets remaining, ~2 tests) 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 1:4ca5088da217 2:051e12f87bf1 3:0950834f0a9c 4:5c668c22234f 5:385a529b6670 6:a214d5d3811a 8:dab8161ac8fc 9:3c77083deb4a 10:429fcd26f52d 18:d42e18c7bc9b $ hg log -q -r 'bisect(untested)' 11:82ca6f06eccd 12:9f259202bbe7 13:b0a32c86eb31 15:857b178a7cf3 $ hg bisect -b # -> update to rev 13 Testing changeset 13:b0a32c86eb31 (3 changesets remaining, ~1 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g The first good revision is: changeset: 13:b0a32c86eb31 user: test date: Thu Jan 01 00:00:13 1970 +0000 summary: 13 $ hg log -q -r 'bisect(range)' 1:4ca5088da217 2:051e12f87bf1 3:0950834f0a9c 4:5c668c22234f 
5:385a529b6670 6:a214d5d3811a 8:dab8161ac8fc 9:3c77083deb4a 10:429fcd26f52d 11:82ca6f06eccd 12:9f259202bbe7 13:b0a32c86eb31 15:857b178a7cf3 18:d42e18c7bc9b complex bisect test 3 first bad rev is 15 10,9,13 are skipped an might be the first bad revisions as well $ hg bisect -r $ hg bisect -g 1 $ hg bisect -b 16 # -> update to rev 6 Testing changeset 6:a214d5d3811a (13 changesets remaining, ~3 tests) 2 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 1:4ca5088da217 16:609d82a7ebae 17:228c06deef46 $ hg bisect -g # -> update to rev 13 Testing changeset 13:b0a32c86eb31 (8 changesets remaining, ~3 tests) 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -s # -> update to rev 10 Testing changeset 10:429fcd26f52d (8 changesets remaining, ~3 tests) 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -s # -> update to rev 12 Testing changeset 12:9f259202bbe7 (8 changesets remaining, ~3 tests) 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 1:4ca5088da217 2:051e12f87bf1 3:0950834f0a9c 4:5c668c22234f 5:385a529b6670 6:a214d5d3811a 10:429fcd26f52d 13:b0a32c86eb31 16:609d82a7ebae 17:228c06deef46 $ hg bisect -g # -> update to rev 9 Testing changeset 9:3c77083deb4a (5 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -s # -> update to rev 15 Testing changeset 15:857b178a7cf3 (5 changesets remaining, ~2 tests) 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -q -r 'bisect(ignored)' $ hg bisect -b Due to skipped revisions, the first bad revision could be any of: changeset: 9:3c77083deb4a user: test date: Thu Jan 01 00:00:09 1970 +0000 summary: 9 changeset: 10:429fcd26f52d parent: 9:3c77083deb4a parent: 6:a214d5d3811a user: test date: Thu Jan 01 00:00:10 1970 +0000 summary: merge 6,9 changeset: 
13:b0a32c86eb31 user: test date: Thu Jan 01 00:00:13 1970 +0000 summary: 13 changeset: 15:857b178a7cf3 parent: 13:b0a32c86eb31 parent: 10:429fcd26f52d user: test date: Thu Jan 01 00:00:15 1970 +0000 summary: merge 10,13 $ hg log -q -r 'bisect(range)' 1:4ca5088da217 2:051e12f87bf1 3:0950834f0a9c 4:5c668c22234f 5:385a529b6670 6:a214d5d3811a 8:dab8161ac8fc 9:3c77083deb4a 10:429fcd26f52d 11:82ca6f06eccd 12:9f259202bbe7 13:b0a32c86eb31 15:857b178a7cf3 16:609d82a7ebae $ hg log -q -r 'bisect(ignored)' complex bisect test 4 first good revision is 17 15,16 are skipped an might be the first good revisions as well $ hg bisect -r $ hg bisect -g 17 $ hg bisect -b 8 # -> update to rev 10 Testing changeset 13:b0a32c86eb31 (8 changesets remaining, ~3 tests) 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -b # -> update to rev 13 Testing changeset 10:429fcd26f52d (5 changesets remaining, ~2 tests) 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bisect -b # -> update to rev 15 Testing changeset 15:857b178a7cf3 (3 changesets remaining, ~1 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 1:4ca5088da217 2:051e12f87bf1 3:0950834f0a9c 4:5c668c22234f 5:385a529b6670 6:a214d5d3811a 8:dab8161ac8fc 9:3c77083deb4a 10:429fcd26f52d 11:82ca6f06eccd 12:9f259202bbe7 13:b0a32c86eb31 17:228c06deef46 $ hg bisect -s # -> update to rev 16 Testing changeset 16:609d82a7ebae (3 changesets remaining, ~1 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 1:4ca5088da217 2:051e12f87bf1 3:0950834f0a9c 4:5c668c22234f 5:385a529b6670 6:a214d5d3811a 8:dab8161ac8fc 9:3c77083deb4a 10:429fcd26f52d 11:82ca6f06eccd 12:9f259202bbe7 13:b0a32c86eb31 15:857b178a7cf3 17:228c06deef46 $ hg bisect -s Due to skipped revisions, the first good revision could be any of: changeset: 15:857b178a7cf3 parent: 13:b0a32c86eb31 parent: 
10:429fcd26f52d user: test date: Thu Jan 01 00:00:15 1970 +0000 summary: merge 10,13 changeset: 16:609d82a7ebae user: test date: Thu Jan 01 00:00:16 1970 +0000 summary: 16 changeset: 17:228c06deef46 user: test date: Thu Jan 01 00:00:17 1970 +0000 summary: 17 $ hg log -q -r 'bisect(range)' 8:dab8161ac8fc 9:3c77083deb4a 10:429fcd26f52d 11:82ca6f06eccd 12:9f259202bbe7 13:b0a32c86eb31 15:857b178a7cf3 16:609d82a7ebae 17:228c06deef46 $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 1:4ca5088da217 2:051e12f87bf1 3:0950834f0a9c 4:5c668c22234f 5:385a529b6670 6:a214d5d3811a 8:dab8161ac8fc 9:3c77083deb4a 10:429fcd26f52d 11:82ca6f06eccd 12:9f259202bbe7 13:b0a32c86eb31 15:857b178a7cf3 16:609d82a7ebae 17:228c06deef46 test unrelated revs: $ hg bisect --reset $ hg bisect -b 7 $ hg bisect -g 14 abort: starting revisions are not directly related [255] $ hg log -q -r 'bisect(range)' $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 1:4ca5088da217 2:051e12f87bf1 3:0950834f0a9c 4:5c668c22234f 7:50c76098bbf2 14:faa450606157 $ hg bisect --reset end at merge: 17 bad, 11 good (but 9 is first bad) $ hg bisect -r $ hg bisect -b 17 $ hg bisect -g 11 Testing changeset 13:b0a32c86eb31 (5 changesets remaining, ~2 tests) 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg log -q -r 'bisect(ignored)' 2:051e12f87bf1 3:0950834f0a9c 4:5c668c22234f 5:385a529b6670 6:a214d5d3811a 9:3c77083deb4a 10:429fcd26f52d $ hg bisect -g Testing changeset 15:857b178a7cf3 (3 changesets remaining, ~1 tests) 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -b The first bad revision is: changeset: 15:857b178a7cf3 parent: 13:b0a32c86eb31 parent: 10:429fcd26f52d user: test date: Thu Jan 01 00:00:15 1970 +0000 summary: merge 10,13 Not all ancestors of this changeset have been checked. Use bisect --extend to continue the bisection from the common ancestor, dab8161ac8fc. 
$ hg log -q -r 'bisect(range)' 11:82ca6f06eccd 12:9f259202bbe7 13:b0a32c86eb31 15:857b178a7cf3 16:609d82a7ebae 17:228c06deef46 $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 1:4ca5088da217 8:dab8161ac8fc 11:82ca6f06eccd 12:9f259202bbe7 13:b0a32c86eb31 15:857b178a7cf3 16:609d82a7ebae 17:228c06deef46 18:d42e18c7bc9b $ hg log -q -r 'bisect(untested)' $ hg log -q -r 'bisect(ignored)' 2:051e12f87bf1 3:0950834f0a9c 4:5c668c22234f 5:385a529b6670 6:a214d5d3811a 9:3c77083deb4a 10:429fcd26f52d $ hg bisect --extend Extending search to changeset 8:dab8161ac8fc 2 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg log -q -r 'bisect(untested)' $ hg log -q -r 'bisect(ignored)' 2:051e12f87bf1 3:0950834f0a9c 4:5c668c22234f 5:385a529b6670 6:a214d5d3811a 9:3c77083deb4a 10:429fcd26f52d $ hg bisect -g # dab8161ac8fc Testing changeset 9:3c77083deb4a (3 changesets remaining, ~1 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -q -r 'bisect(untested)' 9:3c77083deb4a 10:429fcd26f52d $ hg log -q -r 'bisect(ignored)' 2:051e12f87bf1 3:0950834f0a9c 4:5c668c22234f 5:385a529b6670 6:a214d5d3811a $ hg log -q -r 'bisect(goods)' 0:33b1f9bc8bc5 1:4ca5088da217 8:dab8161ac8fc 11:82ca6f06eccd 12:9f259202bbe7 13:b0a32c86eb31 $ hg log -q -r 'bisect(bads)' 15:857b178a7cf3 16:609d82a7ebae 17:228c06deef46 18:d42e18c7bc9b $ hg bisect -b The first bad revision is: changeset: 9:3c77083deb4a user: test date: Thu Jan 01 00:00:09 1970 +0000 summary: 9 $ hg log -q -r 'bisect(range)' 8:dab8161ac8fc 9:3c77083deb4a 10:429fcd26f52d 11:82ca6f06eccd 12:9f259202bbe7 13:b0a32c86eb31 15:857b178a7cf3 16:609d82a7ebae 17:228c06deef46 $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 1:4ca5088da217 8:dab8161ac8fc 9:3c77083deb4a 10:429fcd26f52d 11:82ca6f06eccd 12:9f259202bbe7 13:b0a32c86eb31 15:857b178a7cf3 16:609d82a7ebae 17:228c06deef46 18:d42e18c7bc9b $ hg log -q -r 'bisect(untested)' $ hg log -q -r 'bisect(ignored)' 2:051e12f87bf1 3:0950834f0a9c 4:5c668c22234f 
5:385a529b6670 6:a214d5d3811a $ hg log -q -r 'bisect(goods)' 0:33b1f9bc8bc5 1:4ca5088da217 8:dab8161ac8fc 11:82ca6f06eccd 12:9f259202bbe7 13:b0a32c86eb31 $ hg log -q -r 'bisect(bads)' 9:3c77083deb4a 10:429fcd26f52d 15:857b178a7cf3 16:609d82a7ebae 17:228c06deef46 18:d42e18c7bc9b user adds irrelevant but consistent information (here: -g 2) to bisect state $ hg bisect -r $ hg bisect -b 13 $ hg bisect -g 8 Testing changeset 11:82ca6f06eccd (3 changesets remaining, ~1 tests) 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -q -r 'bisect(untested)' 11:82ca6f06eccd 12:9f259202bbe7 $ hg bisect -g 2 Testing changeset 11:82ca6f06eccd (3 changesets remaining, ~1 tests) 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -q -r 'bisect(untested)' 11:82ca6f06eccd 12:9f259202bbe7 $ hg bisect -b The first bad revision is: changeset: 11:82ca6f06eccd parent: 8:dab8161ac8fc user: test date: Thu Jan 01 00:00:11 1970 +0000 summary: 11 $ hg log -q -r 'bisect(range)' 8:dab8161ac8fc 11:82ca6f06eccd 12:9f259202bbe7 13:b0a32c86eb31 $ hg log -q -r 'bisect(pruned)' 0:33b1f9bc8bc5 1:4ca5088da217 2:051e12f87bf1 8:dab8161ac8fc 11:82ca6f06eccd 12:9f259202bbe7 13:b0a32c86eb31 14:faa450606157 15:857b178a7cf3 16:609d82a7ebae 17:228c06deef46 18:d42e18c7bc9b $ hg log -q -r 'bisect(untested)' mercurial-3.7.3/tests/test-issue1306.t0000644000175000017500000000377312676531525017107 0ustar mpmmpm00000000000000https://bz.mercurial-scm.org/1306 Initialize remote repo with branches: $ hg init remote $ cd remote $ echo a > a $ hg ci -Ama adding a $ hg branch br marked working directory as branch br (branches are permanent and global, did you want a bookmark?) 
$ hg ci -Amb $ echo c > c $ hg ci -Amc adding c $ hg log changeset: 2:ae3d9c30ec50 branch: br tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: c changeset: 1:3f7f930ca414 branch: br user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: b changeset: 0:cb9a9f314b8b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a $ cd .. Try cloning -r branch: $ hg clone -rbr remote local1 adding changesets adding manifests adding file changes added 3 changesets with 2 changes to 2 files updating to branch br 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R local1 parents changeset: 2:ae3d9c30ec50 branch: br tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: c Try cloning -rother clone#branch: $ hg clone -r0 remote#br local2 adding changesets adding manifests adding file changes added 3 changesets with 2 changes to 2 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R local2 parents changeset: 0:cb9a9f314b8b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a Try cloning -r1 clone#branch: $ hg clone -r1 remote#br local3 adding changesets adding manifests adding file changes added 3 changesets with 2 changes to 2 files updating to branch br 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R local3 parents changeset: 1:3f7f930ca414 branch: br user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: b mercurial-3.7.3/tests/test-revlog.t0000644000175000017500000000116012676531525016727 0ustar mpmmpm00000000000000Test for CVE-2016-3630 $ hg init >>> open("a.i", "w").write( ... """eJxjYGZgZIAAYQYGxhgom+k/FMx8YKx9ZUaKSOyqo4cnuKb8mbqHV5cBCVTMWb1Cwqkhe4Gsg9AD ... Joa3dYtcYYYBAQ8Qr4OqZAYRICPTSr5WKd/42rV36d+8/VmrNpv7NP1jQAXrQE4BqQUARngwVA==""" ... 
.decode("base64").decode("zlib")) $ hg debugindex a.i rev offset length delta linkrev nodeid p1 p2 0 0 19 -1 2 99e0332bd498 000000000000 000000000000 1 19 12 0 3 6674f57a23d8 99e0332bd498 000000000000 $ hg debugdata a.i 1 2>&1 | grep decoded mpatch.mpatchError: patch cannot be decoded mercurial-3.7.3/tests/test-dirstate-race.t0000644000175000017500000000054112676531525020162 0ustar mpmmpm00000000000000 $ hg init $ echo a > a $ hg add a $ hg commit -m test Do we ever miss a sub-second change?: $ for i in 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20; do > hg co -qC 0 > echo b > a > hg st > done M a M a M a M a M a M a M a M a M a M a M a M a M a M a M a M a M a M a M a M a mercurial-3.7.3/tests/test-rename-dir-merge.t0000644000175000017500000001503212676531525020554 0ustar mpmmpm00000000000000 $ hg init t $ cd t $ mkdir a $ echo foo > a/a $ echo bar > a/b $ hg ci -Am "0" adding a/a adding a/b $ hg co -C 0 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg mv a b moving a/a to b/a (glob) moving a/b to b/b (glob) $ hg ci -m "1 mv a/ b/" $ hg co -C 0 2 files updated, 0 files merged, 2 files removed, 0 files unresolved $ echo baz > a/c $ echo quux > a/d $ hg add a/c $ hg ci -m "2 add a/c" created new head $ hg merge --debug 1 searching for copies back to rev 1 unmatched files in local: a/c unmatched files in other: b/a b/b all copies found (* = to merge, ! 
= divergent, % = renamed and deleted): src: 'a/a' -> dst: 'b/a' src: 'a/b' -> dst: 'b/b' checking for directory renames discovered dir src: 'a/' -> dst: 'b/' pending file src: 'a/c' -> dst: 'b/c' resolving manifests branchmerge: True, force: False, partial: False ancestor: f9b20c0d4c51, local: ce36d17b18fb+, remote: 397f8b00a740 a/a: other deleted -> r removing a/a a/b: other deleted -> r removing a/b b/a: remote created -> g getting b/a b/b: remote created -> g getting b/b b/c: remote directory rename - move from a/c -> dm moving a/c to b/c (glob) 3 files updated, 0 files merged, 2 files removed, 0 files unresolved (branch merge, don't forget to commit) $ echo a/* b/* a/d b/a b/b b/c $ hg st -C M b/a M b/b A b/c a/c R a/a R a/b R a/c ? a/d $ hg ci -m "3 merge 2+1" $ hg debugrename b/c b/c renamed from a/c:354ae8da6e890359ef49ade27b68bbc361f3ca88 (glob) $ hg co -C 1 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge --debug 2 searching for copies back to rev 1 unmatched files in local: b/a b/b unmatched files in other: a/c all copies found (* = to merge, ! = divergent, % = renamed and deleted): src: 'a/a' -> dst: 'b/a' src: 'a/b' -> dst: 'b/b' checking for directory renames discovered dir src: 'a/' -> dst: 'b/' pending file src: 'a/c' -> dst: 'b/c' resolving manifests branchmerge: True, force: False, partial: False ancestor: f9b20c0d4c51, local: 397f8b00a740+, remote: ce36d17b18fb b/c: local directory rename - get from a/c -> dg getting a/c to b/c 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ echo a/* b/* a/d b/a b/b b/c $ hg st -C A b/c a/c ? a/d $ hg ci -m "4 merge 1+2" created new head $ hg debugrename b/c b/c renamed from a/c:354ae8da6e890359ef49ade27b68bbc361f3ca88 (glob) Local directory rename with conflicting file added in remote source directory and untracked in local target directory. 
$ hg co -qC 1 $ echo target > b/c $ hg merge 2 b/c: untracked file differs abort: untracked files in working directory differ from files in requested revision [255] $ cat b/c target but it should succeed if the content matches $ hg cat -r 2 a/c > b/c $ hg merge 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg st -C A b/c a/c ? a/d Local directory rename with conflicting file added in remote source directory and committed in local target directory. $ hg co -qC 1 $ echo target > b/c $ hg add b/c $ hg commit -qm 'new file in target directory' $ hg merge 2 merging b/c and a/c to b/c warning: conflicts while merging b/c! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ hg st -A M b/c a/c ? a/d ? b/c.orig C b/a C b/b $ cat b/c <<<<<<< local: f1c50ca4f127 - test: new file in target directory target ======= baz >>>>>>> other: ce36d17b18fb - test: 2 add a/c $ rm b/c.orig Remote directory rename with conflicting file added in remote target directory and committed in local source directory. $ hg co -qC 2 $ hg st -A ? a/d C a/a C a/b C a/c $ hg merge 5 merging a/c and b/c to b/c warning: conflicts while merging b/c! (edit, then use 'hg resolve --mark') 2 files updated, 0 files merged, 2 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ hg st -A M b/a M b/b M b/c a/c R a/a R a/b R a/c ? a/d ? b/c.orig $ cat b/c <<<<<<< local: ce36d17b18fb - test: 2 add a/c baz ======= target >>>>>>> other: f1c50ca4f127 - test: new file in target directory Second scenario with two repos: $ cd .. $ hg init r1 $ cd r1 $ mkdir a $ echo foo > a/f $ hg add a adding a/f (glob) $ hg ci -m "a/f == foo" $ cd .. 
$ hg clone r1 r2 updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd r2 $ hg mv a b moving a/f to b/f (glob) $ echo foo1 > b/f $ hg ci -m" a -> b, b/f == foo1" $ cd .. $ cd r1 $ mkdir a/aa $ echo bar > a/aa/g $ hg add a/aa adding a/aa/g (glob) $ hg ci -m "a/aa/g" $ hg pull ../r2 pulling from ../r2 searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg merge 2 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg st -C M b/f A b/aa/g a/aa/g R a/aa/g R a/f $ cd .. Test renames to separate directories $ hg init a $ cd a $ mkdir a $ touch a/s $ touch a/t $ hg ci -Am0 adding a/s adding a/t Add more files $ touch a/s2 $ touch a/t2 $ hg ci -Am1 adding a/s2 adding a/t2 Do moves on a branch $ hg up 0 0 files updated, 0 files merged, 2 files removed, 0 files unresolved $ mkdir s $ mkdir t $ hg mv a/s s $ hg mv a/t t $ hg ci -Am2 created new head $ hg st --copies --change . A s/s a/s A t/t a/t R a/s R a/t Merge shouldn't move s2, t2 $ hg merge 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg st --copies M a/s2 M a/t2 Try the merge in the other direction. It may or may not be appropriate for status to list copies here. 
$ hg up -C 1 4 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg merge 2 files updated, 0 files merged, 2 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg st --copies M s/s M t/t R a/s R a/t mercurial-3.7.3/tests/test-mq-qdelete.t0000644000175000017500000000675312676531525017504 0ustar mpmmpm00000000000000 $ echo "[extensions]" >> $HGRCPATH $ echo "mq=" >> $HGRCPATH $ hg init a $ cd a $ echo 'base' > base $ hg ci -Ambase -d '1 0' adding base $ hg qnew -d '1 0' pa $ hg qnew -d '1 0' pb $ hg qnew -d '1 0' pc $ hg qdel abort: qdelete requires at least one revision or patch name [255] $ hg qdel pc abort: cannot delete applied patch pc [255] $ hg qpop popping pc now at: pb Delete the same patch twice in one command (issue2427) $ hg qdel pc pc $ hg qseries pa pb $ ls .hg/patches pa pb series status $ hg qpop popping pb now at: pa $ hg qdel -k 1 $ ls .hg/patches pa pb series status $ hg qdel -r pa patch pa finalized without changeset message $ hg qapplied $ hg log --template '{rev} {desc}\n' 1 [mq]: pa 0 base $ hg qnew pd $ hg qnew pe $ hg qnew pf $ hg qdel -r pe abort: cannot delete revision 3 above applied patches [255] $ hg qdel -r qbase:pe patch pd finalized without changeset message patch pe finalized without changeset message $ hg qapplied pf $ hg log --template '{rev} {desc}\n' 4 [mq]: pf 3 [mq]: pe 2 [mq]: pd 1 [mq]: pa 0 base $ cd .. $ hg init b $ cd b $ echo 'base' > base $ hg ci -Ambase -d '1 0' adding base $ hg qfinish abort: no revisions specified [255] $ hg qfinish -a no patches applied $ hg qnew -d '1 0' pa $ hg qnew -d '1 0' pb $ hg qnew pc # XXX fails to apply by /usr/bin/patch if we put a date $ hg qfinish 0 abort: revision 0 is not managed [255] $ hg qfinish pb abort: cannot delete revision 2 above applied patches [255] $ hg qpop popping pc now at: pb $ hg qfinish -a pc abort: unknown revision 'pc'! 
[255] $ hg qpush applying pc patch pc is empty now at: pc $ hg qfinish qbase:pb patch pa finalized without changeset message patch pb finalized without changeset message $ hg qapplied pc $ hg log --template '{rev} {desc}\n' 3 imported patch pc 2 [mq]: pb 1 [mq]: pa 0 base $ hg qfinish -a pc patch pc finalized without changeset message $ hg qapplied $ hg log --template '{rev} {desc}\n' 3 imported patch pc 2 [mq]: pb 1 [mq]: pa 0 base $ ls .hg/patches series status qdel -k X && hg qimp -e X used to trigger spurious output with versioned queues $ hg init --mq $ hg qimport -r 3 $ hg qpop popping imported_patch_pc patch queue now empty $ hg qdel -k imported_patch_pc $ hg qimp -e imported_patch_pc adding imported_patch_pc to series file $ hg qfinish -a no patches applied resilience to inconsistency: qfinish -a with applied patches not in series $ hg qser imported_patch_pc $ hg qapplied $ hg qpush applying imported_patch_pc patch imported_patch_pc is empty now at: imported_patch_pc $ echo next >> base $ hg qrefresh -d '1 0' $ echo > .hg/patches/series # remove 3.diff from series to confuse mq $ hg qfinish -a revision 47dfa8501675 refers to unknown patches: imported_patch_pc more complex state 'both known and unknown patches $ echo hip >> base $ hg qnew -f -d '1 0' -m 4 4.diff $ echo hop >> base $ hg qnew -f -d '1 0' -m 5 5.diff $ echo > .hg/patches/series # remove 4.diff and 5.diff from series to confuse mq $ echo hup >> base $ hg qnew -f -d '1 0' -m 6 6.diff $ echo pup > base $ hg qfinish -a warning: uncommitted changes in the working directory revision 2b1c98802260 refers to unknown patches: 5.diff revision 33a6861311c0 refers to unknown patches: 4.diff $ cd .. 
mercurial-3.7.3/tests/test-update-reverse.t0000644000175000017500000000320612676531525020367 0ustar mpmmpm00000000000000 $ hg init $ touch a $ hg add a $ hg commit -m "Added a" $ touch main $ hg add main $ hg commit -m "Added main" $ hg checkout 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved 'main' should be gone: $ ls a $ touch side1 $ hg add side1 $ hg commit -m "Added side1" created new head $ touch side2 $ hg add side2 $ hg commit -m "Added side2" $ hg log changeset: 3:91ebc10ed028 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Added side2 changeset: 2:b932d7dbb1e1 parent: 0:c2eda428b523 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Added side1 changeset: 1:71a760306caf user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Added main changeset: 0:c2eda428b523 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Added a $ hg heads changeset: 3:91ebc10ed028 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Added side2 changeset: 1:71a760306caf user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Added main $ ls a side1 side2 $ hg update --debug -C 1 resolving manifests branchmerge: False, force: True, partial: False ancestor: 91ebc10ed028+, local: 91ebc10ed028+, remote: 71a760306caf side1: other deleted -> r removing side1 side2: other deleted -> r removing side2 main: remote created -> g getting main 1 files updated, 0 files merged, 2 files removed, 0 files unresolved $ ls a main mercurial-3.7.3/tests/test-issue1438.t0000644000175000017500000000055712676531525017112 0ustar mpmmpm00000000000000#require symlink https://bz.mercurial-scm.org/1438 $ hg init $ ln -s foo link $ hg add link $ hg ci -mbad link $ hg rm link $ hg ci -mok $ hg diff -g -r 0:1 > bad.patch $ hg up 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg import --no-commit bad.patch applying bad.patch $ hg status R link ? 
bad.patch mercurial-3.7.3/tests/test-bundle2-multiple-changegroups.t0000644000175000017500000002350712676531525023311 0ustar mpmmpm00000000000000Create an extension to test bundle2 with multiple changegroups $ cat > bundle2.py < """ > """ > from mercurial import changegroup, exchange > > def _getbundlechangegrouppart(bundler, repo, source, bundlecaps=None, > b2caps=None, heads=None, common=None, > **kwargs): > # Create two changegroups given the common changesets and heads for the > # changegroup part we are being requested. Use the parent of each head > # in 'heads' as intermediate heads for the first changegroup. > intermediates = [repo[r].p1().node() for r in heads] > cg = changegroup.getchangegroup(repo, source, heads=intermediates, > common=common, bundlecaps=bundlecaps) > bundler.newpart('output', data='changegroup1') > bundler.newpart('changegroup', data=cg.getchunks()) > cg = changegroup.getchangegroup(repo, source, heads=heads, > common=common + intermediates, > bundlecaps=bundlecaps) > bundler.newpart('output', data='changegroup2') > bundler.newpart('changegroup', data=cg.getchunks()) > > def _pull(repo, *args, **kwargs): > pullop = _orig_pull(repo, *args, **kwargs) > repo.ui.write('pullop.cgresult is %d\n' % pullop.cgresult) > return pullop > > _orig_pull = exchange.pull > exchange.pull = _pull > exchange.getbundle2partsmapping['changegroup'] = _getbundlechangegrouppart > EOF $ cat >> $HGRCPATH << EOF > [experimental] > bundle2-exp=True > [ui] > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline} > EOF Start with a simple repository with a single commit $ hg init repo $ cd repo $ cat > .hg/hgrc << EOF > [extensions] > bundle2=$TESTTMP/bundle2.py > EOF $ echo A > A $ hg commit -A -m A -q $ cd .. 
Clone $ hg clone -q repo clone Add two linear commits $ cd repo $ echo B > B $ hg commit -A -m B -q $ echo C > C $ hg commit -A -m C -q $ cd ../clone $ cat >> .hg/hgrc < [hooks] > pretxnchangegroup = sh -c "printenv.py pretxnchangegroup" > changegroup = sh -c "printenv.py changegroup" > incoming = sh -c "printenv.py incoming" > EOF Pull the new commits in the clone $ hg pull pulling from $TESTTMP/repo (glob) searching for changes remote: changegroup1 adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files pretxnchangegroup hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_NODE_LAST=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob) remote: changegroup2 adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files pretxnchangegroup hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_NODE_LAST=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob) changegroup hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_NODE_LAST=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob) incoming hook: HG_NODE=27547f69f25460a52fff66ad004e58da7ad3fb56 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob) changegroup hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_NODE_LAST=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob) incoming hook: HG_NODE=f838bfaca5c7226600ebcfd84f3c3c13a28d3757 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob) pullop.cgresult is 1 (run 'hg update' to get a working copy) $ hg update 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -G @ 2:f838bfaca5c7 public test C | o 1:27547f69f254 public test 
B | o 0:4a2df7238c3b public test A Add more changesets with multiple heads to the original repository $ cd ../repo $ echo D > D $ hg commit -A -m D -q $ hg up -r 1 0 files updated, 0 files merged, 2 files removed, 0 files unresolved $ echo E > E $ hg commit -A -m E -q $ echo F > F $ hg commit -A -m F -q $ hg up -r 1 0 files updated, 0 files merged, 2 files removed, 0 files unresolved $ echo G > G $ hg commit -A -m G -q $ hg up -r 3 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo H > H $ hg commit -A -m H -q $ hg log -G @ 7:5cd59d311f65 draft test H | | o 6:1d14c3ce6ac0 draft test G | | | | o 5:7f219660301f draft test F | | | | | o 4:8a5212ebc852 draft test E | |/ o | 3:b3325c91a4d9 draft test D | | o | 2:f838bfaca5c7 draft test C |/ o 1:27547f69f254 draft test B | o 0:4a2df7238c3b draft test A New heads are reported during transfer and properly accounted for in pullop.cgresult $ cd ../clone $ hg pull pulling from $TESTTMP/repo (glob) searching for changes remote: changegroup1 adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files (+1 heads) pretxnchangegroup hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_NODE_LAST=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob) remote: changegroup2 adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 3 files (+1 heads) pretxnchangegroup hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_NODE_LAST=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob) changegroup hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_NODE_LAST=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob) incoming hook: HG_NODE=b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e HG_SOURCE=pull HG_TXNID=TXN:* 
HG_URL=file:$TESTTMP/repo (glob) incoming hook: HG_NODE=8a5212ebc8527f9fb821601504794e3eb11a1ed3 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob) changegroup hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_NODE_LAST=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob) incoming hook: HG_NODE=7f219660301fe4c8a116f714df5e769695cc2b46 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob) incoming hook: HG_NODE=1d14c3ce6ac0582d2809220d33e8cd7a696e0156 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob) incoming hook: HG_NODE=5cd59d311f6508b8e0ed28a266756c859419c9f1 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob) pullop.cgresult is 3 (run 'hg heads' to see heads, 'hg merge' to merge) $ hg log -G o 7:5cd59d311f65 public test H | | o 6:1d14c3ce6ac0 public test G | | | | o 5:7f219660301f public test F | | | | | o 4:8a5212ebc852 public test E | |/ o | 3:b3325c91a4d9 public test D | | @ | 2:f838bfaca5c7 public test C |/ o 1:27547f69f254 public test B | o 0:4a2df7238c3b public test A Removing a head from the original repository by merging it $ cd ../repo $ hg merge -r 6 -q $ hg commit -m Merge $ echo I > I $ hg commit -A -m H -q $ hg log -G @ 9:9d18e5bd9ab0 draft test H | o 8:71bd7b46de72 draft test Merge |\ | o 7:5cd59d311f65 draft test H | | o | 6:1d14c3ce6ac0 draft test G | | | | o 5:7f219660301f draft test F | | | +---o 4:8a5212ebc852 draft test E | | | o 3:b3325c91a4d9 draft test D | | | o 2:f838bfaca5c7 draft test C |/ o 1:27547f69f254 draft test B | o 0:4a2df7238c3b draft test A Removed heads are reported during transfer and properly accounted for in pullop.cgresult $ cd ../clone $ hg pull pulling from $TESTTMP/repo (glob) searching for changes remote: changegroup1 adding changesets adding manifests adding file changes added 1 changesets with 0 changes to 0 files (-1 heads) pretxnchangegroup 
hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_NODE_LAST=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_PENDING=$TESTTMP/clone HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob) remote: changegroup2 adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files pretxnchangegroup hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_NODE_LAST=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PENDING=$TESTTMP/clone HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob) changegroup hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_NODE_LAST=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob) incoming hook: HG_NODE=71bd7b46de72e69a32455bf88d04757d542e6cf4 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob) changegroup hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_NODE_LAST=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob) incoming hook: HG_NODE=9d18e5bd9ab09337802595d49f1dad0c98df4d84 HG_PHASES_MOVED=1 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/repo (glob) pullop.cgresult is -2 (run 'hg update' to get a working copy) $ hg log -G o 9:9d18e5bd9ab0 public test H | o 8:71bd7b46de72 public test Merge |\ | o 7:5cd59d311f65 public test H | | o | 6:1d14c3ce6ac0 public test G | | | | o 5:7f219660301f public test F | | | +---o 4:8a5212ebc852 public test E | | | o 3:b3325c91a4d9 public test D | | | @ 2:f838bfaca5c7 public test C |/ o 1:27547f69f254 public test B | o 0:4a2df7238c3b public test A mercurial-3.7.3/tests/test-clone-pull-corruption.t0000644000175000017500000000231612676531525021711 0ustar mpmmpm00000000000000Corrupt an hg repo with a pull started during an aborted commit Create two repos, so that one of them can pull from the other one. $ hg init source $ cd source $ touch foo $ hg add foo $ hg ci -m 'add foo' $ hg clone . 
../corrupted updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo >> foo $ hg ci -m 'change foo' Add a hook to wait 5 seconds and then abort the commit $ cd ../corrupted $ echo "[hooks]" >> .hg/hgrc $ echo 'pretxncommit = sh -c "sleep 5; exit 1"' >> .hg/hgrc start a commit... $ touch bar $ hg add bar $ hg ci -m 'add bar' & ... and start a pull while the commit is still running $ sleep 1 $ hg pull ../source 2>/dev/null pulling from ../source transaction abort! rollback completed abort: pretxncommit hook exited with status 1 searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (run 'hg update' to get a working copy) see what happened $ wait $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 2 changesets, 2 total revisions $ cd .. mercurial-3.7.3/tests/cgienv0000644000175000017500000000332712676531525015474 0ustar mpmmpm00000000000000DOCUMENT_ROOT="/var/www/hg"; export DOCUMENT_ROOT GATEWAY_INTERFACE="CGI/1.1"; export GATEWAY_INTERFACE HTTP_ACCEPT="text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"; export HTTP_ACCEPT HTTP_ACCEPT_CHARSET="ISO-8859-1,utf-8;q=0.7,*;q=0.7"; export HTTP_ACCEPT_CHARSET HTTP_ACCEPT_ENCODING="gzip,deflate"; export HTTP_ACCEPT_ENCODING HTTP_ACCEPT_LANGUAGE="en-us,en;q=0.5"; export HTTP_ACCEPT_LANGUAGE HTTP_CACHE_CONTROL="max-age=0"; export HTTP_CACHE_CONTROL HTTP_CONNECTION="keep-alive"; export HTTP_CONNECTION HTTP_HOST="hg.omnifarious.org"; export HTTP_HOST HTTP_KEEP_ALIVE="300"; export HTTP_KEEP_ALIVE HTTP_USER_AGENT="Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.8.0.4) Gecko/20060608 Ubuntu/dapper-security Firefox/1.5.0.4"; export HTTP_USER_AGENT PATH_INFO="/"; export PATH_INFO PATH_TRANSLATED="/var/www/hg/index.html"; export PATH_TRANSLATED QUERY_STRING=""; export QUERY_STRING REMOTE_ADDR="127.0.0.2"; 
export REMOTE_ADDR REMOTE_PORT="44703"; export REMOTE_PORT REQUEST_METHOD="GET"; export REQUEST_METHOD REQUEST_URI="/test/"; export REQUEST_URI SCRIPT_FILENAME="/home/hopper/hg_public/test.cgi"; export SCRIPT_FILENAME SCRIPT_NAME="/test"; export SCRIPT_NAME SCRIPT_URI="http://hg.omnifarious.org/test/"; export SCRIPT_URI SCRIPT_URL="/test/"; export SCRIPT_URL SERVER_ADDR="127.0.0.1"; export SERVER_ADDR SERVER_ADMIN="eric@localhost"; export SERVER_ADMIN SERVER_NAME="hg.omnifarious.org"; export SERVER_NAME SERVER_PORT="80"; export SERVER_PORT SERVER_PROTOCOL="HTTP/1.1"; export SERVER_PROTOCOL SERVER_SIGNATURE="
                        Apache/2.0.53 (Fedora) Server at hg.omnifarious.org Port 80
                        "; export SERVER_SIGNATURE SERVER_SOFTWARE="Apache/2.0.53 (Fedora)"; export SERVER_SOFTWARE mercurial-3.7.3/tests/test-known.t0000644000175000017500000000242412676531525016571 0ustar mpmmpm00000000000000#require killdaemons = Test the known() protocol function = Create a test repository: $ hg init repo $ cd repo $ touch a ; hg add a ; hg ci -ma $ touch b ; hg add b ; hg ci -mb $ touch c ; hg add c ; hg ci -mc $ hg log --template '{node}\n' 991a3460af53952d10ec8a295d3d2cc2e5fa9690 0e067c57feba1a5694ca4844f05588bb1bf82342 3903775176ed42b1458a6281db4a0ccf4d9f287a $ cd .. Test locally: $ hg debugknown repo 991a3460af53952d10ec8a295d3d2cc2e5fa9690 0e067c57feba1a5694ca4844f05588bb1bf82342 3903775176ed42b1458a6281db4a0ccf4d9f287a 111 $ hg debugknown repo 000a3460af53952d10ec8a295d3d2cc2e5fa9690 0e067c57feba1a5694ca4844f05588bb1bf82342 0003775176ed42b1458a6281db4a0ccf4d9f287a 010 $ hg debugknown repo Test via HTTP: $ hg serve -R repo -p $HGPORT -d --pid-file=hg.pid -E error.log -A access.log $ cat hg.pid >> $DAEMON_PIDS $ hg debugknown http://localhost:$HGPORT/ 991a3460af53952d10ec8a295d3d2cc2e5fa9690 0e067c57feba1a5694ca4844f05588bb1bf82342 3903775176ed42b1458a6281db4a0ccf4d9f287a 111 $ hg debugknown http://localhost:$HGPORT/ 000a3460af53952d10ec8a295d3d2cc2e5fa9690 0e067c57feba1a5694ca4844f05588bb1bf82342 0003775176ed42b1458a6281db4a0ccf4d9f287a 010 $ hg debugknown http://localhost:$HGPORT/ $ cat error.log $ killdaemons.py mercurial-3.7.3/tests/readlink.py0000755000175000017500000000036512676531525016443 0ustar mpmmpm00000000000000#!/usr/bin/env python import errno, os, sys for f in sys.argv[1:]: try: print f, '->', os.readlink(f) except OSError as err: if err.errno != errno.EINVAL: raise print f, 'not a symlink' sys.exit(0) mercurial-3.7.3/tests/test-init.t0000644000175000017500000001434612676531525016406 0ustar mpmmpm00000000000000This test tries to exercise the ssh functionality with a dummy script (enable general delta early) $ cat << EOF >> 
$HGRCPATH > [format] > usegeneraldelta=yes > EOF $ checknewrepo() > { > name=$1 > if [ -d "$name"/.hg/store ]; then > echo store created > fi > if [ -f "$name"/.hg/00changelog.i ]; then > echo 00changelog.i created > fi > cat "$name"/.hg/requires > } creating 'local' $ hg init local $ checknewrepo local store created 00changelog.i created dotencode fncache generaldelta revlogv1 store $ echo this > local/foo $ hg ci --cwd local -A -m "init" adding foo test custom revlog chunk cache sizes $ hg --config format.chunkcachesize=0 log -R local -pv abort: revlog chunk cache size 0 is not greater than 0! [255] $ hg --config format.chunkcachesize=1023 log -R local -pv abort: revlog chunk cache size 1023 is not a power of 2! [255] $ hg --config format.chunkcachesize=1024 log -R local -pv changeset: 0:08b9e9f63b32 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 files: foo description: init diff -r 000000000000 -r 08b9e9f63b32 foo --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/foo Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +this creating repo with format.usestore=false $ hg --config format.usestore=false init old $ checknewrepo old generaldelta revlogv1 creating repo with format.usefncache=false $ hg --config format.usefncache=false init old2 $ checknewrepo old2 store created 00changelog.i created generaldelta revlogv1 store creating repo with format.dotencode=false $ hg --config format.dotencode=false init old3 $ checknewrepo old3 store created 00changelog.i created fncache generaldelta revlogv1 store creating repo with format.dotencode=false $ hg --config format.generaldelta=false --config format.usegeneraldelta=false init old4 $ checknewrepo old4 store created 00changelog.i created dotencode fncache revlogv1 store test failure $ hg init local abort: repository local already exists! 
[255] init+push to remote2 $ hg init -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote2 $ hg incoming -R remote2 local comparing with local changeset: 0:08b9e9f63b32 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: init $ hg push -R local -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote2 pushing to ssh://user@dummy/remote2 searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files clone to remote1 $ hg clone -e "python \"$TESTDIR/dummyssh\"" local ssh://user@dummy/remote1 searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files The largefiles extension doesn't crash $ hg clone -e "python \"$TESTDIR/dummyssh\"" local ssh://user@dummy/remotelf --config extensions.largefiles= searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files init to existing repo $ hg init -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote1 abort: repository remote1 already exists! abort: could not create remote repo! [255] clone to existing repo $ hg clone -e "python \"$TESTDIR/dummyssh\"" local ssh://user@dummy/remote1 abort: repository remote1 already exists! abort: could not create remote repo! 
[255] output of dummyssh $ cat dummylog Got arguments 1:user@dummy 2:hg init remote2 Got arguments 1:user@dummy 2:hg -R remote2 serve --stdio Got arguments 1:user@dummy 2:hg -R remote2 serve --stdio Got arguments 1:user@dummy 2:hg init remote1 Got arguments 1:user@dummy 2:hg -R remote1 serve --stdio Got arguments 1:user@dummy 2:hg init remotelf Got arguments 1:user@dummy 2:hg -R remotelf serve --stdio Got arguments 1:user@dummy 2:hg init remote1 Got arguments 1:user@dummy 2:hg init remote1 comparing repositories $ hg tip -q -R local 0:08b9e9f63b32 $ hg tip -q -R remote1 0:08b9e9f63b32 $ hg tip -q -R remote2 0:08b9e9f63b32 check names for repositories (clashes with URL schemes, special chars) $ for i in bundle file hg http https old-http ssh static-http "with space"; do > printf "hg init \"$i\"... " > hg init "$i" > test -d "$i" -a -d "$i/.hg" && echo "ok" || echo "failed" > done hg init "bundle"... ok hg init "file"... ok hg init "hg"... ok hg init "http"... ok hg init "https"... ok hg init "old-http"... ok hg init "ssh"... ok hg init "static-http"... ok hg init "with space"... 
ok #if eol-in-paths /* " " is not a valid name for a directory on Windows */ $ hg init " " $ test -d " " $ test -d " /.hg" #endif creating 'local/sub/repo' $ hg init local/sub/repo $ checknewrepo local/sub/repo store created 00changelog.i created dotencode fncache generaldelta revlogv1 store prepare test of init of url configured from paths $ echo '[paths]' >> $HGRCPATH $ echo "somewhere = `pwd`/url from paths" >> $HGRCPATH $ echo "elsewhere = `pwd`/another paths url" >> $HGRCPATH init should (for consistency with clone) expand the url $ hg init somewhere $ checknewrepo "url from paths" store created 00changelog.i created dotencode fncache generaldelta revlogv1 store verify that clone also expand urls $ hg clone somewhere elsewhere updating to branch default 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ checknewrepo "another paths url" store created 00changelog.i created dotencode fncache generaldelta revlogv1 store clone bookmarks $ hg -R local bookmark test $ hg -R local bookmarks * test 0:08b9e9f63b32 $ hg clone -e "python \"$TESTDIR/dummyssh\"" local ssh://user@dummy/remote-bookmarks searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files exporting bookmark test $ hg -R remote-bookmarks bookmarks test 0:08b9e9f63b32 mercurial-3.7.3/tests/test-histedit-obsolete.t0000644000175000017500000002460512676531525021071 0ustar mpmmpm00000000000000 $ . 
"$TESTDIR/histedit-helpers.sh" Enable obsolete $ cat >> $HGRCPATH << EOF > [ui] > logtemplate= {rev}:{node|short} {desc|firstline} > [phases] > publish=False > [experimental] > evolution=createmarkers,allowunstable > [extensions] > histedit= > rebase= > EOF $ hg init base $ cd base $ for x in a b c d e f ; do > echo $x > $x > hg add $x > hg ci -m $x > done $ hg log --graph @ 5:652413bf663e f | o 4:e860deea161a e | o 3:055a42cdd887 d | o 2:177f92b77385 c | o 1:d2ae7f538514 b | o 0:cb9a9f314b8b a $ HGEDITOR=cat hg histedit 1 pick d2ae7f538514 1 b pick 177f92b77385 2 c pick 055a42cdd887 3 d pick e860deea161a 4 e pick 652413bf663e 5 f # Edit history between d2ae7f538514 and 652413bf663e # # Commits are listed from least to most recent # # Commands: # # e, edit = use commit, but stop for amending # m, mess = edit commit message without changing commit content # p, pick = use commit # d, drop = remove commit from history # f, fold = use commit, but combine it with the one above # r, roll = like fold, but discard this commit's description # $ hg histedit 1 --commands - --verbose < pick 177f92b77385 2 c > drop d2ae7f538514 1 b > pick 055a42cdd887 3 d > fold e860deea161a 4 e > pick 652413bf663e 5 f > EOF [1] $ hg log --graph --hidden @ 10:cacdfd884a93 f | o 9:59d9f330561f d | | x 8:b558abc46d09 fold-temp-revision e860deea161a | | | x 7:96e494a2d553 d |/ o 6:b346ab9a313d c | | x 5:652413bf663e f | | | x 4:e860deea161a e | | | x 3:055a42cdd887 d | | | x 2:177f92b77385 c | | | x 1:d2ae7f538514 b |/ o 0:cb9a9f314b8b a $ hg debugobsolete 96e494a2d553dd05902ba1cee1d94d4cb7b8faed 0 {b346ab9a313db8537ecf96fca3ca3ca984ef3bd7} (*) {'user': 'test'} (glob) b558abc46d09c30f57ac31e85a8a3d64d2e906e4 0 {96e494a2d553dd05902ba1cee1d94d4cb7b8faed} (*) {'user': 'test'} (glob) d2ae7f538514cd87c17547b0de4cea71fe1af9fb 0 {cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b} (*) {'user': 'test'} (glob) 177f92b773850b59254aa5e923436f921b55483b b346ab9a313db8537ecf96fca3ca3ca984ef3bd7 0 (*) {'user': 'test'} 
(glob) 055a42cdd88768532f9cf79daa407fc8d138de9b 59d9f330561fd6c88b1a6b32f0e45034d88db784 0 (*) {'user': 'test'} (glob) e860deea161a2f77de56603b340ebbb4536308ae 59d9f330561fd6c88b1a6b32f0e45034d88db784 0 (*) {'user': 'test'} (glob) 652413bf663ef2a641cab26574e46d5f5a64a55a cacdfd884a9321ec4e1de275ef3949fa953a1f83 0 (*) {'user': 'test'} (glob) Ensure hidden revision does not prevent histedit ------------------------------------------------- create an hidden revision $ hg histedit 6 --commands - << EOF > pick b346ab9a313d 6 c > drop 59d9f330561f 7 d > pick cacdfd884a93 8 f > EOF 0 files updated, 0 files merged, 3 files removed, 0 files unresolved $ hg log --graph @ 11:c13eb81022ca f | o 6:b346ab9a313d c | o 0:cb9a9f314b8b a check hidden revision are ignored (6 have hidden children 7 and 8) $ hg histedit 6 --commands - << EOF > pick b346ab9a313d 6 c > pick c13eb81022ca 8 f > EOF Test that rewriting leaving instability behind is allowed --------------------------------------------------------------------- $ hg up '.^' 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg log -r 'children(.)' 11:c13eb81022ca f (no-eol) $ hg histedit -r '.' --commands - < edit b346ab9a313d 6 c > EOF 0 files updated, 0 files merged, 1 files removed, 0 files unresolved adding c Editing (b346ab9a313d), you may commit or record as needed now. (hg histedit --continue to resume) [1] $ echo c >> c $ hg histedit --continue $ hg log -r 'unstable()' 11:c13eb81022ca f (no-eol) stabilise $ hg rebase -r 'unstable()' -d . rebasing 11:c13eb81022ca "f" $ hg up tip -q Test dropping of changeset on the top of the stack ------------------------------------------------------- Nothing is rewritten below, the working directory parent must be change for the dropped changeset to be hidden. $ cd .. 
$ hg clone base droplast updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd droplast $ hg histedit -r '40db8afa467b' --commands - << EOF > pick 40db8afa467b 10 c > drop b449568bf7fc 11 f > EOF 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg log -G @ 12:40db8afa467b c | o 0:cb9a9f314b8b a With rewritten ancestors $ echo e > e $ hg add e $ hg commit -m g $ echo f > f $ hg add f $ hg commit -m h $ hg histedit -r '40db8afa467b' --commands - << EOF > pick 47a8561c0449 12 g > pick 40db8afa467b 10 c > drop 1b3b05f35ff0 13 h > EOF 0 files updated, 0 files merged, 3 files removed, 0 files unresolved $ hg log -G @ 17:ee6544123ab8 c | o 16:269e713e9eae g | o 0:cb9a9f314b8b a $ cd ../base Test phases support =========================================== Check that histedit respect immutability ------------------------------------------- $ cat >> $HGRCPATH << EOF > [ui] > logtemplate= {rev}:{node|short} ({phase}) {desc|firstline}\n > EOF $ hg ph -pv '.^' phase changed for 2 changesets $ hg log -G @ 13:b449568bf7fc (draft) f | o 12:40db8afa467b (public) c | o 0:cb9a9f314b8b (public) a $ hg histedit -r '.~2' abort: cannot edit public changeset: cb9a9f314b8b (see "hg help phases" for details) [255] Prepare further testing ------------------------------------------- $ for x in g h i j k ; do > echo $x > $x > hg add $x > hg ci -m $x > done $ hg phase --force --secret .~2 $ hg log -G @ 18:ee118ab9fa44 (secret) k | o 17:3a6c53ee7f3d (secret) j | o 16:b605fb7503f2 (secret) i | o 15:7395e1ff83bd (draft) h | o 14:6b70183d2492 (draft) g | o 13:b449568bf7fc (draft) f | o 12:40db8afa467b (public) c | o 0:cb9a9f314b8b (public) a $ cd .. simple phase conservation ------------------------------------------- Resulting changeset should conserve the phase of the original one whatever the phases.new-commit option is. 
New-commit as draft (default) $ cp -r base simple-draft $ cd simple-draft $ hg histedit -r 'b449568bf7fc' --commands - << EOF > edit b449568bf7fc 11 f > pick 6b70183d2492 12 g > pick 7395e1ff83bd 13 h > pick b605fb7503f2 14 i > pick 3a6c53ee7f3d 15 j > pick ee118ab9fa44 16 k > EOF 0 files updated, 0 files merged, 6 files removed, 0 files unresolved adding f Editing (b449568bf7fc), you may commit or record as needed now. (hg histedit --continue to resume) [1] $ echo f >> f $ hg histedit --continue $ hg log -G @ 24:12e89af74238 (secret) k | o 23:636a8687b22e (secret) j | o 22:ccaf0a38653f (secret) i | o 21:11a89d1c2613 (draft) h | o 20:c1dec7ca82ea (draft) g | o 19:087281e68428 (draft) f | o 12:40db8afa467b (public) c | o 0:cb9a9f314b8b (public) a $ cd .. New-commit as draft (default) $ cp -r base simple-secret $ cd simple-secret $ cat >> .hg/hgrc << EOF > [phases] > new-commit=secret > EOF $ hg histedit -r 'b449568bf7fc' --commands - << EOF > edit b449568bf7fc 11 f > pick 6b70183d2492 12 g > pick 7395e1ff83bd 13 h > pick b605fb7503f2 14 i > pick 3a6c53ee7f3d 15 j > pick ee118ab9fa44 16 k > EOF 0 files updated, 0 files merged, 6 files removed, 0 files unresolved adding f Editing (b449568bf7fc), you may commit or record as needed now. (hg histedit --continue to resume) [1] $ echo f >> f $ hg histedit --continue $ hg log -G @ 24:12e89af74238 (secret) k | o 23:636a8687b22e (secret) j | o 22:ccaf0a38653f (secret) i | o 21:11a89d1c2613 (draft) h | o 20:c1dec7ca82ea (draft) g | o 19:087281e68428 (draft) f | o 12:40db8afa467b (public) c | o 0:cb9a9f314b8b (public) a $ cd .. Changeset reordering ------------------------------------------- If a secret changeset is put before a draft one, all descendant should be secret. It seems more important to present the secret phase. 
$ cp -r base reorder $ cd reorder $ hg histedit -r 'b449568bf7fc' --commands - << EOF > pick b449568bf7fc 11 f > pick 3a6c53ee7f3d 15 j > pick 6b70183d2492 12 g > pick b605fb7503f2 14 i > pick 7395e1ff83bd 13 h > pick ee118ab9fa44 16 k > EOF 0 files updated, 0 files merged, 5 files removed, 0 files unresolved $ hg log -G @ 23:558246857888 (secret) k | o 22:28bd44768535 (secret) h | o 21:d5395202aeb9 (secret) i | o 20:21edda8e341b (secret) g | o 19:5ab64f3a4832 (secret) j | o 13:b449568bf7fc (draft) f | o 12:40db8afa467b (public) c | o 0:cb9a9f314b8b (public) a $ cd .. Changeset folding ------------------------------------------- Folding a secret changeset with a draft one turn the result secret (again, better safe than sorry). Folding between same phase changeset still works Note that there is a few reordering in this series for more extensive test $ cp -r base folding $ cd folding $ cat >> .hg/hgrc << EOF > [phases] > new-commit=secret > EOF $ hg histedit -r 'b449568bf7fc' --commands - << EOF > pick 7395e1ff83bd 13 h > fold b449568bf7fc 11 f > pick 6b70183d2492 12 g > fold 3a6c53ee7f3d 15 j > pick b605fb7503f2 14 i > fold ee118ab9fa44 16 k > EOF 0 files updated, 0 files merged, 6 files removed, 0 files unresolved 0 files updated, 0 files merged, 2 files removed, 0 files unresolved 2 files updated, 0 files merged, 0 files removed, 0 files unresolved 0 files updated, 0 files merged, 2 files removed, 0 files unresolved 2 files updated, 0 files merged, 0 files removed, 0 files unresolved 0 files updated, 0 files merged, 2 files removed, 0 files unresolved 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -G @ 27:f9daec13fb98 (secret) i | o 24:49807617f46a (secret) g | o 21:050280826e04 (draft) h | o 12:40db8afa467b (public) c | o 0:cb9a9f314b8b (public) a $ hg co 49807617f46a 0 files updated, 0 files merged, 2 files removed, 0 files unresolved $ echo wat >> wat $ hg add wat $ hg ci -m 'add wat' created new head $ hg merge f9daec13fb98 2 
files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m 'merge' $ echo not wat > wat $ hg ci -m 'modify wat' $ hg histedit 050280826e04 abort: cannot edit history that contains merges [255] $ cd .. mercurial-3.7.3/tests/test-batching.py0000644000175000017500000001264712676531525017411 0ustar mpmmpm00000000000000# test-batching.py - tests for transparent command batching # # Copyright 2011 Peter Arrenbrecht # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. from mercurial.peer import localbatch, batchable, future from mercurial.wireproto import remotebatch # equivalent of repo.repository class thing(object): def hello(self): return "Ready." # equivalent of localrepo.localrepository class localthing(thing): def foo(self, one, two=None): if one: return "%s and %s" % (one, two,) return "Nope" def bar(self, b, a): return "%s und %s" % (b, a,) def greet(self, name=None): return "Hello, %s" % name def batch(self): '''Support for local batching.''' return localbatch(self) # usage of "thing" interface def use(it): # Direct call to base method shared between client and server. print it.hello() # Direct calls to proxied methods. They cause individual roundtrips. print it.foo("Un", two="Deux") print it.bar("Eins", "Zwei") # Batched call to a couple of (possibly proxied) methods. batch = it.batch() # The calls return futures to eventually hold results. foo = batch.foo(one="One", two="Two") foo2 = batch.foo(None) bar = batch.bar("Eins", "Zwei") # We can call non-batchable proxy methods, but the break the current batch # request and cause additional roundtrips. greet = batch.greet(name="John Smith") # We can also add local methods into the mix, but they break the batch too. hello = batch.hello() bar2 = batch.bar(b="Uno", a="Due") # Only now are all the calls executed in sequence, with as few roundtrips # as possible. 
batch.submit() # After the call to submit, the futures actually contain values. print foo.value print foo2.value print bar.value print greet.value print hello.value print bar2.value # local usage mylocal = localthing() print print "== Local" use(mylocal) # demo remoting; mimicks what wireproto and HTTP/SSH do # shared def escapearg(plain): return (plain .replace(':', '::') .replace(',', ':,') .replace(';', ':;') .replace('=', ':=')) def unescapearg(escaped): return (escaped .replace(':=', '=') .replace(':;', ';') .replace(':,', ',') .replace('::', ':')) # server side # equivalent of wireproto's global functions class server(object): def __init__(self, local): self.local = local def _call(self, name, args): args = dict(arg.split('=', 1) for arg in args) return getattr(self, name)(**args) def perform(self, req): print "REQ:", req name, args = req.split('?', 1) args = args.split('&') vals = dict(arg.split('=', 1) for arg in args) res = getattr(self, name)(**vals) print " ->", res return res def batch(self, cmds): res = [] for pair in cmds.split(';'): name, args = pair.split(':', 1) vals = {} for a in args.split(','): if a: n, v = a.split('=') vals[n] = unescapearg(v) res.append(escapearg(getattr(self, name)(**vals))) return ';'.join(res) def foo(self, one, two): return mangle(self.local.foo(unmangle(one), unmangle(two))) def bar(self, b, a): return mangle(self.local.bar(unmangle(b), unmangle(a))) def greet(self, name): return mangle(self.local.greet(unmangle(name))) myserver = server(mylocal) # local side # equivalent of wireproto.encode/decodelist, that is, type-specific marshalling # here we just transform the strings a bit to check we're properly en-/decoding def mangle(s): return ''.join(chr(ord(c) + 1) for c in s) def unmangle(s): return ''.join(chr(ord(c) - 1) for c in s) # equivalent of wireproto.wirerepository and something like http's wire format class remotething(thing): def __init__(self, server): self.server = server def _submitone(self, name, args): req = 
name + '?' + '&'.join(['%s=%s' % (n, v) for n, v in args]) return self.server.perform(req) def _submitbatch(self, cmds): req = [] for name, args in cmds: args = ','.join(n + '=' + escapearg(v) for n, v in args) req.append(name + ':' + args) req = ';'.join(req) res = self._submitone('batch', [('cmds', req,)]) return res.split(';') def batch(self): return remotebatch(self) @batchable def foo(self, one, two=None): if not one: yield "Nope", None encargs = [('one', mangle(one),), ('two', mangle(two),)] encresref = future() yield encargs, encresref yield unmangle(encresref.value) @batchable def bar(self, b, a): encresref = future() yield [('b', mangle(b),), ('a', mangle(a),)], encresref yield unmangle(encresref.value) # greet is coded directly. It therefore does not support batching. If it # does appear in a batch, the batch is split around greet, and the call to # greet is done in its own roundtrip. def greet(self, name=None): return unmangle(self._submitone('greet', [('name', mangle(name),)])) # demo remote usage myproxy = remotething(myserver) print print "== Remote" use(myproxy) mercurial-3.7.3/tests/test-diff-reverse.t0000644000175000017500000000156512676531525020023 0ustar mpmmpm00000000000000 $ hg init $ cat > a < a > b > c > EOF $ hg ci -Am adda adding a $ cat > a < d > e > f > EOF $ hg ci -m moda $ hg diff --reverse -r0 -r1 diff -r 2855cdcfcbb7 -r 8e1805a3cf6e a --- a/a Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:00 1970 +0000 @@ -1,3 +1,3 @@ -d -e -f +a +b +c $ cat >> a < g > h > EOF $ hg diff --reverse --nodates diff -r 2855cdcfcbb7 a --- a/a +++ b/a @@ -1,5 +1,3 @@ d e f -g -h should show removed file 'a' as being added $ hg revert a $ hg rm a $ hg diff --reverse --nodates a diff -r 2855cdcfcbb7 a --- /dev/null +++ b/a @@ -0,0 +1,3 @@ +d +e +f should show added file 'b' as being removed $ echo b >> b $ hg add b $ hg diff --reverse --nodates b diff -r 2855cdcfcbb7 b --- a/b +++ /dev/null @@ -1,1 +0,0 @@ -b 
mercurial-3.7.3/tests/test-hgweb-no-request-uri.t0000644000175000017500000001046312676531525021430 0ustar mpmmpm00000000000000This tests if hgweb and hgwebdir still work if the REQUEST_URI variable is no longer passed with the request. Instead, SCRIPT_NAME and PATH_INFO should be used from d74fc8dec2b4 onward to route the request. $ hg init repo $ cd repo $ echo foo > bar $ hg add bar $ hg commit -m "test" $ hg tip changeset: 0:61c9426e69fe tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: test $ cat > request.py < from mercurial.hgweb import hgweb, hgwebdir > from StringIO import StringIO > import os, sys > > errors = StringIO() > input = StringIO() > > def startrsp(status, headers): > print '---- STATUS' > print status > print '---- HEADERS' > print [i for i in headers if i[0] != 'ETag'] > print '---- DATA' > return output.write > > env = { > 'wsgi.version': (1, 0), > 'wsgi.url_scheme': 'http', > 'wsgi.errors': errors, > 'wsgi.input': input, > 'wsgi.multithread': False, > 'wsgi.multiprocess': False, > 'wsgi.run_once': False, > 'REQUEST_METHOD': 'GET', > 'SCRIPT_NAME': '', > 'SERVER_NAME': '127.0.0.1', > 'SERVER_PORT': os.environ['HGPORT'], > 'SERVER_PROTOCOL': 'HTTP/1.0' > } > > def process(app): > content = app(env, startrsp) > sys.stdout.write(output.getvalue()) > sys.stdout.write(''.join(content)) > getattr(content, 'close', lambda : None)() > print '---- ERRORS' > print errors.getvalue() > > output = StringIO() > env['PATH_INFO'] = '/' > env['QUERY_STRING'] = 'style=atom' > process(hgweb('.', name = 'repo')) > > output = StringIO() > env['PATH_INFO'] = '/file/tip/' > env['QUERY_STRING'] = 'style=raw' > process(hgweb('.', name = 'repo')) > > output = StringIO() > env['PATH_INFO'] = '/' > env['QUERY_STRING'] = 'style=raw' > process(hgwebdir({'repo': '.'})) > > output = StringIO() > env['PATH_INFO'] = '/repo/file/tip/' > env['QUERY_STRING'] = 'style=raw' > process(hgwebdir({'repo': '.'})) > EOF $ python request.py ---- STATUS 200 Script output 
follows ---- HEADERS [('Content-Type', 'application/atom+xml; charset=ascii')] ---- DATA http://127.0.0.1:$HGPORT/ repo Changelog 1970-01-01T00:00:00+00:00 [default] test http://127.0.0.1:$HGPORT/#changeset-61c9426e69fef294feed5e2bbfc97d39944a5b1c test test 1970-01-01T00:00:00+00:00 1970-01-01T00:00:00+00:00
                        changeset 61c9426e69fe
                        branch default
                        bookmark
                        tag tip
                        user test
                        description test
                        files bar
                        ---- ERRORS ---- STATUS 200 Script output follows ---- HEADERS [('Content-Type', 'text/plain; charset=ascii')] ---- DATA -rw-r--r-- 4 bar ---- ERRORS ---- STATUS 200 Script output follows ---- HEADERS [('Content-Type', 'text/plain; charset=ascii')] ---- DATA /repo/ ---- ERRORS ---- STATUS 200 Script output follows ---- HEADERS [('Content-Type', 'text/plain; charset=ascii')] ---- DATA -rw-r--r-- 4 bar ---- ERRORS $ cd .. mercurial-3.7.3/tests/test-incoming-outgoing.t0000644000175000017500000002576312676531525021104 0ustar mpmmpm00000000000000#require serve $ hg init test $ cd test $ for i in 0 1 2 3 4 5 6 7 8; do > echo $i >> foo > hg commit -A -m $i > done adding foo $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 9 changesets, 9 total revisions $ hg serve -p $HGPORT -d --pid-file=hg.pid $ cat hg.pid >> $DAEMON_PIDS $ cd .. $ hg init new http incoming $ hg -R new incoming http://localhost:$HGPORT/ comparing with http://localhost:$HGPORT/ changeset: 0:00a43fa82f62 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0 changeset: 1:5460a410df01 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1 changeset: 2:d9f42cd1a1ec user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 changeset: 3:376476025137 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 3 changeset: 4:70d7eb252d49 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 4 changeset: 5:ad284ee3b5ee user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 5 changeset: 6:e9229f2de384 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 6 changeset: 7:d152815bb8db user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 7 changeset: 8:e4feb4ac9035 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 8 $ hg -R new incoming -r 4 http://localhost:$HGPORT/ comparing with http://localhost:$HGPORT/ changeset: 0:00a43fa82f62 user: test date: Thu Jan 01 00:00:00 1970 +0000 
summary: 0 changeset: 1:5460a410df01 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1 changeset: 2:d9f42cd1a1ec user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 changeset: 3:376476025137 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 3 changeset: 4:70d7eb252d49 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 4 local incoming $ hg -R new incoming test comparing with test changeset: 0:00a43fa82f62 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0 changeset: 1:5460a410df01 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1 changeset: 2:d9f42cd1a1ec user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 changeset: 3:376476025137 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 3 changeset: 4:70d7eb252d49 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 4 changeset: 5:ad284ee3b5ee user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 5 changeset: 6:e9229f2de384 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 6 changeset: 7:d152815bb8db user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 7 changeset: 8:e4feb4ac9035 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 8 $ hg -R new incoming -r 4 test comparing with test changeset: 0:00a43fa82f62 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0 changeset: 1:5460a410df01 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1 changeset: 2:d9f42cd1a1ec user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 changeset: 3:376476025137 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 3 changeset: 4:70d7eb252d49 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 4 limit to 2 changesets $ hg -R new incoming -l 2 test comparing with test changeset: 0:00a43fa82f62 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0 changeset: 1:5460a410df01 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1 limit to 2 changesets, test with -p --git $ hg -R new incoming -l 2 -p --git test 
comparing with test changeset: 0:00a43fa82f62 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0 diff --git a/foo b/foo new file mode 100644 --- /dev/null +++ b/foo @@ -0,0 +1,1 @@ +0 changeset: 1:5460a410df01 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1 diff --git a/foo b/foo --- a/foo +++ b/foo @@ -1,1 +1,2 @@ 0 +1 test with --bundle $ hg -R new incoming --bundle test.hg http://localhost:$HGPORT/ comparing with http://localhost:$HGPORT/ changeset: 0:00a43fa82f62 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0 changeset: 1:5460a410df01 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1 changeset: 2:d9f42cd1a1ec user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 changeset: 3:376476025137 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 3 changeset: 4:70d7eb252d49 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 4 changeset: 5:ad284ee3b5ee user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 5 changeset: 6:e9229f2de384 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 6 changeset: 7:d152815bb8db user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 7 changeset: 8:e4feb4ac9035 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 8 $ hg -R new incoming --bundle test2.hg test comparing with test changeset: 0:00a43fa82f62 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0 changeset: 1:5460a410df01 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1 changeset: 2:d9f42cd1a1ec user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 changeset: 3:376476025137 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 3 changeset: 4:70d7eb252d49 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 4 changeset: 5:ad284ee3b5ee user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 5 changeset: 6:e9229f2de384 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 6 changeset: 7:d152815bb8db user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 7 changeset: 
8:e4feb4ac9035 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 8 test the resulting bundles $ hg init temp $ hg init temp2 $ hg -R temp unbundle test.hg adding changesets adding manifests adding file changes added 9 changesets with 9 changes to 1 files (run 'hg update' to get a working copy) $ hg -R temp2 unbundle test2.hg adding changesets adding manifests adding file changes added 9 changesets with 9 changes to 1 files (run 'hg update' to get a working copy) $ hg -R temp tip changeset: 8:e4feb4ac9035 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 8 $ hg -R temp2 tip changeset: 8:e4feb4ac9035 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 8 $ rm -r temp temp2 new test outgoing $ hg clone test test-dev updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd test-dev $ for i in 9 10 11 12 13; do > echo $i >> foo > hg commit -A -m $i > done $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 14 changesets, 14 total revisions $ cd .. 
$ hg -R test-dev outgoing test comparing with test searching for changes changeset: 9:d89d4abea5bc user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 9 changeset: 10:820095aa7158 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 10 changeset: 11:09ede2f3a638 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 11 changeset: 12:e576b1bed305 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 12 changeset: 13:96bbff09a7cc tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 13 test outgoing with secret changesets $ hg -R test-dev phase --force --secret 9 $ hg -R test-dev outgoing test comparing with test searching for changes no changes found (ignored 5 secret changesets) [1] $ hg -R test-dev phase --draft -r 'head()' limit to 3 changesets $ hg -R test-dev outgoing -l 3 test comparing with test searching for changes changeset: 9:d89d4abea5bc user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 9 changeset: 10:820095aa7158 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 10 changeset: 11:09ede2f3a638 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 11 $ hg -R test-dev outgoing http://localhost:$HGPORT/ comparing with http://localhost:$HGPORT/ searching for changes changeset: 9:d89d4abea5bc user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 9 changeset: 10:820095aa7158 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 10 changeset: 11:09ede2f3a638 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 11 changeset: 12:e576b1bed305 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 12 changeset: 13:96bbff09a7cc tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 13 $ hg -R test-dev outgoing -r 11 http://localhost:$HGPORT/ comparing with http://localhost:$HGPORT/ searching for changes changeset: 9:d89d4abea5bc user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 9 changeset: 10:820095aa7158 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 10 changeset: 11:09ede2f3a638 
user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 11 incoming from empty remote repository $ hg init r1 $ hg init r2 $ echo a > r1/foo $ hg -R r1 ci -Ama adding foo $ hg -R r1 incoming r2 --bundle x.hg comparing with r2 searching for changes no changes found [1] mercurial-3.7.3/tests/test-shelve.t0000644000175000017500000007651612676531525016740 0ustar mpmmpm00000000000000 $ cat <> $HGRCPATH > [extensions] > mq = > shelve = > [defaults] > diff = --nodates --git > qnew = --date '0 0' > [shelve] > maxbackups = 2 > EOF $ hg init repo $ cd repo $ mkdir a b $ echo a > a/a $ echo b > b/b $ echo c > c $ echo d > d $ echo x > x $ hg addremove -q shelve has a help message $ hg shelve -h hg shelve [OPTION]... [FILE]... save and set aside changes from the working directory Shelving takes files that "hg status" reports as not clean, saves the modifications to a bundle (a shelved change), and reverts the files so that their state in the working directory becomes clean. To restore these changes to the working directory, using "hg unshelve"; this will work even if you switch to a different commit. When no files are specified, "hg shelve" saves all not-clean files. If specific files or directories are named, only changes to those files are shelved. Each shelved change has a name that makes it easier to find later. The name of a shelved change defaults to being based on the active bookmark, or if there is no active bookmark, the current named branch. To specify a different name, use "--name". To see a list of existing shelved changes, use the "--list" option. For each shelved change, this will print its name, age, and description; use " --patch" or "--stat" for more details. To delete specific shelved changes, use "--delete". To delete all shelved changes, use "--cleanup". 
(use "hg help -e shelve" to show help for the shelve extension) options ([+] can be repeated): -A --addremove mark new/missing files as added/removed before shelving -u --unknown store unknown files in the shelve --cleanup delete all shelved changes --date DATE shelve with the specified commit date -d --delete delete the named shelved change(s) -e --edit invoke editor on commit messages -l --list list current shelves -m --message TEXT use text as shelve message -n --name NAME use the given name for the shelved commit -p --patch show patch -i --interactive interactive mode, only works while creating a shelve --stat output diffstat-style summary of changes -I --include PATTERN [+] include names matching the given patterns -X --exclude PATTERN [+] exclude names matching the given patterns --mq operate on patch repository (some details hidden, use --verbose to show complete help) shelving in an empty repo should be possible (this tests also that editor is not invoked, if '--edit' is not specified) $ HGEDITOR=cat hg shelve shelved as default 0 files updated, 0 files merged, 5 files removed, 0 files unresolved $ hg unshelve unshelving change 'default' $ hg commit -q -m 'initial commit' $ hg shelve nothing changed [1] make sure shelve files were backed up $ ls .hg/shelve-backup default.hg default.patch create an mq patch - shelving should work fine with a patch applied $ echo n > n $ hg add n $ hg commit n -m second $ hg qnew second.patch shelve a change that we will delete later $ echo a >> a/a $ hg shelve shelved as default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved set up some more complex changes to shelve $ echo a >> a/a $ hg mv b b.rename moving b/b to b.rename/b (glob) $ hg cp c c.copy $ hg status -C M a/a A b.rename/b b/b A c.copy c R b/b prevent some foot-shooting $ hg shelve -n foo/bar abort: shelved change names may not contain slashes [255] $ hg shelve -n .baz abort: shelved change names may not start with '.' 
[255] the common case - no options or filenames $ hg shelve shelved as default-01 2 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg status -C ensure that our shelved changes exist $ hg shelve -l default-01 (*)* changes to: [mq]: second.patch (glob) default (*)* changes to: [mq]: second.patch (glob) $ hg shelve -l -p default default (*)* changes to: [mq]: second.patch (glob) diff --git a/a/a b/a/a --- a/a/a +++ b/a/a @@ -1,1 +1,2 @@ a +a $ hg shelve --list --addremove abort: options '--list' and '--addremove' may not be used together [255] delete our older shelved change $ hg shelve -d default $ hg qfinish -a -q ensure shelve backups aren't overwritten $ ls .hg/shelve-backup/ default-1.hg default-1.patch default.hg default.patch local edits should not prevent a shelved change from applying $ printf "z\na\n" > a/a $ hg unshelve --keep unshelving change 'default-01' temporarily committing pending changes (restore with 'hg unshelve --abort') rebasing shelved changes rebasing 4:32c69314e062 "changes to: [mq]: second.patch" (tip) merging a/a $ hg revert --all -q $ rm a/a.orig b.rename/b c.copy apply it and make sure our state is as expected (this also tests that same timestamp prevents backups from being removed, even though there are more than 'maxbackups' backups) $ f -t .hg/shelve-backup/default.hg .hg/shelve-backup/default.hg: file $ touch -t 200001010000 .hg/shelve-backup/default.hg $ f -t .hg/shelve-backup/default-1.hg .hg/shelve-backup/default-1.hg: file $ touch -t 200001010000 .hg/shelve-backup/default-1.hg $ hg unshelve unshelving change 'default-01' $ hg status -C M a/a A b.rename/b b/b A c.copy c R b/b $ hg shelve -l (both of default.hg and default-1.hg should be still kept, because it is difficult to decide actual order of them from same timestamp) $ ls .hg/shelve-backup/ default-01.hg default-01.patch default-1.hg default-1.patch default.hg default.patch $ hg unshelve abort: no shelved changes to apply! 
[255] $ hg unshelve foo abort: shelved change 'foo' not found [255] named shelves, specific filenames, and "commit messages" should all work (this tests also that editor is invoked, if '--edit' is specified) $ hg status -C M a/a A b.rename/b b/b A c.copy c R b/b $ HGEDITOR=cat hg shelve -q -n wibble -m wat -e a wat HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. HG: -- HG: user: shelve@localhost HG: branch 'default' HG: changed a/a expect "a" to no longer be present, but status otherwise unchanged $ hg status -C A b.rename/b b/b A c.copy c R b/b $ hg shelve -l --stat wibble (*) wat (glob) a/a | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) and now "a/a" should reappear $ cd a $ hg unshelve -q wibble $ cd .. $ hg status -C M a/a A b.rename/b b/b A c.copy c R b/b ensure old shelve backups are being deleted automatically $ ls .hg/shelve-backup/ default-01.hg default-01.patch wibble.hg wibble.patch cause unshelving to result in a merge with 'a' conflicting $ hg shelve -q $ echo c>>a/a $ hg commit -m second $ hg tip --template '{files}\n' a/a add an unrelated change that should be preserved $ mkdir foo $ echo foo > foo/foo $ hg add foo/foo force a conflicted merge to occur $ hg unshelve unshelving change 'default' temporarily committing pending changes (restore with 'hg unshelve --abort') rebasing shelved changes rebasing 5:32c69314e062 "changes to: [mq]: second.patch" (tip) merging a/a warning: conflicts while merging a/a! (edit, then use 'hg resolve --mark') unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue') [1] ensure that we have a merge with unresolved conflicts $ hg heads -q --template '{rev}\n' 5 4 $ hg parents -q --template '{rev}\n' 4 5 $ hg status M a/a M b.rename/b M c.copy R b/b ? 
a/a.orig $ hg diff diff --git a/a/a b/a/a --- a/a/a +++ b/a/a @@ -1,2 +1,6 @@ a +<<<<<<< dest: * - shelve: pending changes temporary commit (glob) c +======= +a +>>>>>>> source: 32c69314e062 - shelve: changes to: [mq]: second.patch diff --git a/b/b b/b.rename/b rename from b/b rename to b.rename/b diff --git a/c b/c.copy copy from c copy to c.copy $ hg resolve -l U a/a $ hg shelve abort: unshelve already in progress (use 'hg unshelve --continue' or 'hg unshelve --abort') [255] abort the unshelve and be happy $ hg status M a/a M b.rename/b M c.copy R b/b ? a/a.orig $ hg unshelve -a rebase aborted unshelve of 'default' aborted $ hg heads -q 3:2e69b451d1ea $ hg parents changeset: 3:2e69b451d1ea tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: second $ hg resolve -l $ hg status A foo/foo ? a/a.orig try to continue with no unshelve underway $ hg unshelve -c abort: no unshelve operation underway [255] $ hg status A foo/foo ? a/a.orig redo the unshelve to get a conflict $ hg unshelve -q warning: conflicts while merging a/a! (edit, then use 'hg resolve --mark') unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue') [1] attempt to continue $ hg unshelve -c abort: unresolved conflicts, can't continue (see 'hg resolve', then 'hg unshelve --continue') [255] $ hg revert -r . a/a $ hg resolve -m a/a (no more unresolved files) continue: hg unshelve --continue $ hg commit -m 'commit while unshelve in progress' abort: unshelve already in progress (use 'hg unshelve --continue' or 'hg unshelve --abort') [255] $ hg unshelve -c rebasing 5:32c69314e062 "changes to: [mq]: second.patch" (tip) unshelve of 'default' complete ensure the repo is as we hope $ hg parents changeset: 3:2e69b451d1ea tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: second $ hg heads -q 3:2e69b451d1ea $ hg status -C A b.rename/b b/b A c.copy c A foo/foo R b/b ? 
a/a.orig there should be no shelves left $ hg shelve -l #if execbit ensure that metadata-only changes are shelved $ chmod +x a/a $ hg shelve -q -n execbit a/a $ hg status a/a $ hg unshelve -q execbit $ hg status a/a M a/a $ hg revert a/a #endif #if symlink $ rm a/a $ ln -s foo a/a $ hg shelve -q -n symlink a/a $ hg status a/a $ hg unshelve -q symlink $ hg status a/a M a/a $ hg revert a/a #endif set up another conflict between a commit and a shelved change $ hg revert -q -C -a $ rm a/a.orig b.rename/b c.copy $ echo a >> a/a $ hg shelve -q $ echo x >> a/a $ hg ci -m 'create conflict' $ hg add foo/foo if we resolve a conflict while unshelving, the unshelve should succeed $ hg unshelve --tool :merge-other --keep unshelving change 'default' temporarily committing pending changes (restore with 'hg unshelve --abort') rebasing shelved changes rebasing 6:2f694dd83a13 "changes to: second" (tip) merging a/a $ hg parents -q 4:33f7f61e6c5e $ hg shelve -l default (*)* changes to: second (glob) $ hg status M a/a A foo/foo $ cat a/a a c a $ cat > a/a << EOF > a > c > x > EOF $ HGMERGE=true hg unshelve unshelving change 'default' temporarily committing pending changes (restore with 'hg unshelve --abort') rebasing shelved changes rebasing 6:2f694dd83a13 "changes to: second" (tip) merging a/a note: rebase of 6:2f694dd83a13 created no changes to commit $ hg parents -q 4:33f7f61e6c5e $ hg shelve -l $ hg status A foo/foo $ cat a/a a c x test keep and cleanup $ hg shelve shelved as default 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg shelve --list default (*)* changes to: create conflict (glob) $ hg unshelve -k unshelving change 'default' $ hg shelve --list default (*)* changes to: create conflict (glob) $ hg shelve --cleanup $ hg shelve --list $ hg shelve --cleanup --delete abort: options '--cleanup' and '--delete' may not be used together [255] $ hg shelve --cleanup --patch abort: options '--cleanup' and '--patch' may not be used together [255] $ hg shelve 
--cleanup --message MESSAGE abort: options '--cleanup' and '--message' may not be used together [255] test bookmarks $ hg bookmark test $ hg bookmark * test 4:33f7f61e6c5e $ hg shelve shelved as test 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg bookmark * test 4:33f7f61e6c5e $ hg unshelve unshelving change 'test' $ hg bookmark * test 4:33f7f61e6c5e shelve should still work even if mq is disabled $ hg --config extensions.mq=! shelve shelved as test 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg --config extensions.mq=! shelve --list test (*)* changes to: create conflict (glob) $ hg bookmark * test 4:33f7f61e6c5e $ hg --config extensions.mq=! unshelve unshelving change 'test' $ hg bookmark * test 4:33f7f61e6c5e shelve should leave dirstate clean (issue4055) $ cd .. $ hg init shelverebase $ cd shelverebase $ printf 'x\ny\n' > x $ echo z > z $ hg commit -Aqm xy $ echo z >> x $ hg commit -Aqm z $ hg up 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ printf 'a\nx\ny\nz\n' > x $ hg commit -Aqm xyz $ echo c >> z $ hg shelve shelved as default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg rebase -d 1 --config extensions.rebase= rebasing 2:323bfa07f744 "xyz" (tip) merging x saved backup bundle to $TESTTMP/shelverebase/.hg/strip-backup/323bfa07f744-78114325-backup.hg (glob) $ hg unshelve unshelving change 'default' rebasing shelved changes rebasing 4:82a0d7d6ba61 "changes to: xyz" (tip) $ hg status M z $ cd .. 
shelve should only unshelve pending changes (issue4068) $ hg init onlypendingchanges $ cd onlypendingchanges $ touch a $ hg ci -Aqm a $ touch b $ hg ci -Aqm b $ hg up -q 0 $ touch c $ hg ci -Aqm c $ touch d $ hg add d $ hg shelve shelved as default 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg up -q 1 $ hg unshelve unshelving change 'default' rebasing shelved changes rebasing 3:958bcbd1776e "changes to: c" (tip) $ hg status A d unshelve should work on an ancestor of the original commit $ hg shelve shelved as default 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg up 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg unshelve unshelving change 'default' rebasing shelved changes rebasing 3:013284d9655e "changes to: b" (tip) $ hg status A d test bug 4073 we need to enable obsolete markers for it $ cat >> $HGRCPATH << EOF > [experimental] > evolution=createmarkers > EOF $ hg shelve shelved as default 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg debugobsolete `hg --debug id -i -r 1` $ hg unshelve unshelving change 'default' unshelve should leave unknown files alone (issue4113) $ echo e > e $ hg shelve shelved as default 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg status ? e $ hg unshelve unshelving change 'default' $ hg status A d ? e $ cat e e unshelve should keep a copy of unknown files $ hg add e $ hg shelve shelved as default 0 files updated, 0 files merged, 2 files removed, 0 files unresolved $ echo z > e $ hg unshelve unshelving change 'default' $ cat e e $ cat e.orig z unshelve and conflicts with tracked and untracked files preparing: $ rm *.orig $ hg ci -qm 'commit stuff' $ hg phase -p null: no other changes - no merge: $ echo f > f $ hg add f $ hg shelve shelved as default 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo g > f $ hg unshelve unshelving change 'default' $ hg st A f ? 
f.orig $ cat f f $ cat f.orig g other uncommitted changes - merge: $ hg st A f ? f.orig $ hg shelve shelved as default 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg log -G --template '{rev} {desc|firstline} {author}' -R bundle://.hg/shelved/default.hg -r 'bundle()' o 4 changes to: commit stuff shelve@localhost | $ hg log -G --template '{rev} {desc|firstline} {author}' @ 3 commit stuff test | | o 2 c test |/ o 0 a test $ mv f.orig f $ echo 1 > a $ hg unshelve --date '1073741824 0' unshelving change 'default' temporarily committing pending changes (restore with 'hg unshelve --abort') rebasing shelved changes rebasing 5:81152db69da7 "changes to: commit stuff" (tip) merging f warning: conflicts while merging f! (edit, then use 'hg resolve --mark') unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue') [1] $ hg log -G --template '{rev} {desc|firstline} {author} {date|isodate}' @ 5 changes to: commit stuff shelve@localhost 1970-01-01 00:00 +0000 | | @ 4 pending changes temporary commit shelve@localhost 2004-01-10 13:37 +0000 |/ o 3 commit stuff test 1970-01-01 00:00 +0000 | | o 2 c test 1970-01-01 00:00 +0000 |/ o 0 a test 1970-01-01 00:00 +0000 $ hg st M f ? f.orig $ cat f <<<<<<< dest: 5f6b880e719b - shelve: pending changes temporary commit g ======= f >>>>>>> source: 81152db69da7 - shelve: changes to: commit stuff $ cat f.orig g $ hg unshelve --abort -t false tool option will be ignored rebase aborted unshelve of 'default' aborted $ hg st M a ? f.orig $ cat f.orig g $ hg unshelve unshelving change 'default' temporarily committing pending changes (restore with 'hg unshelve --abort') rebasing shelved changes rebasing 5:81152db69da7 "changes to: commit stuff" (tip) $ hg st M a A f ? 
f.orig other committed changes - merge: $ hg shelve f shelved as default 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg ci a -m 'intermediate other change' $ mv f.orig f $ hg unshelve unshelving change 'default' rebasing shelved changes rebasing 5:81152db69da7 "changes to: commit stuff" (tip) merging f warning: conflicts while merging f! (edit, then use 'hg resolve --mark') unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue') [1] $ hg st M f ? f.orig $ cat f <<<<<<< dest: * - test: intermediate other change (glob) g ======= f >>>>>>> source: 81152db69da7 - shelve: changes to: commit stuff $ cat f.orig g $ hg unshelve --abort rebase aborted unshelve of 'default' aborted $ hg st ? f.orig $ cat f.orig g $ hg shelve --delete default Recreate some conflict again $ cd ../repo $ hg up -C -r 3 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (leaving bookmark test) $ echo y >> a/a $ hg shelve shelved as default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg up test 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (activating bookmark test) $ hg bookmark * test 4:33f7f61e6c5e $ hg unshelve unshelving change 'default' rebasing shelved changes rebasing 5:e42a7da90865 "changes to: second" (tip) merging a/a warning: conflicts while merging a/a! (edit, then use 'hg resolve --mark') unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue') [1] $ hg bookmark test 4:33f7f61e6c5e Test that resolving all conflicts in one direction (so that the rebase is a no-op), works (issue4398) $ hg revert -a -r . reverting a/a (glob) $ hg resolve -m a/a (no more unresolved files) continue: hg unshelve --continue $ hg unshelve -c rebasing 5:e42a7da90865 "changes to: second" (tip) note: rebase of 5:e42a7da90865 created no changes to commit unshelve of 'default' complete $ hg bookmark * test 4:33f7f61e6c5e $ hg diff $ hg status ? a/a.orig ? 
foo/foo $ hg summary parent: 4:33f7f61e6c5e tip create conflict branch: default bookmarks: *test commit: 2 unknown (clean) update: (current) phases: 5 draft $ hg shelve --delete --stat abort: options '--delete' and '--stat' may not be used together [255] $ hg shelve --delete --name NAME abort: options '--delete' and '--name' may not be used together [255] Test interactive shelve $ cat <> $HGRCPATH > [ui] > interactive = true > EOF $ echo 'a' >> a/b $ cat a/a >> a/b $ echo 'x' >> a/b $ mv a/b a/a $ echo 'a' >> foo/foo $ hg st M a/a ? a/a.orig ? foo/foo $ cat a/a a a c x x $ cat foo/foo foo a $ hg shelve --interactive --config ui.interactive=false abort: running non-interactively [255] $ hg shelve --interactive << EOF > y > y > n > EOF diff --git a/a/a b/a/a 2 hunks, 2 lines changed examine changes to 'a/a'? [Ynesfdaq?] y @@ -1,3 +1,4 @@ +a a c x record change 1/2 to 'a/a'? [Ynesfdaq?] y @@ -1,3 +2,4 @@ a c x +x record change 2/2 to 'a/a'? [Ynesfdaq?] n shelved as test merging a/a 0 files updated, 1 files merged, 0 files removed, 0 files unresolved $ cat a/a a c x x $ cat foo/foo foo a $ hg st M a/a ? foo/foo $ hg bookmark * test 4:33f7f61e6c5e $ hg unshelve unshelving change 'test' temporarily committing pending changes (restore with 'hg unshelve --abort') rebasing shelved changes rebasing 6:96a1354f65f6 "changes to: create conflict" (tip) merging a/a $ hg bookmark * test 4:33f7f61e6c5e $ cat a/a a a c x x shelve --patch and shelve --stat should work with a single valid shelfname $ hg up --clean . 
1 files updated, 0 files merged, 0 files removed, 0 files unresolved (leaving bookmark test) $ hg shelve --list $ echo 'patch a' > shelf-patch-a $ hg add shelf-patch-a $ hg shelve shelved as default 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo 'patch b' > shelf-patch-b $ hg add shelf-patch-b $ hg shelve shelved as default-01 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg shelve --patch default default-01 abort: --patch expects a single shelf [255] $ hg shelve --stat default default-01 abort: --stat expects a single shelf [255] $ hg shelve --patch default default (*)* changes to: create conflict (glob) diff --git a/shelf-patch-a b/shelf-patch-a new file mode 100644 --- /dev/null +++ b/shelf-patch-a @@ -0,0 +1,1 @@ +patch a $ hg shelve --stat default default (*)* changes to: create conflict (glob) shelf-patch-a | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) $ hg shelve --patch nonexistentshelf abort: cannot find shelf nonexistentshelf [255] $ hg shelve --stat nonexistentshelf abort: cannot find shelf nonexistentshelf [255] $ cd .. Shelve from general delta repo uses bundle2 on disk -------------------------------------------------- no general delta $ hg clone --pull repo bundle1 --config format.usegeneraldelta=0 requesting all changes adding changesets adding manifests adding file changes added 5 changesets with 8 changes to 6 files updating to branch default 6 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd bundle1 $ echo babar > jungle $ hg add jungle $ hg shelve shelved as default 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg debugbundle .hg/shelved/*.hg 45993d65fe9dc3c6d8764b9c3b07fa831ee7d92d $ cd .. 
with general delta $ hg clone --pull repo bundle2 --config format.usegeneraldelta=1 requesting all changes adding changesets adding manifests adding file changes added 5 changesets with 8 changes to 6 files updating to branch default 6 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd bundle2 $ echo babar > jungle $ hg add jungle $ hg shelve shelved as default 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg debugbundle .hg/shelved/*.hg Stream params: {'Compression': 'BZ'} changegroup -- "{'version': '02'}" 45993d65fe9dc3c6d8764b9c3b07fa831ee7d92d $ cd .. Test visibility of in-memory changes inside transaction to external hook ------------------------------------------------------------------------ $ cd repo $ echo xxxx >> x $ hg commit -m "#5: changes to invoke rebase" $ cat > $TESTTMP/checkvisibility.sh < echo "==== \$1:" > hg parents --template "VISIBLE {rev}:{node|short}\n" > # test that pending changes are hidden > unset HG_PENDING > hg parents --template "ACTUAL {rev}:{node|short}\n" > echo "====" > EOF $ cat >> .hg/hgrc < [defaults] > # to fix hash id of temporary revisions > unshelve = --date '0 0' > EOF "hg unshelve" at REV5 implies steps below: (1) commit changes in the working directory (REV6) (2) unbundle shelved revision (REV7) (3) rebase: merge REV7 into REV6 (REV6 => REV6, REV7) (4) rebase: commit merged revision (REV8) (5) rebase: update to REV6 (REV8 => REV6) (6) update to REV5 (REV6 => REV5) (7) abort transaction == test visibility to external preupdate hook $ cat >> .hg/hgrc < [hooks] > preupdate.visibility = sh $TESTTMP/checkvisibility.sh preupdate > EOF $ echo nnnn >> n $ sh $TESTTMP/checkvisibility.sh before-unshelving ==== before-unshelving: VISIBLE 5:703117a2acfb ACTUAL 5:703117a2acfb ==== $ hg unshelve --keep default temporarily committing pending changes (restore with 'hg unshelve --abort') rebasing shelved changes rebasing 7:206bf5d4f922 "changes to: create conflict" (tip) ==== preupdate: 
VISIBLE 6:66b86db80ee4 ACTUAL 5:703117a2acfb ==== ==== preupdate: VISIBLE 8:a0e04704317e ACTUAL 5:703117a2acfb ==== ==== preupdate: VISIBLE 6:66b86db80ee4 ACTUAL 5:703117a2acfb ==== $ cat >> .hg/hgrc < [hooks] > preupdate.visibility = > EOF $ sh $TESTTMP/checkvisibility.sh after-unshelving ==== after-unshelving: VISIBLE 5:703117a2acfb ACTUAL 5:703117a2acfb ==== == test visibility to external update hook $ hg update -q -C 5 $ cat >> .hg/hgrc < [hooks] > update.visibility = sh $TESTTMP/checkvisibility.sh update > EOF $ echo nnnn >> n $ sh $TESTTMP/checkvisibility.sh before-unshelving ==== before-unshelving: VISIBLE 5:703117a2acfb ACTUAL 5:703117a2acfb ==== $ hg unshelve --keep default temporarily committing pending changes (restore with 'hg unshelve --abort') rebasing shelved changes rebasing 7:206bf5d4f922 "changes to: create conflict" (tip) ==== update: VISIBLE 6:66b86db80ee4 VISIBLE 7:206bf5d4f922 ACTUAL 5:703117a2acfb ==== ==== update: VISIBLE 6:66b86db80ee4 ACTUAL 5:703117a2acfb ==== ==== update: VISIBLE 5:703117a2acfb ACTUAL 5:703117a2acfb ==== $ cat >> .hg/hgrc < [hooks] > update.visibility = > EOF $ sh $TESTTMP/checkvisibility.sh after-unshelving ==== after-unshelving: VISIBLE 5:703117a2acfb ACTUAL 5:703117a2acfb ==== $ cd .. test .orig files go where the user wants them to --------------------------------------------------------------- $ hg init salvage $ cd salvage $ echo 'content' > root $ hg commit -A -m 'root' -q $ echo '' > root $ hg shelve -q $ echo 'contADDent' > root $ hg unshelve -q --config 'ui.origbackuppath=.hg/origbackups' warning: conflicts while merging root! 
(edit, then use 'hg resolve --mark') unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue') [1] $ ls .hg/origbackups root.orig $ rm -rf .hg/origbackups test Abort unshelve always gets user out of the unshelved state --------------------------------------------------------------- Wreak havoc on the unshelve process $ rm .hg/unshelverebasestate $ hg unshelve --abort unshelve of 'default' aborted abort: (No such file or directory|The system cannot find the file specified) (re) [255] Can the user leave the current state? $ hg up -C . 1 files updated, 0 files merged, 0 files removed, 0 files unresolved Try again but with a corrupted shelve state file $ hg strip -r 2 -r 1 -q $ hg up -r 0 -q $ echo '' > root $ hg shelve -q $ echo 'contADDent' > root $ hg unshelve -q warning: conflicts while merging root! (edit, then use 'hg resolve --mark') unresolved conflicts (see 'hg resolve', then 'hg unshelve --continue') [1] $ sed 's/ae8c668541e8/123456789012/' .hg/shelvedstate > ../corrupt-shelvedstate $ mv ../corrupt-shelvedstate .hg/histedit-state $ hg unshelve --abort 2>&1 | grep 'rebase aborted' rebase aborted $ hg up -C . 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd .. Keep active bookmark while (un)shelving even on shared repo (issue4940) ----------------------------------------------------------------------- $ cat <> $HGRCPATH > [extensions] > share = > EOF $ hg bookmarks -R repo test 4:33f7f61e6c5e $ hg share -B repo share updating working directory 6 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd share $ hg bookmarks test 4:33f7f61e6c5e $ hg bookmarks foo $ hg bookmarks * foo 5:703117a2acfb test 4:33f7f61e6c5e $ echo x >> x $ hg shelve shelved as foo 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bookmarks * foo 5:703117a2acfb test 4:33f7f61e6c5e $ hg unshelve unshelving change 'foo' $ hg bookmarks * foo 5:703117a2acfb test 4:33f7f61e6c5e $ cd .. 
Shelve and unshelve unknown files. For the purposes of unshelve, a shelved unknown file is the same as a shelved added file, except that it will be in unknown state after unshelve if and only if it was either absent or unknown before the unshelve operation. $ hg init unknowns $ cd unknowns The simplest case is if I simply have an unknown file that I shelve and unshelve $ echo unknown > unknown $ hg status ? unknown $ hg shelve --unknown shelved as default 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg status $ hg unshelve unshelving change 'default' $ hg status ? unknown $ rm unknown If I shelve, add the file, and unshelve, does it stay added? $ echo unknown > unknown $ hg shelve -u shelved as default 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg status $ touch unknown $ hg add unknown $ hg status A unknown $ hg unshelve unshelving change 'default' temporarily committing pending changes (restore with 'hg unshelve --abort') rebasing shelved changes rebasing 1:098df96e7410 "(changes in empty repository)" (tip) merging unknown $ hg status A unknown $ hg forget unknown $ rm unknown And if I shelve, commit, then unshelve, does it become modified? $ echo unknown > unknown $ hg shelve -u shelved as default 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg status $ touch unknown $ hg add unknown $ hg commit -qm "Add unknown" $ hg status $ hg unshelve unshelving change 'default' rebasing shelved changes rebasing 1:098df96e7410 "(changes in empty repository)" (tip) merging unknown $ hg status M unknown $ hg remove --force unknown $ hg commit -qm "Remove unknown" $ cd .. 
mercurial-3.7.3/tests/test-586.t0000644000175000017500000000361512676531525015762 0ustar mpmmpm00000000000000Issue586: removing remote files after merge appears to corrupt the dirstate $ hg init a $ cd a $ echo a > a $ hg ci -Ama adding a $ hg init ../b $ cd ../b $ echo b > b $ hg ci -Amb adding b $ hg pull -f ../a pulling from ../a searching for changes warning: repository is unrelated requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg merge 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg rm -f a $ hg ci -Amc $ hg st -A C b $ cd .. Issue1433: Traceback after two unrelated pull, two move, a merge and a commit (related to issue586) create test repos $ hg init repoa $ touch repoa/a $ hg -R repoa ci -Am adda adding a $ hg init repob $ touch repob/b $ hg -R repob ci -Am addb adding b $ hg init repoc $ cd repoc $ hg pull ../repoa pulling from ../repoa requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (run 'hg update' to get a working copy) $ hg update 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ mkdir tst $ hg mv * tst $ hg ci -m "import a in tst" $ hg pull -f ../repob pulling from ../repob searching for changes warning: repository is unrelated requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) merge both repos $ hg merge 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ mkdir src move b content $ hg mv b src $ hg ci -m "import b in src" $ hg manifest src/b tst/a $ cd .. 
mercurial-3.7.3/tests/test-eol.t0000644000175000017500000002750512676531525016223 0ustar mpmmpm00000000000000Test EOL extension $ cat >> $HGRCPATH < [diff] > git = True > EOF Set up helpers $ cat > switch-eol.py < import sys > try: > import os, msvcrt > msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY) > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY) > except ImportError: > pass > (old, new) = sys.argv[1] == 'LF' and ('\n', '\r\n') or ('\r\n', '\n') > print "%% switching encoding from %r to %r" % (old, new) > for path in sys.argv[2:]: > data = file(path, 'rb').read() > data = data.replace(old, new) > file(path, 'wb').write(data) > EOF $ seteol () { > if [ $1 = "LF" ]; then > EOL='\n' > else > EOL='\r\n' > fi > } $ makerepo () { > seteol $1 > echo "% setup $1 repository" > hg init repo > cd repo > cat > .hgeol < [repository] > native = $1 > [patterns] > mixed.txt = BIN > **.txt = native > EOF > printf "first${EOL}second${EOL}third${EOL}" > a.txt > hg commit --addremove -m 'checkin' > echo > cd .. 
> } $ dotest () { > seteol $1 > echo "% hg clone repo repo-$1" > hg clone --noupdate repo repo-$1 > cd repo-$1 > cat > .hg/hgrc < [extensions] > eol = > [eol] > native = $1 > EOF > hg update > echo '% a.txt' > cat a.txt > echo '% hg cat a.txt' > hg cat a.txt > printf "fourth${EOL}" >> a.txt > echo '% a.txt' > cat a.txt > hg diff > python ../switch-eol.py $1 a.txt > echo '% hg diff only reports a single changed line:' > hg diff > echo "% reverting back to $1 format" > hg revert a.txt > cat a.txt > printf "first\r\nsecond\n" > mixed.txt > hg add mixed.txt > echo "% hg commit of inconsistent .txt file marked as binary (should work)" > hg commit -m 'binary file' > echo "% hg commit of inconsistent .txt file marked as native (should fail)" > printf "first\nsecond\r\nthird\nfourth\r\n" > a.txt > hg commit -m 'inconsistent file' > echo "% hg commit --config eol.only-consistent=False (should work)" > hg commit --config eol.only-consistent=False -m 'inconsistent file' > echo "% hg commit of binary .txt file marked as native (binary files always okay)" > printf "first${EOL}\0${EOL}third${EOL}" > a.txt > hg commit -m 'binary file' > cd .. > rm -r repo-$1 > } $ makemixedrepo () { > echo > echo "# setup $1 repository" > hg init mixed > cd mixed > printf "foo\r\nbar\r\nbaz\r\n" > win.txt > printf "foo\nbar\nbaz\n" > unix.txt > #printf "foo\r\nbar\nbaz\r\n" > mixed.txt > hg commit --addremove -m 'created mixed files' > echo "# setting repository-native EOLs to $1" > cat > .hgeol < [repository] > native = $1 > [patterns] > **.txt = native > EOF > hg commit --addremove -m 'added .hgeol' > cd .. > } $ testmixed () { > echo > echo "% hg clone mixed mixed-$1" > hg clone mixed mixed-$1 > cd mixed-$1 > echo '% hg status (eol extension not yet activated)' > hg status > cat > .hg/hgrc < [extensions] > eol = > [eol] > native = $1 > EOF > echo '% hg status (eol activated)' > hg status > echo '% hg commit' > hg commit -m 'synchronized EOLs' > echo '% hg status' > hg status > cd .. 
> rm -r mixed-$1 > } Basic tests $ makerepo LF % setup LF repository adding .hgeol adding a.txt $ dotest LF % hg clone repo repo-LF 2 files updated, 0 files merged, 0 files removed, 0 files unresolved % a.txt first second third % hg cat a.txt first second third % a.txt first second third fourth diff --git a/a.txt b/a.txt --- a/a.txt +++ b/a.txt @@ -1,3 +1,4 @@ first second third +fourth % switching encoding from '\n' to '\r\n' % hg diff only reports a single changed line: diff --git a/a.txt b/a.txt --- a/a.txt +++ b/a.txt @@ -1,3 +1,4 @@ first second third +fourth % reverting back to LF format first second third % hg commit of inconsistent .txt file marked as binary (should work) % hg commit of inconsistent .txt file marked as native (should fail) abort: inconsistent newline style in a.txt % hg commit --config eol.only-consistent=False (should work) % hg commit of binary .txt file marked as native (binary files always okay) $ dotest CRLF % hg clone repo repo-CRLF 2 files updated, 0 files merged, 0 files removed, 0 files unresolved % a.txt first\r (esc) second\r (esc) third\r (esc) % hg cat a.txt first second third % a.txt first\r (esc) second\r (esc) third\r (esc) fourth\r (esc) diff --git a/a.txt b/a.txt --- a/a.txt +++ b/a.txt @@ -1,3 +1,4 @@ first second third +fourth % switching encoding from '\r\n' to '\n' % hg diff only reports a single changed line: diff --git a/a.txt b/a.txt --- a/a.txt +++ b/a.txt @@ -1,3 +1,4 @@ first second third +fourth % reverting back to CRLF format first\r (esc) second\r (esc) third\r (esc) % hg commit of inconsistent .txt file marked as binary (should work) % hg commit of inconsistent .txt file marked as native (should fail) abort: inconsistent newline style in a.txt % hg commit --config eol.only-consistent=False (should work) % hg commit of binary .txt file marked as native (binary files always okay) $ rm -r repo $ makerepo CRLF % setup CRLF repository adding .hgeol adding a.txt $ dotest LF % hg clone repo repo-LF 2 files updated, 
0 files merged, 0 files removed, 0 files unresolved % a.txt first second third % hg cat a.txt first\r (esc) second\r (esc) third\r (esc) % a.txt first second third fourth diff --git a/a.txt b/a.txt --- a/a.txt +++ b/a.txt @@ -1,3 +1,4 @@ first\r (esc) second\r (esc) third\r (esc) +fourth\r (esc) % switching encoding from '\n' to '\r\n' % hg diff only reports a single changed line: diff --git a/a.txt b/a.txt --- a/a.txt +++ b/a.txt @@ -1,3 +1,4 @@ first\r (esc) second\r (esc) third\r (esc) +fourth\r (esc) % reverting back to LF format first second third % hg commit of inconsistent .txt file marked as binary (should work) % hg commit of inconsistent .txt file marked as native (should fail) abort: inconsistent newline style in a.txt % hg commit --config eol.only-consistent=False (should work) % hg commit of binary .txt file marked as native (binary files always okay) $ dotest CRLF % hg clone repo repo-CRLF 2 files updated, 0 files merged, 0 files removed, 0 files unresolved % a.txt first\r (esc) second\r (esc) third\r (esc) % hg cat a.txt first\r (esc) second\r (esc) third\r (esc) % a.txt first\r (esc) second\r (esc) third\r (esc) fourth\r (esc) diff --git a/a.txt b/a.txt --- a/a.txt +++ b/a.txt @@ -1,3 +1,4 @@ first\r (esc) second\r (esc) third\r (esc) +fourth\r (esc) % switching encoding from '\r\n' to '\n' % hg diff only reports a single changed line: diff --git a/a.txt b/a.txt --- a/a.txt +++ b/a.txt @@ -1,3 +1,4 @@ first\r (esc) second\r (esc) third\r (esc) +fourth\r (esc) % reverting back to CRLF format first\r (esc) second\r (esc) third\r (esc) % hg commit of inconsistent .txt file marked as binary (should work) % hg commit of inconsistent .txt file marked as native (should fail) abort: inconsistent newline style in a.txt % hg commit --config eol.only-consistent=False (should work) % hg commit of binary .txt file marked as native (binary files always okay) $ rm -r repo Mixed tests $ makemixedrepo LF # setup LF repository adding unix.txt adding win.txt # setting 
repository-native EOLs to LF adding .hgeol $ testmixed LF % hg clone mixed mixed-LF updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved % hg status (eol extension not yet activated) % hg status (eol activated) M win.txt % hg commit % hg status $ testmixed CRLF % hg clone mixed mixed-CRLF updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved % hg status (eol extension not yet activated) % hg status (eol activated) M win.txt % hg commit % hg status $ rm -r mixed $ makemixedrepo CRLF # setup CRLF repository adding unix.txt adding win.txt # setting repository-native EOLs to CRLF adding .hgeol $ testmixed LF % hg clone mixed mixed-LF updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved % hg status (eol extension not yet activated) % hg status (eol activated) M unix.txt % hg commit % hg status $ testmixed CRLF % hg clone mixed mixed-CRLF updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved % hg status (eol extension not yet activated) % hg status (eol activated) M unix.txt % hg commit % hg status $ rm -r mixed $ echo '[extensions]' >> $HGRCPATH $ echo 'eol =' >> $HGRCPATH #if unix-permissions Test issue2569 -- eol extension takes write lock on reading: $ hg init repo $ cd repo $ touch .hgeol $ hg status ? .hgeol $ chmod -R -w .hg $ sleep 1 $ touch .hgeol $ hg status --traceback ? .hgeol $ chmod -R u+w .hg $ cd .. 
#endif Test cleverencode: and cleverdecode: aliases for win32text extension $ cat <> $HGRCPATH > [encode] > **.txt = cleverencode: > [decode] > **.txt = cleverdecode: > EOF $ hg init win32compat $ cd win32compat $ printf "foo\r\nbar\r\nbaz\r\n" > win.txt $ printf "foo\nbar\nbaz\n" > unix.txt $ hg add adding unix.txt adding win.txt $ hg commit -m checkin Check that both files have LF line-endings in the repository: $ hg cat win.txt foo bar baz $ hg cat unix.txt foo bar baz Test handling of a broken .hgeol file: $ touch .hgeol $ hg add .hgeol $ hg commit -m 'clean version' $ echo "bad" > .hgeol $ hg status warning: ignoring .hgeol file due to parse error at .hgeol:1: bad M .hgeol $ hg revert .hgeol warning: ignoring .hgeol file due to parse error at .hgeol:1: bad $ hg status ? .hgeol.orig Test eol.only-consistent can be specified in .hgeol $ cd $TESTTMP $ hg init only-consistent $ cd only-consistent $ printf "first\nsecond\r\n" > a.txt $ hg add a.txt $ cat > .hgeol << EOF > [eol] > only-consistent = True > EOF $ hg commit -m 'inconsistent' abort: inconsistent newline style in a.txt [255] $ cat > .hgeol << EOF > [eol] > only-consistent = False > EOF $ hg commit -m 'consistent' Test trailing newline $ cat >> $HGRCPATH < [extensions] > eol= > EOF setup repository $ cd $TESTTMP $ hg init trailing $ cd trailing $ cat > .hgeol < [patterns] > **.txt = native > [eol] > fix-trailing-newline = False > EOF add text without trailing newline $ printf "first\nsecond" > a.txt $ hg commit --addremove -m 'checking in' adding .hgeol adding a.txt $ rm a.txt $ hg update -C -q $ cat a.txt first second (no-eol) $ cat > .hgeol < [patterns] > **.txt = native > [eol] > fix-trailing-newline = True > EOF $ printf "third\nfourth" > a.txt $ hg commit -m 'checking in with newline fix' $ rm a.txt $ hg update -C -q $ cat a.txt third fourth append a line without trailing newline $ printf "fifth" >> a.txt $ hg commit -m 'adding another line line' $ rm a.txt $ hg update -C -q $ cat a.txt third fourth 
fifth amend of changesets with renamed/deleted files expose new code paths $ hg mv a.txt b.txt $ hg ci --amend -q $ hg diff -c. diff --git a/a.txt b/b.txt rename from a.txt rename to b.txt --- a/a.txt +++ b/b.txt @@ -1,2 +1,3 @@ third fourth +fifth $ cd .. mercurial-3.7.3/tests/test-diffstat.t0000644000175000017500000000434312676531525017243 0ustar mpmmpm00000000000000 $ hg init repo $ cd repo $ i=0; while [ "$i" -lt 213 ]; do echo a >> a; i=`expr $i + 1`; done $ hg add a $ cp a b $ hg add b Wide diffstat: $ hg diff --stat a | 213 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ b | 213 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 426 insertions(+), 0 deletions(-) diffstat width: $ COLUMNS=24 hg diff --config ui.interactive=true --stat a | 213 ++++++++++++++ b | 213 ++++++++++++++ 2 files changed, 426 insertions(+), 0 deletions(-) $ hg ci -m adda $ cat >> a < a > a > a > EOF Narrow diffstat: $ hg diff --stat a | 3 +++ 1 files changed, 3 insertions(+), 0 deletions(-) $ hg ci -m appenda >>> open("c", "wb").write("\0") $ touch d $ hg add c d Binary diffstat: $ hg diff --stat c | Bin 1 files changed, 0 insertions(+), 0 deletions(-) Binary git diffstat: $ hg diff --stat --git c | Bin d | 0 2 files changed, 0 insertions(+), 0 deletions(-) $ hg ci -m createb >>> open("file with spaces", "wb").write("\0") $ hg add "file with spaces" Filename with spaces diffstat: $ hg diff --stat file with spaces | Bin 1 files changed, 0 insertions(+), 0 deletions(-) Filename with spaces git diffstat: $ hg diff --stat --git file with spaces | Bin 1 files changed, 0 insertions(+), 0 deletions(-) diffstat within directories: $ hg rm -f 'file with spaces' $ mkdir dir1 dir2 $ echo new1 > dir1/new $ echo new2 > dir2/new $ hg add dir1/new dir2/new $ hg diff --stat dir1/new | 1 + dir2/new | 1 + 2 files changed, 2 insertions(+), 0 deletions(-) $ hg diff --stat --root dir1 new | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) $ 
hg diff --stat --root dir1 dir2 warning: dir2 not inside relative root dir1 $ hg diff --stat --root dir1 -I dir1/old $ cd dir1 $ hg diff --stat . dir1/new | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) $ hg diff --stat --root . new | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) $ hg diff --stat --root ../dir1 ../dir2 warning: ../dir2 not inside relative root . (glob) $ hg diff --stat --root . -I old $ cd .. mercurial-3.7.3/tests/test-excessive-merge.t0000644000175000017500000000577012676531525020537 0ustar mpmmpm00000000000000 $ hg init $ echo foo > a $ echo foo > b $ hg add a b $ hg ci -m "test" $ echo blah > a $ hg ci -m "branch a" $ hg co 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo blah > b $ hg ci -m "branch b" created new head $ HGMERGE=true hg merge 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m "merge b/a -> blah" $ hg co 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ HGMERGE=true hg merge 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m "merge a/b -> blah" created new head $ hg log changeset: 4:2ee31f665a86 tag: tip parent: 1:96155394af80 parent: 2:92cc4c306b19 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: merge a/b -> blah changeset: 3:e16a66a37edd parent: 2:92cc4c306b19 parent: 1:96155394af80 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: merge b/a -> blah changeset: 2:92cc4c306b19 parent: 0:5e0375449e74 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: branch b changeset: 1:96155394af80 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: branch a changeset: 0:5e0375449e74 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: test $ hg debugindex --changelog rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 60 ..... 0 5e0375449e74 000000000000 000000000000 (re) 1 60 62 ..... 
1 96155394af80 5e0375449e74 000000000000 (re) 2 122 62 ..... 2 92cc4c306b19 5e0375449e74 000000000000 (re) 3 184 69 ..... 3 e16a66a37edd 92cc4c306b19 96155394af80 (re) 4 253 29 ..... 4 2ee31f665a86 96155394af80 92cc4c306b19 (re) revision 1 $ hg manifest --debug 1 79d7492df40aa0fa093ec4209be78043c181f094 644 a 2ed2a3912a0b24502043eae84ee4b279c18b90dd 644 b revision 2 $ hg manifest --debug 2 2ed2a3912a0b24502043eae84ee4b279c18b90dd 644 a 79d7492df40aa0fa093ec4209be78043c181f094 644 b revision 3 $ hg manifest --debug 3 79d7492df40aa0fa093ec4209be78043c181f094 644 a 79d7492df40aa0fa093ec4209be78043c181f094 644 b revision 4 $ hg manifest --debug 4 79d7492df40aa0fa093ec4209be78043c181f094 644 a 79d7492df40aa0fa093ec4209be78043c181f094 644 b $ hg debugindex a rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 5 ..... 0 2ed2a3912a0b 000000000000 000000000000 (re) 1 5 6 ..... 1 79d7492df40a 2ed2a3912a0b 000000000000 (re) $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 5 changesets, 4 total revisions mercurial-3.7.3/tests/test-help.t0000644000175000017500000027457112676531525016403 0ustar mpmmpm00000000000000Short help: $ hg Mercurial Distributed SCM basic commands: add add the specified files on the next commit annotate show changeset information by line for each file clone make a copy of an existing repository commit commit the specified files or all outstanding changes diff diff repository (or selected files) export dump the header and diffs for one or more changesets forget forget the specified files on the next commit init create a new repository in the given directory log show revision history of entire repository or files merge merge another revision into working directory pull pull changes from the specified source push push changes to the specified destination remove remove the specified files on the next commit serve start stand-alone webserver status show changed files in the working 
directory summary summarize working directory state update update working directory (or switch revisions) (use "hg help" for the full list of commands or "hg -v" for details) $ hg -q add add the specified files on the next commit annotate show changeset information by line for each file clone make a copy of an existing repository commit commit the specified files or all outstanding changes diff diff repository (or selected files) export dump the header and diffs for one or more changesets forget forget the specified files on the next commit init create a new repository in the given directory log show revision history of entire repository or files merge merge another revision into working directory pull pull changes from the specified source push push changes to the specified destination remove remove the specified files on the next commit serve start stand-alone webserver status show changed files in the working directory summary summarize working directory state update update working directory (or switch revisions) $ hg help Mercurial Distributed SCM list of commands: add add the specified files on the next commit addremove add all new files, delete all missing files annotate show changeset information by line for each file archive create an unversioned archive of a repository revision backout reverse effect of earlier changeset bisect subdivision search of changesets bookmarks create a new bookmark or list existing bookmarks branch set or show the current branch name branches list repository named branches bundle create a changegroup file cat output the current or given revision of files clone make a copy of an existing repository commit commit the specified files or all outstanding changes config show combined config settings from all hgrc files copy mark files as copied for the next commit diff diff repository (or selected files) export dump the header and diffs for one or more changesets files list tracked files forget forget the specified files on the next 
commit graft copy changes from other branches onto the current branch grep search for a pattern in specified files and revisions heads show branch heads help show help for a given topic or a help overview identify identify the working directory or specified revision import import an ordered set of patches incoming show new changesets found in source init create a new repository in the given directory log show revision history of entire repository or files manifest output the current or given revision of the project manifest merge merge another revision into working directory outgoing show changesets not found in the destination paths show aliases for remote repositories phase set or show the current phase name pull pull changes from the specified source push push changes to the specified destination recover roll back an interrupted transaction remove remove the specified files on the next commit rename rename files; equivalent of copy + remove resolve redo merges or set/view the merge status of files revert restore files to their checkout state root print the root (top) of the current working directory serve start stand-alone webserver status show changed files in the working directory summary summarize working directory state tag add one or more tags for the current or given revision tags list repository tags unbundle apply one or more changegroup files update update working directory (or switch revisions) verify verify the integrity of the repository version output version and copyright information additional help topics: config Configuration Files dates Date Formats diffs Diff Formats environment Environment Variables extensions Using Additional Features filesets Specifying File Sets glossary Glossary hgignore Syntax for Mercurial Ignore Files hgweb Configuring hgweb internals Technical implementation topics merge-tools Merge Tools multirevs Specifying Multiple Revisions patterns File Name Patterns phases Working with Phases revisions Specifying Single Revisions 
revsets Specifying Revision Sets scripting Using Mercurial from scripts and automation subrepos Subrepositories templating Template Usage urls URL Paths (use "hg help -v" to show built-in aliases and global options) $ hg -q help add add the specified files on the next commit addremove add all new files, delete all missing files annotate show changeset information by line for each file archive create an unversioned archive of a repository revision backout reverse effect of earlier changeset bisect subdivision search of changesets bookmarks create a new bookmark or list existing bookmarks branch set or show the current branch name branches list repository named branches bundle create a changegroup file cat output the current or given revision of files clone make a copy of an existing repository commit commit the specified files or all outstanding changes config show combined config settings from all hgrc files copy mark files as copied for the next commit diff diff repository (or selected files) export dump the header and diffs for one or more changesets files list tracked files forget forget the specified files on the next commit graft copy changes from other branches onto the current branch grep search for a pattern in specified files and revisions heads show branch heads help show help for a given topic or a help overview identify identify the working directory or specified revision import import an ordered set of patches incoming show new changesets found in source init create a new repository in the given directory log show revision history of entire repository or files manifest output the current or given revision of the project manifest merge merge another revision into working directory outgoing show changesets not found in the destination paths show aliases for remote repositories phase set or show the current phase name pull pull changes from the specified source push push changes to the specified destination recover roll back an interrupted transaction 
remove remove the specified files on the next commit rename rename files; equivalent of copy + remove resolve redo merges or set/view the merge status of files revert restore files to their checkout state root print the root (top) of the current working directory serve start stand-alone webserver status show changed files in the working directory summary summarize working directory state tag add one or more tags for the current or given revision tags list repository tags unbundle apply one or more changegroup files update update working directory (or switch revisions) verify verify the integrity of the repository version output version and copyright information additional help topics: config Configuration Files dates Date Formats diffs Diff Formats environment Environment Variables extensions Using Additional Features filesets Specifying File Sets glossary Glossary hgignore Syntax for Mercurial Ignore Files hgweb Configuring hgweb internals Technical implementation topics merge-tools Merge Tools multirevs Specifying Multiple Revisions patterns File Name Patterns phases Working with Phases revisions Specifying Single Revisions revsets Specifying Revision Sets scripting Using Mercurial from scripts and automation subrepos Subrepositories templating Template Usage urls URL Paths Test extension help: $ hg help extensions --config extensions.rebase= --config extensions.children= Using Additional Features """"""""""""""""""""""""" Mercurial has the ability to add new features through the use of extensions. Extensions may add new commands, add options to existing commands, change the default behavior of commands, or implement hooks. To enable the "foo" extension, either shipped with Mercurial or in the Python search path, create an entry for it in your configuration file, like this: [extensions] foo = You may also specify the full path to an extension: [extensions] myfeature = ~/.hgext/myfeature.py See 'hg help config' for more information on configuration files. 
Extensions are not loaded by default for a variety of reasons: they can increase startup overhead; they may be meant for advanced usage only; they may provide potentially dangerous abilities (such as letting you destroy or modify history); they might not be ready for prime time; or they may alter some usual behaviors of stock Mercurial. It is thus up to the user to activate extensions as needed. To explicitly disable an extension enabled in a configuration file of broader scope, prepend its path with !: [extensions] # disabling extension bar residing in /path/to/extension/bar.py bar = !/path/to/extension/bar.py # ditto, but no path was supplied for extension baz baz = ! enabled extensions: children command to display child changesets (DEPRECATED) rebase command to move sets of revisions to a different ancestor disabled extensions: acl hooks for controlling repository access blackbox log repository events to a blackbox for debugging bugzilla hooks for integrating with the Bugzilla bug tracker censor erase file content at a given revision churn command to display statistics about repository history clonebundles advertise pre-generated bundles to seed clones color colorize output from some commands convert import revisions from foreign VCS repositories into Mercurial eol automatically manage newlines in repository files extdiff command to allow external programs to compare revisions factotum http authentication with factotum gpg commands to sign and verify changesets hgcia hooks for integrating with the CIA.vc notification service hgk browse the repository in a graphical way highlight syntax highlighting for hgweb (requires Pygments) histedit interactive history editing keyword expand keywords in tracked files largefiles track large binary files mq manage a stack of patches notify hooks for sending email push notifications pager browse command output with an external pager patchbomb command to send changesets as (a series of) patch emails purge command to delete 
untracked files from the working directory record commands to interactively select changes for commit/qrefresh relink recreates hardlinks between repository clones schemes extend schemes with shortcuts to repository swarms share share a common history between several working directories shelve save and restore changes to the working directory strip strip changesets and their descendants from history transplant command to transplant changesets from another branch win32mbcs allow the use of MBCS paths with problematic encodings zeroconf discover and advertise repositories on the local network Verify that extension keywords appear in help templates $ hg help --config extensions.transplant= templating|grep transplant > /dev/null Test short command list with verbose option $ hg -v help shortlist Mercurial Distributed SCM basic commands: add add the specified files on the next commit annotate, blame show changeset information by line for each file clone make a copy of an existing repository commit, ci commit the specified files or all outstanding changes diff diff repository (or selected files) export dump the header and diffs for one or more changesets forget forget the specified files on the next commit init create a new repository in the given directory log, history show revision history of entire repository or files merge merge another revision into working directory pull pull changes from the specified source push push changes to the specified destination remove, rm remove the specified files on the next commit serve start stand-alone webserver status, st show changed files in the working directory summary, sum summarize working directory state update, up, checkout, co update working directory (or switch revisions) global options ([+] can be repeated): -R --repository REPO repository root directory or name of overlay bundle file --cwd DIR change working directory -y --noninteractive do not prompt, automatically pick the first choice for all prompts -q --quiet 
suppress output -v --verbose enable additional output --config CONFIG [+] set/override config option (use 'section.name=value') --debug enable debugging output --debugger start debugger --encoding ENCODE set the charset encoding (default: ascii) --encodingmode MODE set the charset encoding mode (default: strict) --traceback always print a traceback on exception --time time how long the command takes --profile print command execution profile --version output version information and exit -h --help display help and exit --hidden consider hidden changesets (use "hg help" for the full list of commands) $ hg add -h hg add [OPTION]... [FILE]... add the specified files on the next commit Schedule files to be version controlled and added to the repository. The files will be added to the repository at the next commit. To undo an add before that, see 'hg forget'. If no names are given, add all files to the repository (except files matching ".hgignore"). Returns 0 if all files are successfully added. options ([+] can be repeated): -I --include PATTERN [+] include names matching the given patterns -X --exclude PATTERN [+] exclude names matching the given patterns -S --subrepos recurse into subrepositories -n --dry-run do not perform actions, just print output (some details hidden, use --verbose to show complete help) Verbose help for add $ hg add -hv hg add [OPTION]... [FILE]... add the specified files on the next commit Schedule files to be version controlled and added to the repository. The files will be added to the repository at the next commit. To undo an add before that, see 'hg forget'. If no names are given, add all files to the repository (except files matching ".hgignore"). Examples: - New (unknown) files are added automatically by 'hg add': $ ls foo.c $ hg status ? foo.c $ hg add adding foo.c $ hg status A foo.c - Specific files to be added can be specified: $ ls bar.c foo.c $ hg status ? bar.c ? foo.c $ hg add bar.c $ hg status A bar.c ? 
foo.c Returns 0 if all files are successfully added. options ([+] can be repeated): -I --include PATTERN [+] include names matching the given patterns -X --exclude PATTERN [+] exclude names matching the given patterns -S --subrepos recurse into subrepositories -n --dry-run do not perform actions, just print output global options ([+] can be repeated): -R --repository REPO repository root directory or name of overlay bundle file --cwd DIR change working directory -y --noninteractive do not prompt, automatically pick the first choice for all prompts -q --quiet suppress output -v --verbose enable additional output --config CONFIG [+] set/override config option (use 'section.name=value') --debug enable debugging output --debugger start debugger --encoding ENCODE set the charset encoding (default: ascii) --encodingmode MODE set the charset encoding mode (default: strict) --traceback always print a traceback on exception --time time how long the command takes --profile print command execution profile --version output version information and exit -h --help display help and exit --hidden consider hidden changesets Test help option with version option $ hg add -h --version Mercurial Distributed SCM (version *) (glob) (see https://mercurial-scm.org for more information) Copyright (C) 2005-2016 Matt Mackall and others This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. $ hg add --skjdfks hg add: option --skjdfks not recognized hg add [OPTION]... [FILE]... 
add the specified files on the next commit options ([+] can be repeated): -I --include PATTERN [+] include names matching the given patterns -X --exclude PATTERN [+] exclude names matching the given patterns -S --subrepos recurse into subrepositories -n --dry-run do not perform actions, just print output (use "hg add -h" to show more help) [255] Test ambiguous command help $ hg help ad list of commands: add add the specified files on the next commit addremove add all new files, delete all missing files (use "hg help -v ad" to show built-in aliases and global options) Test command without options $ hg help verify hg verify verify the integrity of the repository Verify the integrity of the current repository. This will perform an extensive check of the repository's integrity, validating the hashes and checksums of each entry in the changelog, manifest, and tracked files, as well as the integrity of their crosslinks and indices. Please see https://mercurial-scm.org/wiki/RepositoryCorruption for more information about recovery from corruption of the repository. Returns 0 on success, 1 if errors are encountered. (some details hidden, use --verbose to show complete help) $ hg help diff hg diff [OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]... diff repository (or selected files) Show differences between revisions for the specified files. Differences between files are shown using the unified diff format. Note: 'hg diff' may generate unexpected results for merges, as it will default to comparing against the working directory's first parent changeset if no revisions are specified. When two revision arguments are given, then changes are shown between those revisions. If only one revision is specified then that revision is compared to the working directory, and, when no revisions are specified, the working directory files are compared to its first parent. 
Alternatively you can specify -c/--change with a revision to see the changes in that changeset relative to its first parent. Without the -a/--text option, diff will avoid generating diffs of files it detects as binary. With -a, diff will generate a diff anyway, probably with undesirable results. Use the -g/--git option to generate diffs in the git extended diff format. For more information, read 'hg help diffs'. Returns 0 on success. options ([+] can be repeated): -r --rev REV [+] revision -c --change REV change made by revision -a --text treat all files as text -g --git use git extended diff format --nodates omit dates from diff headers --noprefix omit a/ and b/ prefixes from filenames -p --show-function show which function each change is in --reverse produce a diff that undoes the changes -w --ignore-all-space ignore white space when comparing lines -b --ignore-space-change ignore changes in the amount of white space -B --ignore-blank-lines ignore changes whose lines are all blank -U --unified NUM number of lines of context to show --stat output diffstat-style summary of changes --root DIR produce diffs relative to subdirectory -I --include PATTERN [+] include names matching the given patterns -X --exclude PATTERN [+] exclude names matching the given patterns -S --subrepos recurse into subrepositories (some details hidden, use --verbose to show complete help) $ hg help status hg status [OPTION]... [FILE]... aliases: st show changed files in the working directory Show status of files in the repository. If names are given, only files that match are shown. Files that are clean or ignored or the source of a copy/move operation, are not listed unless -c/--clean, -i/--ignored, -C/--copies or -A/--all are given. Unless options described with "show only ..." are given, the options -mardu are used. Option -q/--quiet hides untracked (unknown and ignored) files unless explicitly requested with -u/--unknown or -i/--ignored. 
Note: 'hg status' may appear to disagree with diff if permissions have changed or a merge has occurred. The standard diff format does not report permission changes and diff only reports changes relative to one merge parent. If one revision is given, it is used as the base revision. If two revisions are given, the differences between them are shown. The --change option can also be used as a shortcut to list the changed files of a revision from its first parent. The codes used to show the status of files are: M = modified A = added R = removed C = clean ! = missing (deleted by non-hg command, but still tracked) ? = not tracked I = ignored = origin of the previous file (with --copies) Returns 0 on success. options ([+] can be repeated): -A --all show status of all files -m --modified show only modified files -a --added show only added files -r --removed show only removed files -d --deleted show only deleted (but tracked) files -c --clean show only files without changes -u --unknown show only unknown (not tracked) files -i --ignored show only ignored files -n --no-status hide status prefix -C --copies show source of copied files -0 --print0 end filenames with NUL, for use with xargs --rev REV [+] show difference from revision --change REV list the changed files of a revision -I --include PATTERN [+] include names matching the given patterns -X --exclude PATTERN [+] exclude names matching the given patterns -S --subrepos recurse into subrepositories (some details hidden, use --verbose to show complete help) $ hg -q help status hg status [OPTION]... [FILE]... 
show changed files in the working directory $ hg help foo abort: no such help topic: foo (try "hg help --keyword foo") [255] $ hg skjdfks hg: unknown command 'skjdfks' Mercurial Distributed SCM basic commands: add add the specified files on the next commit annotate show changeset information by line for each file clone make a copy of an existing repository commit commit the specified files or all outstanding changes diff diff repository (or selected files) export dump the header and diffs for one or more changesets forget forget the specified files on the next commit init create a new repository in the given directory log show revision history of entire repository or files merge merge another revision into working directory pull pull changes from the specified source push push changes to the specified destination remove remove the specified files on the next commit serve start stand-alone webserver status show changed files in the working directory summary summarize working directory state update update working directory (or switch revisions) (use "hg help" for the full list of commands or "hg -v" for details) [255] Make sure that we don't run afoul of the help system thinking that this is a section and erroring out weirdly. $ hg .log hg: unknown command '.log' (did you mean log?) [255] $ hg log. hg: unknown command 'log.' (did you mean log?) [255] $ hg pu.lh hg: unknown command 'pu.lh' (did you mean one of pull, push?) 
[255] $ cat > helpext.py < import os > from mercurial import cmdutil, commands > > cmdtable = {} > command = cmdutil.command(cmdtable) > > @command('nohelp', > [('', 'longdesc', 3, 'x'*90), > ('n', '', None, 'normal desc'), > ('', 'newline', '', 'line1\nline2')], > 'hg nohelp', > norepo=True) > @command('debugoptDEP', [('', 'dopt', None, 'option is (DEPRECATED)')]) > @command('debugoptEXP', [('', 'eopt', None, 'option is (EXPERIMENTAL)')]) > def nohelp(ui, *args, **kwargs): > pass > > EOF $ echo '[extensions]' >> $HGRCPATH $ echo "helpext = `pwd`/helpext.py" >> $HGRCPATH Test command with no help text $ hg help nohelp hg nohelp (no help text available) options: --longdesc VALUE xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx (default: 3) -n -- normal desc --newline VALUE line1 line2 (some details hidden, use --verbose to show complete help) $ hg help -k nohelp Commands: nohelp hg nohelp Extension Commands: nohelp (no help text available) Test that default list of commands omits extension commands $ hg help Mercurial Distributed SCM list of commands: add add the specified files on the next commit addremove add all new files, delete all missing files annotate show changeset information by line for each file archive create an unversioned archive of a repository revision backout reverse effect of earlier changeset bisect subdivision search of changesets bookmarks create a new bookmark or list existing bookmarks branch set or show the current branch name branches list repository named branches bundle create a changegroup file cat output the current or given revision of files clone make a copy of an existing repository commit commit the specified files or all outstanding changes config show combined config settings from all hgrc files copy mark files as copied for the next commit diff diff repository (or selected files) export dump the header and diffs for one or more changesets files list tracked files forget forget the 
specified files on the next commit graft copy changes from other branches onto the current branch grep search for a pattern in specified files and revisions heads show branch heads help show help for a given topic or a help overview identify identify the working directory or specified revision import import an ordered set of patches incoming show new changesets found in source init create a new repository in the given directory log show revision history of entire repository or files manifest output the current or given revision of the project manifest merge merge another revision into working directory outgoing show changesets not found in the destination paths show aliases for remote repositories phase set or show the current phase name pull pull changes from the specified source push push changes to the specified destination recover roll back an interrupted transaction remove remove the specified files on the next commit rename rename files; equivalent of copy + remove resolve redo merges or set/view the merge status of files revert restore files to their checkout state root print the root (top) of the current working directory serve start stand-alone webserver status show changed files in the working directory summary summarize working directory state tag add one or more tags for the current or given revision tags list repository tags unbundle apply one or more changegroup files update update working directory (or switch revisions) verify verify the integrity of the repository version output version and copyright information enabled extensions: helpext (no help text available) additional help topics: config Configuration Files dates Date Formats diffs Diff Formats environment Environment Variables extensions Using Additional Features filesets Specifying File Sets glossary Glossary hgignore Syntax for Mercurial Ignore Files hgweb Configuring hgweb internals Technical implementation topics merge-tools Merge Tools multirevs Specifying Multiple Revisions patterns 
File Name Patterns phases Working with Phases revisions Specifying Single Revisions revsets Specifying Revision Sets scripting Using Mercurial from scripts and automation subrepos Subrepositories templating Template Usage urls URL Paths (use "hg help -v" to show built-in aliases and global options) Test list of internal help commands $ hg help debug debug commands (internal and unsupported): debugancestor find the ancestor revision of two revisions in a given index debugapplystreamclonebundle apply a stream clone bundle file debugbuilddag builds a repo with a given DAG from scratch in the current empty repo debugbundle lists the contents of a bundle debugcheckstate validate the correctness of the current dirstate debugcommands list all available commands and options debugcomplete returns the completion list associated with the given command debugcreatestreamclonebundle create a stream clone bundle file debugdag format the changelog or an index DAG as a concise textual description debugdata dump the contents of a data file revision debugdate parse and display a date debugdeltachain dump information about delta chains in a revlog debugdirstate show the contents of the current dirstate debugdiscovery runs the changeset discovery protocol in isolation debugextensions show information about active extensions debugfileset parse and apply a fileset specification debugfsinfo show information detected about current filesystem debuggetbundle retrieves a bundle from a repo debugignore display the combined ignore pattern and information about ignored files debugindex dump the contents of an index file debugindexdot dump an index DAG as a graphviz dot file debuginstall test Mercurial installation debugknown test whether node ids are known to a repo debuglocks show or modify state of locks debugmergestate print merge state debugnamecomplete complete "names" - tags, open branch names, bookmark names debugobsolete create arbitrary obsolete marker debugoptDEP (no help text 
available) debugoptEXP (no help text available) debugpathcomplete complete part or all of a tracked path debugpushkey access the pushkey key/value protocol debugpvec (no help text available) debugrebuilddirstate rebuild the dirstate as it would look like for the given revision debugrebuildfncache rebuild the fncache file debugrename dump rename information debugrevlog show data and statistics about a revlog debugrevspec parse and apply a revision specification debugsetparents manually set the parents of the current working directory debugsub (no help text available) debugsuccessorssets show set of successors for revision debugwalk show how files match on given patterns debugwireargs (no help text available) (use "hg help -v debug" to show built-in aliases and global options) internals topic renders index of available sub-topics $ hg help internals Technical implementation topics """"""""""""""""""""""""""""""" bundles container for exchange of repository data changegroups representation of revlog data revlogs revision storage mechanism sub-topics can be accessed $ hg help internals.changegroups Changegroups ============ Changegroups are representations of repository revlog data, specifically the changelog, manifest, and filelogs. There are 3 versions of changegroups: "1", "2", and "3". From a high- level, versions "1" and "2" are almost exactly the same, with the only difference being a header on entries in the changeset segment. Version "3" adds support for exchanging treemanifests and includes revlog flags in the delta header. Changegroups consists of 3 logical segments: +---------------------------------+ | | | | | changeset | manifest | filelogs | | | | | +---------------------------------+ The principle building block of each segment is a *chunk*. 
A *chunk* is a framed piece of data: +---------------------------------------+ | | | | length | data | | (32 bits) | bytes | | | | +---------------------------------------+ Each chunk starts with a 32-bit big-endian signed integer indicating the length of the raw data that follows. There is a special case chunk that has 0 length ("0x00000000"). We call this an *empty chunk*. Delta Groups ------------ A *delta group* expresses the content of a revlog as a series of deltas, or patches against previous revisions. Delta groups consist of 0 or more *chunks* followed by the *empty chunk* to signal the end of the delta group: +------------------------------------------------------------------------+ | | | | | | | chunk0 length | chunk0 data | chunk1 length | chunk1 data | 0x0 | | (32 bits) | (various) | (32 bits) | (various) | (32 bits) | | | | | | | +------------------------------------------------------------+-----------+ Each *chunk*'s data consists of the following: +-----------------------------------------+ | | | | | delta header | mdiff header | delta | | (various) | (12 bytes) | (various) | | | | | +-----------------------------------------+ The *length* field is the byte length of the remaining 3 logical pieces of data. The *delta* is a diff from an existing entry in the changelog. The *delta header* is different between versions "1", "2", and "3" of the changegroup format. 
Version 1: +------------------------------------------------------+ | | | | | | node | p1 node | p2 node | link node | | (20 bytes) | (20 bytes) | (20 bytes) | (20 bytes) | | | | | | +------------------------------------------------------+ Version 2: +------------------------------------------------------------------+ | | | | | | | node | p1 node | p2 node | base node | link node | | (20 bytes) | (20 bytes) | (20 bytes) | (20 bytes) | (20 bytes) | | | | | | | +------------------------------------------------------------------+ Version 3: +------------------------------------------------------------------------------+ | | | | | | | | node | p1 node | p2 node | base node | link node | flags | | (20 bytes) | (20 bytes) | (20 bytes) | (20 bytes) | (20 bytes) | (2 bytes) | | | | | | | | +------------------------------------------------------------------------------+ The *mdiff header* consists of 3 32-bit big-endian signed integers describing offsets at which to apply the following delta content: +-------------------------------------+ | | | | | offset | old length | new length | | (32 bits) | (32 bits) | (32 bits) | | | | | +-------------------------------------+ In version 1, the delta is always applied against the previous node from the changegroup or the first parent if this is the first entry in the changegroup. In version 2, the delta base node is encoded in the entry in the changegroup. This allows the delta to be expressed against any parent, which can result in smaller deltas and more efficient encoding of data. Changeset Segment ----------------- The *changeset segment* consists of a single *delta group* holding changelog data. It is followed by an *empty chunk* to denote the boundary to the *manifests segment*. Manifest Segment ---------------- The *manifest segment* consists of a single *delta group* holding manifest data. It is followed by an *empty chunk* to denote the boundary to the *filelogs segment*. 
Filelogs Segment ---------------- The *filelogs* segment consists of multiple sub-segments, each corresponding to an individual file whose data is being described: +--------------------------------------+ | | | | | | filelog0 | filelog1 | filelog2 | ... | | | | | | +--------------------------------------+ In version "3" of the changegroup format, filelogs may include directory logs when treemanifests are in use. directory logs are identified by having a trailing '/' on their filename (see below). The final filelog sub-segment is followed by an *empty chunk* to denote the end of the segment and the overall changegroup. Each filelog sub-segment consists of the following: +------------------------------------------+ | | | | | filename size | filename | delta group | | (32 bits) | (various) | (various) | | | | | +------------------------------------------+ That is, a *chunk* consisting of the filename (not terminated or padded) followed by N chunks constituting the *delta group* for this file. 
Test list of commands with command with no help text $ hg help helpext helpext extension - no help text available list of commands: nohelp (no help text available) (use "hg help -v helpext" to show built-in aliases and global options) test deprecated and experimental options are hidden in command help $ hg help debugoptDEP hg debugoptDEP (no help text available) options: (some details hidden, use --verbose to show complete help) $ hg help debugoptEXP hg debugoptEXP (no help text available) options: (some details hidden, use --verbose to show complete help) test deprecated and experimental options is shown with -v $ hg help -v debugoptDEP | grep dopt --dopt option is (DEPRECATED) $ hg help -v debugoptEXP | grep eopt --eopt option is (EXPERIMENTAL) #if gettext test deprecated option is hidden with translation with untranslated description (use many globy for not failing on changed transaction) $ LANGUAGE=sv hg help debugoptDEP hg debugoptDEP (*) (glob) options: (some details hidden, use --verbose to show complete help) #endif Test commands that collide with topics (issue4240) $ hg config -hq hg config [-u] [NAME]... show combined config settings from all hgrc files $ hg showconfig -hq hg config [-u] [NAME]... show combined config settings from all hgrc files Test a help topic $ hg help revs Specifying Single Revisions """"""""""""""""""""""""""" Mercurial supports several ways to specify individual revisions. A plain integer is treated as a revision number. Negative integers are treated as sequential offsets from the tip, with -1 denoting the tip, -2 denoting the revision prior to the tip, and so forth. A 40-digit hexadecimal string is treated as a unique revision identifier. A hexadecimal string less than 40 characters long is treated as a unique revision identifier and is referred to as a short-form identifier. A short-form identifier is only valid if it is the prefix of exactly one full-length identifier. 
Any other string is treated as a bookmark, tag, or branch name. A bookmark is a movable pointer to a revision. A tag is a permanent name associated with a revision. A branch name denotes the tipmost open branch head of that branch - or if they are all closed, the tipmost closed head of the branch. Bookmark, tag, and branch names must not contain the ":" character. The reserved name "tip" always identifies the most recent revision. The reserved name "null" indicates the null revision. This is the revision of an empty repository, and the parent of revision 0. The reserved name "." indicates the working directory parent. If no working directory is checked out, it is equivalent to null. If an uncommitted merge is in progress, "." is the revision of the first parent. Test repeated config section name $ hg help config.host "http_proxy.host" Host name and (optional) port of the proxy server, for example "myproxy:8000". "smtp.host" Host name of mail server, e.g. "mail.example.com". Unrelated trailing paragraphs shouldn't be included $ hg help config.extramsg | grep '^$' Test capitalized section name $ hg help scripting.HGPLAIN > /dev/null Help subsection: $ hg help config.charsets |grep "Email example:" > /dev/null [1] Show nested definitions ("profiling.type"[break]"ls"[break]"stat"[break]) $ hg help config.type | egrep '^$'|wc -l \s*3 (re) Separate sections from subsections $ hg help config.format | egrep '^ ("|-)|^\s*$' | uniq "format" -------- "usegeneraldelta" "dotencode" "usefncache" "usestore" "profiling" ----------- "format" "progress" ---------- "format" Last item in help config.*: $ hg help config.`hg help config|grep '^ "'| \ > tail -1|sed 's![ "]*!!g'`| \ > grep "hg help -c config" > /dev/null [1] note to use help -c for general hg help config: $ hg help config |grep "hg help -c config" > /dev/null Test templating help $ hg help templating | egrep '(desc|diffstat|firstline|nonempty) ' desc String. The text of the changeset description. diffstat String. 
Statistics of changes with the following format: firstline Any text. Returns the first line of text. nonempty Any text. Returns '(none)' if the string is empty. Test deprecated items $ hg help -v templating | grep currentbookmark currentbookmark $ hg help templating | (grep currentbookmark || true) Test help hooks $ cat > helphook1.py < from mercurial import help > > def rewrite(ui, topic, doc): > return doc + '\nhelphook1\n' > > def extsetup(ui): > help.addtopichook('revsets', rewrite) > EOF $ cat > helphook2.py < from mercurial import help > > def rewrite(ui, topic, doc): > return doc + '\nhelphook2\n' > > def extsetup(ui): > help.addtopichook('revsets', rewrite) > EOF $ echo '[extensions]' >> $HGRCPATH $ echo "helphook1 = `pwd`/helphook1.py" >> $HGRCPATH $ echo "helphook2 = `pwd`/helphook2.py" >> $HGRCPATH $ hg help revsets | grep helphook helphook1 helphook2 help -c should only show debug --debug $ hg help -c --debug|egrep debug|wc -l|egrep '^\s*0\s*$' [1] help -c should only show deprecated for -v $ hg help -c -v|egrep DEPRECATED|wc -l|egrep '^\s*0\s*$' [1] Test -s / --system $ hg help config.files -s windows |grep 'etc/mercurial' | \ > wc -l | sed -e 's/ //g' 0 $ hg help config.files --system unix | grep 'USER' | \ > wc -l | sed -e 's/ //g' 0 Test -e / -c / -k combinations $ hg help -c|egrep '^[A-Z].*:|^ debug' Commands: $ hg help -e|egrep '^[A-Z].*:|^ debug' Extensions: $ hg help -k|egrep '^[A-Z].*:|^ debug' Topics: Commands: Extensions: Extension Commands: $ hg help -c schemes abort: no such help topic: schemes (try "hg help --keyword schemes") [255] $ hg help -e schemes |head -1 schemes extension - extend schemes with shortcuts to repository swarms $ hg help -c -k dates |egrep '^(Topics|Extensions|Commands):' Commands: $ hg help -e -k a |egrep '^(Topics|Extensions|Commands):' Extensions: $ hg help -e -c -k date |egrep '^(Topics|Extensions|Commands):' Extensions: Commands: $ hg help -c commit > /dev/null $ hg help -e -c commit > /dev/null $ hg help -e 
commit > /dev/null abort: no such help topic: commit (try "hg help --keyword commit") [255] Test keyword search help $ cat > prefixedname.py < '''matched against word "clone" > ''' > EOF $ echo '[extensions]' >> $HGRCPATH $ echo "dot.dot.prefixedname = `pwd`/prefixedname.py" >> $HGRCPATH $ hg help -k clone Topics: config Configuration Files extensions Using Additional Features glossary Glossary phases Working with Phases subrepos Subrepositories urls URL Paths Commands: bookmarks create a new bookmark or list existing bookmarks clone make a copy of an existing repository paths show aliases for remote repositories update update working directory (or switch revisions) Extensions: clonebundles advertise pre-generated bundles to seed clones prefixedname matched against word "clone" relink recreates hardlinks between repository clones Extension Commands: qclone clone main and patch repository at same time Test unfound topic $ hg help nonexistingtopicthatwillneverexisteverever abort: no such help topic: nonexistingtopicthatwillneverexisteverever (try "hg help --keyword nonexistingtopicthatwillneverexisteverever") [255] Test unfound keyword $ hg help --keyword nonexistingwordthatwillneverexisteverever abort: no matches (try "hg help" for a list of topics) [255] Test omit indicating for help $ cat > addverboseitems.py < '''extension to test omit indicating. > > This paragraph is never omitted (for extension) > > .. container:: verbose > > This paragraph is omitted, > if :hg:\`help\` is invoked without \`\`-v\`\` (for extension) > > This paragraph is never omitted, too (for extension) > ''' > > from mercurial import help, commands > testtopic = """This paragraph is never omitted (for topic). > > .. 
container:: verbose > > This paragraph is omitted, > if :hg:\`help\` is invoked without \`\`-v\`\` (for topic) > > This paragraph is never omitted, too (for topic) > """ > def extsetup(ui): > help.helptable.append((["topic-containing-verbose"], > "This is the topic to test omit indicating.", > lambda ui: testtopic)) > EOF $ echo '[extensions]' >> $HGRCPATH $ echo "addverboseitems = `pwd`/addverboseitems.py" >> $HGRCPATH $ hg help addverboseitems addverboseitems extension - extension to test omit indicating. This paragraph is never omitted (for extension) This paragraph is never omitted, too (for extension) (some details hidden, use --verbose to show complete help) no commands defined $ hg help -v addverboseitems addverboseitems extension - extension to test omit indicating. This paragraph is never omitted (for extension) This paragraph is omitted, if 'hg help' is invoked without "-v" (for extension) This paragraph is never omitted, too (for extension) no commands defined $ hg help topic-containing-verbose This is the topic to test omit indicating. """""""""""""""""""""""""""""""""""""""""" This paragraph is never omitted (for topic). This paragraph is never omitted, too (for topic) (some details hidden, use --verbose to show complete help) $ hg help -v topic-containing-verbose This is the topic to test omit indicating. """""""""""""""""""""""""""""""""""""""""" This paragraph is never omitted (for topic). This paragraph is omitted, if 'hg help' is invoked without "-v" (for topic) This paragraph is never omitted, too (for topic) Test section lookup $ hg help revset.merge "merge()" Changeset is a merge changeset. $ hg help glossary.dag DAG The repository of changesets of a distributed version control system (DVCS) can be described as a directed acyclic graph (DAG), consisting of nodes and edges, where nodes correspond to changesets and edges imply a parent -> child relation. This graph can be visualized by graphical tools such as 'hg log --graph'. 
In Mercurial, the DAG is limited by the requirement for children to have at most two parents. $ hg help hgrc.paths "paths" ------- Assigns symbolic names and behavior to repositories. Options are symbolic names defining the URL or directory that is the location of the repository. Example: [paths] my_server = https://example.com/my_repo local_path = /home/me/repo These symbolic names can be used from the command line. To pull from "my_server": 'hg pull my_server'. To push to "local_path": 'hg push local_path'. Options containing colons (":") denote sub-options that can influence behavior for that specific path. Example: [paths] my_server = https://example.com/my_path my_server:pushurl = ssh://example.com/my_path The following sub-options can be defined: "pushurl" The URL to use for push operations. If not defined, the location defined by the path's main entry is used. The following special named paths exist: "default" The URL or directory to use when no source or remote is specified. 'hg clone' will automatically define this path to the location the repository was cloned from. "default-push" (deprecated) The URL or directory for the default 'hg push' location. "default:pushurl" should be used instead. $ hg help glossary.mcguffin abort: help section not found [255] $ hg help glossary.mc.guffin abort: help section not found [255] $ hg help template.files files List of strings. All files modified, added, or removed by this changeset. Test dynamic list of merge tools only shows up once $ hg help merge-tools Merge Tools """"""""""" To merge files Mercurial uses merge tools. A merge tool combines two different versions of a file into a merged file. Merge tools are given the two files and the greatest common ancestor of the two file versions, so they can determine the changes made on both branches. Merge tools are used both for 'hg resolve', 'hg merge', 'hg update', 'hg backout' and in several extensions. 
Usually, the merge tool tries to automatically reconcile the files by combining all non-overlapping changes that occurred separately in the two different evolutions of the same initial base file. Furthermore, some interactive merge programs make it easier to manually resolve conflicting merges, either in a graphical way, or by inserting some conflict markers. Mercurial does not include any interactive merge programs but relies on external tools for that. Available merge tools ===================== External merge tools and their properties are configured in the merge- tools configuration section - see hgrc(5) - but they can often just be named by their executable. A merge tool is generally usable if its executable can be found on the system and if it can handle the merge. The executable is found if it is an absolute or relative executable path or the name of an application in the executable search path. The tool is assumed to be able to handle the merge if it can handle symlinks if the file is a symlink, if it can handle binary files if the file is binary, and if a GUI is available if the tool requires a GUI. There are some internal merge tools which can be used. The internal merge tools are: ":dump" Creates three versions of the files to merge, containing the contents of local, other and base. These files can then be used to perform a merge manually. If the file to be merged is named "a.txt", these files will accordingly be named "a.txt.local", "a.txt.other" and "a.txt.base" and they will be placed in the same directory as "a.txt". ":fail" Rather than attempting to merge files that were modified on both branches, it marks them as unresolved. The resolve command must be used to resolve these conflicts. ":local" Uses the local version of files as the merged version. ":merge" Uses the internal non-interactive simple merge algorithm for merging files. It will fail if there are any conflicts and leave markers in the partially merged file. 
Markers will have two sections, one for each side of merge. ":merge-local" Like :merge, but resolve all conflicts non-interactively in favor of the local changes. ":merge-other" Like :merge, but resolve all conflicts non-interactively in favor of the other changes. ":merge3" Uses the internal non-interactive simple merge algorithm for merging files. It will fail if there are any conflicts and leave markers in the partially merged file. Marker will have three sections, one from each side of the merge and one for the base content. ":other" Uses the other version of files as the merged version. ":prompt" Asks the user which of the local or the other version to keep as the merged version. ":tagmerge" Uses the internal tag merge algorithm (experimental). ":union" Uses the internal non-interactive simple merge algorithm for merging files. It will use both left and right sides for conflict regions. No markers are inserted. Internal tools are always available and do not require a GUI but will by default not handle symlinks or binary files. Choosing a merge tool ===================== Mercurial uses these rules when deciding which merge tool to use: 1. If a tool has been specified with the --tool option to merge or resolve, it is used. If it is the name of a tool in the merge-tools configuration, its configuration is used. Otherwise the specified tool must be executable by the shell. 2. If the "HGMERGE" environment variable is present, its value is used and must be executable by the shell. 3. If the filename of the file to be merged matches any of the patterns in the merge-patterns configuration section, the first usable merge tool corresponding to a matching pattern is used. Here, binary capabilities of the merge tool are not considered. 4. If ui.merge is set it will be considered next. If the value is not the name of a configured tool, the specified value is used and must be executable by the shell. Otherwise the named tool is used if it is usable. 5. 
If any usable merge tools are present in the merge-tools configuration section, the one with the highest priority is used. 6. If a program named "hgmerge" can be found on the system, it is used - but it will by default not be used for symlinks and binary files. 7. If the file to be merged is not binary and is not a symlink, then internal ":merge" is used. 8. The merge of the file fails and must be resolved before commit. Note: After selecting a merge program, Mercurial will by default attempt to merge the files using a simple merge algorithm first. Only if it doesn't succeed because of conflicting changes Mercurial will actually execute the merge program. Whether to use the simple merge algorithm first can be controlled by the premerge setting of the merge tool. Premerge is enabled by default unless the file is binary or a symlink. See the merge-tools and ui sections of hgrc(5) for details on the configuration of merge tools. Test usage of section marks in help documents $ cd "$TESTDIR"/../doc $ python check-seclevel.py $ cd $TESTTMP #if serve Test the help pages in hgweb. Dish up an empty repo; serve it cold. $ hg init "$TESTTMP/test" $ hg serve -R "$TESTTMP/test" -n test -p $HGPORT -d --pid-file=hg.pid $ cat hg.pid >> $DAEMON_PIDS $ get-with-headers.py 127.0.0.1:$HGPORT "help" 200 Script output follows Help: Index

                        Topics

                        config Configuration Files
                        dates Date Formats
                        diffs Diff Formats
                        environment Environment Variables
                        extensions Using Additional Features
                        filesets Specifying File Sets
                        glossary Glossary
                        hgignore Syntax for Mercurial Ignore Files
                        hgweb Configuring hgweb
                        internals Technical implementation topics
                        merge-tools Merge Tools
                        multirevs Specifying Multiple Revisions
                        patterns File Name Patterns
                        phases Working with Phases
                        revisions Specifying Single Revisions
                        revsets Specifying Revision Sets
                        scripting Using Mercurial from scripts and automation
                        subrepos Subrepositories
                        templating Template Usage
                        urls URL Paths
                        topic-containing-verbose This is the topic to test omit indicating.

                        Main Commands

                        add add the specified files on the next commit
                        annotate show changeset information by line for each file
                        clone make a copy of an existing repository
                        commit commit the specified files or all outstanding changes
                        diff diff repository (or selected files)
                        export dump the header and diffs for one or more changesets
                        forget forget the specified files on the next commit
                        init create a new repository in the given directory
                        log show revision history of entire repository or files
                        merge merge another revision into working directory
                        pull pull changes from the specified source
                        push push changes to the specified destination
                        remove remove the specified files on the next commit
                        serve start stand-alone webserver
                        status show changed files in the working directory
                        summary summarize working directory state
                        update update working directory (or switch revisions)

                        Other Commands

                        addremove add all new files, delete all missing files
                        archive create an unversioned archive of a repository revision
                        backout reverse effect of earlier changeset
                        bisect subdivision search of changesets
                        bookmarks create a new bookmark or list existing bookmarks
                        branch set or show the current branch name
                        branches list repository named branches
                        bundle create a changegroup file
                        cat output the current or given revision of files
                        config show combined config settings from all hgrc files
                        copy mark files as copied for the next commit
                        files list tracked files
                        graft copy changes from other branches onto the current branch
                        grep search for a pattern in specified files and revisions
                        heads show branch heads
                        help show help for a given topic or a help overview
                        identify identify the working directory or specified revision
                        import import an ordered set of patches
                        incoming show new changesets found in source
                        manifest output the current or given revision of the project manifest
                        nohelp (no help text available)
                        outgoing show changesets not found in the destination
                        paths show aliases for remote repositories
                        phase set or show the current phase name
                        recover roll back an interrupted transaction
                        rename rename files; equivalent of copy + remove
                        resolve redo merges or set/view the merge status of files
                        revert restore files to their checkout state
                        root print the root (top) of the current working directory
                        tag add one or more tags for the current or given revision
                        tags list repository tags
                        unbundle apply one or more changegroup files
                        verify verify the integrity of the repository
                        version output version and copyright information
                        $ get-with-headers.py 127.0.0.1:$HGPORT "help/add" 200 Script output follows Help: add

                        Help: add

                        hg add [OPTION]... [FILE]...

                        add the specified files on the next commit

                        Schedule files to be version controlled and added to the repository.

                        The files will be added to the repository at the next commit. To undo an add before that, see 'hg forget'.

                        If no names are given, add all files to the repository (except files matching ".hgignore").

                        Examples:

                        • New (unknown) files are added automatically by 'hg add':
                            \$ ls (re)
                            foo.c
                            \$ hg status (re)
                            ? foo.c
                            \$ hg add (re)
                            adding foo.c
                            \$ hg status (re)
                            A foo.c
                            
                        • Specific files to be added can be specified:
                            \$ ls (re)
                            bar.c  foo.c
                            \$ hg status (re)
                            ? bar.c
                            ? foo.c
                            \$ hg add bar.c (re)
                            \$ hg status (re)
                            A bar.c
                            ? foo.c
                            

                        Returns 0 if all files are successfully added.

                        options ([+] can be repeated):

                        -I --include PATTERN [+] include names matching the given patterns
                        -X --exclude PATTERN [+] exclude names matching the given patterns
                        -S --subrepos recurse into subrepositories
                        -n --dry-run do not perform actions, just print output

                        global options ([+] can be repeated):

                        -R --repository REPO repository root directory or name of overlay bundle file
                        --cwd DIR change working directory
                        -y --noninteractive do not prompt, automatically pick the first choice for all prompts
                        -q --quiet suppress output
                        -v --verbose enable additional output
                        --config CONFIG [+] set/override config option (use 'section.name=value')
                        --debug enable debugging output
                        --debugger start debugger
                        --encoding ENCODE set the charset encoding (default: ascii)
                        --encodingmode MODE set the charset encoding mode (default: strict)
                        --traceback always print a traceback on exception
                        --time time how long the command takes
                        --profile print command execution profile
                        --version output version information and exit
                        -h --help display help and exit
                        --hidden consider hidden changesets
                        $ get-with-headers.py 127.0.0.1:$HGPORT "help/remove" 200 Script output follows Help: remove

                        Help: remove

                        hg remove [OPTION]... FILE...

                        aliases: rm

                        remove the specified files on the next commit

                        Schedule the indicated files for removal from the current branch.

                        This command schedules the files to be removed at the next commit. To undo a remove before that, see 'hg revert'. To undo added files, see 'hg forget'.

                        -A/--after can be used to remove only files that have already been deleted, -f/--force can be used to force deletion, and -Af can be used to remove files from the next revision without deleting them from the working directory.

                        The following table details the behavior of remove for different file states (columns) and option combinations (rows). The file states are Added [A], Clean [C], Modified [M] and Missing [!] (as reported by 'hg status'). The actions are Warn, Remove (from branch) and Delete (from disk):

                        opt/state A C M !
                        none W RD W R
                        -f R RD RD R
                        -A W W W R
                        -Af R R R R

                        Note:

                        'hg remove' never deletes files in Added [A] state from the working directory, not even if "--force" is specified.

                        Returns 0 on success, 1 if any warnings encountered.

                        options ([+] can be repeated):

                        -A --after record delete for missing files
                        -f --force remove (and delete) file even if added or modified
                        -S --subrepos recurse into subrepositories
                        -I --include PATTERN [+] include names matching the given patterns
                        -X --exclude PATTERN [+] exclude names matching the given patterns

                        global options ([+] can be repeated):

                        -R --repository REPO repository root directory or name of overlay bundle file
                        --cwd DIR change working directory
                        -y --noninteractive do not prompt, automatically pick the first choice for all prompts
                        -q --quiet suppress output
                        -v --verbose enable additional output
                        --config CONFIG [+] set/override config option (use 'section.name=value')
                        --debug enable debugging output
                        --debugger start debugger
                        --encoding ENCODE set the charset encoding (default: ascii)
                        --encodingmode MODE set the charset encoding mode (default: strict)
                        --traceback always print a traceback on exception
                        --time time how long the command takes
                        --profile print command execution profile
                        --version output version information and exit
                        -h --help display help and exit
                        --hidden consider hidden changesets
                        $ get-with-headers.py 127.0.0.1:$HGPORT "help/revisions" 200 Script output follows Help: revisions

                        Help: revisions

                        Specifying Single Revisions

                        Mercurial supports several ways to specify individual revisions.

                        A plain integer is treated as a revision number. Negative integers are treated as sequential offsets from the tip, with -1 denoting the tip, -2 denoting the revision prior to the tip, and so forth.

                        A 40-digit hexadecimal string is treated as a unique revision identifier.

                        A hexadecimal string less than 40 characters long is treated as a unique revision identifier and is referred to as a short-form identifier. A short-form identifier is only valid if it is the prefix of exactly one full-length identifier.

                        Any other string is treated as a bookmark, tag, or branch name. A bookmark is a movable pointer to a revision. A tag is a permanent name associated with a revision. A branch name denotes the tipmost open branch head of that branch - or if they are all closed, the tipmost closed head of the branch. Bookmark, tag, and branch names must not contain the ":" character.

                        The reserved name "tip" always identifies the most recent revision.

                        The reserved name "null" indicates the null revision. This is the revision of an empty repository, and the parent of revision 0.

                        The reserved name "." indicates the working directory parent. If no working directory is checked out, it is equivalent to null. If an uncommitted merge is in progress, "." is the revision of the first parent.

                        Sub-topic indexes rendered properly $ get-with-headers.py 127.0.0.1:$HGPORT "help/internals" 200 Script output follows Help: internals

                        Topics

                        bundles container for exchange of repository data
                        changegroups representation of revlog data
                        revlogs revision storage mechanism
                        Sub-topic topics rendered properly $ get-with-headers.py 127.0.0.1:$HGPORT "help/internals.changegroups" 200 Script output follows Help: internals.changegroups

                        Help: internals.changegroups

                        representation of revlog data

                        Changegroups

                        Changegroups are representations of repository revlog data, specifically the changelog, manifest, and filelogs.

                        There are 3 versions of changegroups: "1", "2", and "3". From a high-level, versions "1" and "2" are almost exactly the same, with the only difference being a header on entries in the changeset segment. Version "3" adds support for exchanging treemanifests and includes revlog flags in the delta header.

                        Changegroups consists of 3 logical segments:

                          +---------------------------------+
                          |           |          |          |
                          | changeset | manifest | filelogs |
                          |           |          |          |
                          +---------------------------------+
                          

                        The principle building block of each segment is a *chunk*. A *chunk* is a framed piece of data:

                          +---------------------------------------+
                          |           |                           |
                          |  length   |           data            |
                          | (32 bits) |       <length> bytes      |
                          |           |                           |
                          +---------------------------------------+
                          

                        Each chunk starts with a 32-bit big-endian signed integer indicating the length of the raw data that follows.

                        There is a special case chunk that has 0 length ("0x00000000"). We call this an *empty chunk*.

                        Delta Groups

                        A *delta group* expresses the content of a revlog as a series of deltas, or patches against previous revisions.

                        Delta groups consist of 0 or more *chunks* followed by the *empty chunk* to signal the end of the delta group:

                          +------------------------------------------------------------------------+
                          |                |             |               |             |           |
                          | chunk0 length  | chunk0 data | chunk1 length | chunk1 data |    0x0    |
                          |   (32 bits)    |  (various)  |   (32 bits)   |  (various)  | (32 bits) |
                          |                |             |               |             |           |
                          +------------------------------------------------------------+-----------+
                          

                        Each *chunk*'s data consists of the following:

                          +-----------------------------------------+
                          |              |              |           |
                          | delta header | mdiff header |   delta   |
                          |  (various)   |  (12 bytes)  | (various) |
                          |              |              |           |
                          +-----------------------------------------+
                          

                        The *length* field is the byte length of the remaining 3 logical pieces of data. The *delta* is a diff from an existing entry in the changelog.

                        The *delta header* is different between versions "1", "2", and "3" of the changegroup format.

                        Version 1:

                          +------------------------------------------------------+
                          |            |             |             |             |
                          |    node    |   p1 node   |   p2 node   |  link node  |
                          | (20 bytes) |  (20 bytes) |  (20 bytes) |  (20 bytes) |
                          |            |             |             |             |
                          +------------------------------------------------------+
                          

                        Version 2:

                          +------------------------------------------------------------------+
                          |            |             |             |            |            |
                          |    node    |   p1 node   |   p2 node   | base node  | link node  |
                          | (20 bytes) |  (20 bytes) |  (20 bytes) | (20 bytes) | (20 bytes) |
                          |            |             |             |            |            |
                          +------------------------------------------------------------------+
                          

                        Version 3:

                          +------------------------------------------------------------------------------+
                          |            |             |             |            |            |           |
                          |    node    |   p1 node   |   p2 node   | base node  | link node  | flags     |
                          | (20 bytes) |  (20 bytes) |  (20 bytes) | (20 bytes) | (20 bytes) | (2 bytes) |
                          |            |             |             |            |            |           |
                          +------------------------------------------------------------------------------+
                          

                        The *mdiff header* consists of 3 32-bit big-endian signed integers describing offsets at which to apply the following delta content:

                          +-------------------------------------+
                          |           |            |            |
                          |  offset   | old length | new length |
                          | (32 bits) |  (32 bits) |  (32 bits) |
                          |           |            |            |
                          +-------------------------------------+
                          

                        In version 1, the delta is always applied against the previous node from the changegroup or the first parent if this is the first entry in the changegroup.

                        In version 2, the delta base node is encoded in the entry in the changegroup. This allows the delta to be expressed against any parent, which can result in smaller deltas and more efficient encoding of data.

                        Changeset Segment

                        The *changeset segment* consists of a single *delta group* holding changelog data. It is followed by an *empty chunk* to denote the boundary to the *manifests segment*.

                        Manifest Segment

                        The *manifest segment* consists of a single *delta group* holding manifest data. It is followed by an *empty chunk* to denote the boundary to the *filelogs segment*.

                        Filelogs Segment

                        The *filelogs* segment consists of multiple sub-segments, each corresponding to an individual file whose data is being described:

                          +--------------------------------------+
                          |          |          |          |     |
                          | filelog0 | filelog1 | filelog2 | ... |
                          |          |          |          |     |
                          +--------------------------------------+
                          

                        In version "3" of the changegroup format, filelogs may include directory logs when treemanifests are in use. directory logs are identified by having a trailing '/' on their filename (see below).

                        The final filelog sub-segment is followed by an *empty chunk* to denote the end of the segment and the overall changegroup.

                        Each filelog sub-segment consists of the following:

                          +------------------------------------------+
                          |               |            |             |
                          | filename size |  filename  | delta group |
                          |   (32 bits)   |  (various) |  (various)  |
                          |               |            |             |
                          +------------------------------------------+
                          

                        That is, a *chunk* consisting of the filename (not terminated or padded) followed by N chunks constituting the *delta group* for this file.

                        $ killdaemons.py #endif mercurial-3.7.3/tests/test-empty-group.t0000644000175000017500000000571512676531525017733 0ustar mpmmpm00000000000000# A B # # 3 4 3 # |\/| |\ # |/\| | \ # 1 2 1 2 # \ / \ / # 0 0 # # if the result of the merge of 1 and 2 # is the same in 3 and 4, no new manifest # will be created and the manifest group # will be empty during the pull # # (plus we test a failure where outgoing # wrongly reported the number of csets) $ hg init a $ cd a $ touch init $ hg ci -A -m 0 adding init $ touch x y $ hg ci -A -m 1 adding x adding y $ hg update 0 0 files updated, 0 files merged, 2 files removed, 0 files unresolved $ touch x y $ hg ci -A -m 2 adding x adding y created new head $ hg merge 1 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -A -m m1 $ hg update -C 1 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge 2 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -A -m m2 created new head $ cd .. 
$ hg clone -r 3 a b adding changesets adding manifests adding file changes added 4 changesets with 3 changes to 3 files updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg clone -r 4 a c adding changesets adding manifests adding file changes added 4 changesets with 3 changes to 3 files updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R a outgoing b comparing with b searching for changes changeset: 4:1ec3c74fc0e0 tag: tip parent: 1:79f9e10cd04e parent: 2:8e1bb01c1a24 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: m2 $ hg -R a outgoing c comparing with c searching for changes changeset: 3:d15a0c284984 parent: 2:8e1bb01c1a24 parent: 1:79f9e10cd04e user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: m1 $ hg -R b outgoing c comparing with c searching for changes changeset: 3:d15a0c284984 tag: tip parent: 2:8e1bb01c1a24 parent: 1:79f9e10cd04e user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: m1 $ hg -R c outgoing b comparing with b searching for changes changeset: 3:1ec3c74fc0e0 tag: tip parent: 1:79f9e10cd04e parent: 2:8e1bb01c1a24 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: m2 $ hg -R b pull a pulling from a searching for changes adding changesets adding manifests adding file changes added 1 changesets with 0 changes to 0 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg -R c pull a pulling from a searching for changes adding changesets adding manifests adding file changes added 1 changesets with 0 changes to 0 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) mercurial-3.7.3/tests/test-clonebundles.t0000644000175000017500000003775012676531525020124 0ustar mpmmpm00000000000000Set up a server $ cat >> $HGRCPATH << EOF > [format] > usegeneraldelta=yes > EOF $ hg init server $ cd server $ cat >> .hg/hgrc << EOF > [extensions] > clonebundles = > EOF $ touch foo $ hg -q commit -A -m 'add foo' $ 
touch bar $ hg -q commit -A -m 'add bar' $ hg serve -d -p $HGPORT --pid-file hg.pid --accesslog access.log $ cat hg.pid >> $DAEMON_PIDS $ cd .. Missing manifest should not result in server lookup $ hg --verbose clone -U http://localhost:$HGPORT no-manifest requesting all changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files $ cat server/access.log * - - [*] "GET /?cmd=capabilities HTTP/1.1" 200 - (glob) * - - [*] "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob) * - - [*] "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=aaff8d2ffbbf07a46dd1f05d8ae7877e3f56e2a2&listkeys=phase%2Cbookmarks (glob) * - - [*] "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases (glob) Empty manifest file results in retrieval (the extension only checks if the manifest file exists) $ touch server/.hg/clonebundles.manifest $ hg --verbose clone -U http://localhost:$HGPORT empty-manifest no clone bundles available on remote; falling back to regular clone requesting all changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files Manifest file with invalid URL aborts $ echo 'http://does.not.exist/bundle.hg' > server/.hg/clonebundles.manifest $ hg clone http://localhost:$HGPORT 404-url applying clone bundle from http://does.not.exist/bundle.hg error fetching bundle: (.* not known|getaddrinfo failed) (re) abort: error applying bundle (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false") [255] Server is not running aborts $ echo "http://localhost:$HGPORT1/bundle.hg" > 
server/.hg/clonebundles.manifest $ hg clone http://localhost:$HGPORT server-not-runner applying clone bundle from http://localhost:$HGPORT1/bundle.hg error fetching bundle: * refused* (glob) abort: error applying bundle (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false") [255] Server returns 404 $ python $TESTDIR/dumbhttp.py -p $HGPORT1 --pid http.pid $ cat http.pid >> $DAEMON_PIDS $ hg clone http://localhost:$HGPORT running-404 applying clone bundle from http://localhost:$HGPORT1/bundle.hg HTTP error fetching bundle: HTTP Error 404: File not found abort: error applying bundle (if this error persists, consider contacting the server operator or disable clone bundles via "--config ui.clonebundles=false") [255] We can override failure to fall back to regular clone $ hg --config ui.clonebundlefallback=true clone -U http://localhost:$HGPORT 404-fallback applying clone bundle from http://localhost:$HGPORT1/bundle.hg HTTP error fetching bundle: HTTP Error 404: File not found falling back to normal clone requesting all changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files Bundle with partial content works $ hg -R server bundle --type gzip-v1 --base null -r 53245c60e682 partial.hg 1 changesets found We verify exact bundle content as an extra check against accidental future changes. If this output changes, we could break old clients. 
$ f --size --hexdump partial.hg partial.hg: size=207 0000: 48 47 31 30 47 5a 78 9c 63 60 60 98 17 ac 12 93 |HG10GZx.c``.....| 0010: f0 ac a9 23 45 70 cb bf 0d 5f 59 4e 4a 7f 79 21 |...#Ep..._YNJ.y!| 0020: 9b cc 40 24 20 a0 d7 ce 2c d1 38 25 cd 24 25 d5 |..@$ ...,.8%.$%.| 0030: d8 c2 22 cd 38 d9 24 cd 22 d5 c8 22 cd 24 cd 32 |..".8.$."..".$.2| 0040: d1 c2 d0 c4 c8 d2 32 d1 38 39 29 c9 34 cd d4 80 |......2.89).4...| 0050: ab 24 b5 b8 84 cb 40 c1 80 2b 2d 3f 9f 8b 2b 31 |.$....@..+-?..+1| 0060: 25 45 01 c8 80 9a d2 9b 65 fb e5 9e 45 bf 8d 7f |%E......e...E...| 0070: 9f c6 97 9f 2b 44 34 67 d9 ec 8e 0f a0 92 0b 75 |....+D4g.......u| 0080: 41 d6 24 59 18 a4 a4 9a a6 18 1a 5b 98 9b 5a 98 |A.$Y.......[..Z.| 0090: 9a 18 26 9b a6 19 98 1a 99 99 26 a6 18 9a 98 24 |..&.......&....$| 00a0: 26 59 a6 25 5a 98 a5 18 a6 24 71 41 35 b1 43 dc |&Y.%Z....$qA5.C.| 00b0: 16 b2 83 f7 e9 45 8b d2 56 c7 a3 1f 82 52 d7 8a |.....E..V....R..| 00c0: 78 ed fc d5 76 f1 36 35 dc 05 00 36 ed 5e c7 |x...v.65...6.^.| $ echo "http://localhost:$HGPORT1/partial.hg" > server/.hg/clonebundles.manifest $ hg clone -U http://localhost:$HGPORT partial-bundle applying clone bundle from http://localhost:$HGPORT1/partial.hg adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files finished applying clone bundle searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files Incremental pull doesn't fetch bundle $ hg clone -r 53245c60e682 -U http://localhost:$HGPORT partial-clone adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files $ cd partial-clone $ hg pull pulling from http://localhost:$HGPORT/ searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (run 'hg update' to get a working copy) $ cd .. 
Bundle with full content works $ hg -R server bundle --type gzip-v2 --base null -r tip full.hg 2 changesets found Again, we perform an extra check against bundle content changes. If this content changes, clone bundles produced by new Mercurial versions may not be readable by old clients. $ f --size --hexdump full.hg full.hg: size=406 0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress| 0010: 69 6f 6e 3d 47 5a 78 9c 63 60 60 90 e5 76 f6 70 |ion=GZx.c``..v.p| 0020: f4 73 77 75 0f f2 0f 0d 60 00 02 46 06 76 a6 b2 |.swu....`..F.v..| 0030: d4 a2 e2 cc fc 3c 03 23 06 06 e6 65 40 b1 4d c1 |.....<.#...e@.M.| 0040: 2a 31 09 cf 9a 3a 52 04 b7 fc db f0 95 e5 a4 f4 |*1...:R.........| 0050: 97 17 b2 c9 0c 14 00 02 e6 d9 99 25 1a a7 a4 99 |...........%....| 0060: a4 a4 1a 5b 58 a4 19 27 9b a4 59 a4 1a 59 a4 99 |...[X..'..Y..Y..| 0070: a4 59 26 5a 18 9a 18 59 5a 26 1a 27 27 25 99 a6 |.Y&Z...YZ&.''%..| 0080: 99 1a 70 95 a4 16 97 70 19 28 18 70 a5 e5 e7 73 |..p....p.(.p...s| 0090: 71 25 a6 a4 28 00 19 40 13 0e ac fa df ab ff 7b |q%..(..@.......{| 00a0: 3f fb 92 dc 8b 1f 62 bb 9e b7 d7 d9 87 3d 5a 44 |?.....b......=ZD| 00b0: ac 2f b0 a9 c3 66 1e 54 b9 26 08 a7 1a 1b 1a a7 |./...f.T.&......| 00c0: 25 1b 9a 1b 99 19 9a 5a 18 9b a6 18 19 00 dd 67 |%......Z.......g| 00d0: 61 61 98 06 f4 80 49 4a 8a 65 52 92 41 9a 81 81 |aa....IJ.eR.A...| 00e0: a5 11 17 50 31 30 58 19 cc 80 98 25 29 b1 08 c4 |...P10X....%)...| 00f0: 37 07 79 19 88 d9 41 ee 07 8a 41 cd 5d 98 65 fb |7.y...A...A.].e.| 0100: e5 9e 45 bf 8d 7f 9f c6 97 9f 2b 44 34 67 d9 ec |..E.......+D4g..| 0110: 8e 0f a0 61 a8 eb 82 82 2e c9 c2 20 25 d5 34 c5 |...a....... 
%.4.| 0120: d0 d8 c2 dc d4 c2 d4 c4 30 d9 34 cd c0 d4 c8 cc |........0.4.....| 0130: 34 31 c5 d0 c4 24 31 c9 32 2d d1 c2 2c c5 30 25 |41...$1.2-..,.0%| 0140: 09 e4 ee 85 8f 85 ff 88 ab 89 36 c7 2a c4 47 34 |..........6.*.G4| 0150: fe f8 ec 7b 73 37 3f c3 24 62 1d 8d 4d 1d 9e 40 |...{s7?.$b..M..@| 0160: 06 3b 10 14 36 a4 38 10 04 d8 21 01 9a b1 83 f7 |.;..6.8...!.....| 0170: e9 45 8b d2 56 c7 a3 1f 82 52 d7 8a 78 ed fc d5 |.E..V....R..x...| 0180: 76 f1 36 25 81 89 c7 ad ec 90 34 48 75 2b 89 49 |v.6%......4Hu+.I| 0190: bf 00 d6 97 f0 8d |......| $ echo "http://localhost:$HGPORT1/full.hg" > server/.hg/clonebundles.manifest $ hg clone -U http://localhost:$HGPORT full-bundle applying clone bundle from http://localhost:$HGPORT1/full.hg adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files finished applying clone bundle searching for changes no changes found Feature works over SSH $ hg clone -U -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/server ssh-full-clone applying clone bundle from http://localhost:$HGPORT1/full.hg adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files finished applying clone bundle searching for changes no changes found Entry with unknown BUNDLESPEC is filtered and not used $ cat > server/.hg/clonebundles.manifest << EOF > http://bad.entry1 BUNDLESPEC=UNKNOWN > http://bad.entry2 BUNDLESPEC=xz-v1 > http://bad.entry3 BUNDLESPEC=none-v100 > http://localhost:$HGPORT1/full.hg BUNDLESPEC=gzip-v2 > EOF $ hg clone -U http://localhost:$HGPORT filter-unknown-type applying clone bundle from http://localhost:$HGPORT1/full.hg adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files finished applying clone bundle searching for changes no changes found Automatic fallback when all entries are filtered $ cat > server/.hg/clonebundles.manifest << EOF > http://bad.entry BUNDLESPEC=UNKNOWN > EOF $ hg clone -U 
http://localhost:$HGPORT filter-all no compatible clone bundles available on server; falling back to regular clone (you may want to report this to the server operator) requesting all changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files URLs requiring SNI are filtered in Python <2.7.9 $ cp full.hg sni.hg $ cat > server/.hg/clonebundles.manifest << EOF > http://localhost:$HGPORT1/sni.hg REQUIRESNI=true > http://localhost:$HGPORT1/full.hg > EOF #if sslcontext Python 2.7.9+ support SNI $ hg clone -U http://localhost:$HGPORT sni-supported applying clone bundle from http://localhost:$HGPORT1/sni.hg adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files finished applying clone bundle searching for changes no changes found #else Python <2.7.9 will filter SNI URLs $ hg clone -U http://localhost:$HGPORT sni-unsupported applying clone bundle from http://localhost:$HGPORT1/full.hg adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files finished applying clone bundle searching for changes no changes found #endif Stream clone bundles are supported $ hg -R server debugcreatestreamclonebundle packed.hg writing 613 bytes for 4 files bundle requirements: generaldelta, revlogv1 No bundle spec should work $ cat > server/.hg/clonebundles.manifest << EOF > http://localhost:$HGPORT1/packed.hg > EOF $ hg clone -U http://localhost:$HGPORT stream-clone-no-spec applying clone bundle from http://localhost:$HGPORT1/packed.hg 4 files to transfer, 613 bytes of data transferred 613 bytes in *.* seconds (*) (glob) finished applying clone bundle searching for changes no changes found Bundle spec without parameters should work $ cat > server/.hg/clonebundles.manifest << EOF > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1 > EOF $ hg clone -U http://localhost:$HGPORT stream-clone-vanilla-spec applying clone bundle from 
http://localhost:$HGPORT1/packed.hg 4 files to transfer, 613 bytes of data transferred 613 bytes in *.* seconds (*) (glob) finished applying clone bundle searching for changes no changes found Bundle spec with format requirements should work $ cat > server/.hg/clonebundles.manifest << EOF > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv1 > EOF $ hg clone -U http://localhost:$HGPORT stream-clone-supported-requirements applying clone bundle from http://localhost:$HGPORT1/packed.hg 4 files to transfer, 613 bytes of data transferred 613 bytes in *.* seconds (*) (glob) finished applying clone bundle searching for changes no changes found Stream bundle spec with unknown requirements should be filtered out $ cat > server/.hg/clonebundles.manifest << EOF > http://localhost:$HGPORT1/packed.hg BUNDLESPEC=none-packed1;requirements%3Drevlogv42 > EOF $ hg clone -U http://localhost:$HGPORT stream-clone-unsupported-requirements no compatible clone bundles available on server; falling back to regular clone (you may want to report this to the server operator) requesting all changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files Set up manifest for testing preferences (Remember, the TYPE does not have to match reality - the URL is important) $ cp full.hg gz-a.hg $ cp full.hg gz-b.hg $ cp full.hg bz2-a.hg $ cp full.hg bz2-b.hg $ cat > server/.hg/clonebundles.manifest << EOF > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2 extra=a > http://localhost:$HGPORT1/bz2-a.hg BUNDLESPEC=bzip2-v2 extra=a > http://localhost:$HGPORT1/gz-b.hg BUNDLESPEC=gzip-v2 extra=b > http://localhost:$HGPORT1/bz2-b.hg BUNDLESPEC=bzip2-v2 extra=b > EOF Preferring an undefined attribute will take first entry $ hg --config ui.clonebundleprefers=foo=bar clone -U http://localhost:$HGPORT prefer-foo applying clone bundle from http://localhost:$HGPORT1/gz-a.hg adding changesets adding manifests adding file changes added 2 
changesets with 2 changes to 2 files finished applying clone bundle searching for changes no changes found Preferring bz2 type will download first entry of that type $ hg --config ui.clonebundleprefers=COMPRESSION=bzip2 clone -U http://localhost:$HGPORT prefer-bz applying clone bundle from http://localhost:$HGPORT1/bz2-a.hg adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files finished applying clone bundle searching for changes no changes found Preferring multiple values of an option works $ hg --config ui.clonebundleprefers=COMPRESSION=unknown,COMPRESSION=bzip2 clone -U http://localhost:$HGPORT prefer-multiple-bz applying clone bundle from http://localhost:$HGPORT1/bz2-a.hg adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files finished applying clone bundle searching for changes no changes found Sorting multiple values should get us back to original first entry $ hg --config ui.clonebundleprefers=BUNDLESPEC=unknown,BUNDLESPEC=gzip-v2,BUNDLESPEC=bzip2-v2 clone -U http://localhost:$HGPORT prefer-multiple-gz applying clone bundle from http://localhost:$HGPORT1/gz-a.hg adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files finished applying clone bundle searching for changes no changes found Preferring multiple attributes has correct order $ hg --config ui.clonebundleprefers=extra=b,BUNDLESPEC=bzip2-v2 clone -U http://localhost:$HGPORT prefer-separate-attributes applying clone bundle from http://localhost:$HGPORT1/bz2-b.hg adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files finished applying clone bundle searching for changes no changes found Test where attribute is missing from some entries $ cat > server/.hg/clonebundles.manifest << EOF > http://localhost:$HGPORT1/gz-a.hg BUNDLESPEC=gzip-v2 > http://localhost:$HGPORT1/bz2-a.hg BUNDLESPEC=bzip2-v2 > http://localhost:$HGPORT1/gz-b.hg 
BUNDLESPEC=gzip-v2 extra=b > http://localhost:$HGPORT1/bz2-b.hg BUNDLESPEC=bzip2-v2 extra=b > EOF $ hg --config ui.clonebundleprefers=extra=b clone -U http://localhost:$HGPORT prefer-partially-defined-attribute applying clone bundle from http://localhost:$HGPORT1/gz-b.hg adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files finished applying clone bundle searching for changes no changes found mercurial-3.7.3/tests/test-setdiscovery.t0000644000175000017500000002662212676531525020166 0ustar mpmmpm00000000000000 Function to test discovery between two repos in both directions, using both the local shortcut (which is currently not activated by default) and the full remotable protocol: $ testdesc() { # revs_a, revs_b, dagdesc > if [ -d foo ]; then rm -rf foo; fi > hg init foo > cd foo > hg debugbuilddag "$3" > hg clone . a $1 --quiet > hg clone . b $2 --quiet > echo > echo "% -- a -> b tree" > hg -R a debugdiscovery b --verbose --old > echo > echo "% -- a -> b set" > hg -R a debugdiscovery b --verbose --debug --config progress.debug=true > echo > echo "% -- b -> a tree" > hg -R b debugdiscovery a --verbose --old --config > echo > echo "% -- b -> a set" > hg -R b debugdiscovery a --verbose --debug --config progress.debug=true > cd .. 
> } Small superset: $ testdesc '-ra1 -ra2' '-rb1 -rb2 -rb3' ' > +2:f +1:a1:b1 > +5 :b2 > b tree comparing with b searching for changes unpruned common: 01241442b3c2 66f7d451a68b b5714e113bc0 common heads: 01241442b3c2 b5714e113bc0 local is subset % -- a -> b set comparing with b query 1; heads searching for changes all local heads known remotely common heads: 01241442b3c2 b5714e113bc0 local is subset % -- b -> a tree comparing with a searching for changes unpruned common: 01241442b3c2 b5714e113bc0 common heads: 01241442b3c2 b5714e113bc0 remote is subset % -- b -> a set comparing with a query 1; heads searching for changes all remote heads known locally common heads: 01241442b3c2 b5714e113bc0 remote is subset Many new: $ testdesc '-ra1 -ra2' '-rb' ' > +2:f +3:a1 +3:b > b tree comparing with b searching for changes unpruned common: bebd167eb94d common heads: bebd167eb94d % -- a -> b set comparing with b query 1; heads searching for changes taking initial sample searching: 2 queries query 2; still undecided: 29, sample size is: 29 2 total queries common heads: bebd167eb94d % -- b -> a tree comparing with a searching for changes unpruned common: 66f7d451a68b bebd167eb94d common heads: bebd167eb94d % -- b -> a set comparing with a query 1; heads searching for changes taking initial sample searching: 2 queries query 2; still undecided: 2, sample size is: 2 2 total queries common heads: bebd167eb94d Both sides many new with stub: $ testdesc '-ra1 -ra2' '-rb' ' > +2:f +2:a1 +30 :b > b tree comparing with b searching for changes unpruned common: 2dc09a01254d common heads: 2dc09a01254d % -- a -> b set comparing with b query 1; heads searching for changes taking initial sample searching: 2 queries query 2; still undecided: 29, sample size is: 29 2 total queries common heads: 2dc09a01254d % -- b -> a tree comparing with a searching for changes unpruned common: 2dc09a01254d 66f7d451a68b common heads: 2dc09a01254d % -- b -> a set comparing with a query 1; heads searching for 
changes taking initial sample searching: 2 queries query 2; still undecided: 29, sample size is: 29 2 total queries common heads: 2dc09a01254d Both many new: $ testdesc '-ra' '-rb' ' > +2:f +30 :b > b tree comparing with b searching for changes unpruned common: 66f7d451a68b common heads: 66f7d451a68b % -- a -> b set comparing with b query 1; heads searching for changes taking quick initial sample searching: 2 queries query 2; still undecided: 31, sample size is: 31 2 total queries common heads: 66f7d451a68b % -- b -> a tree comparing with a searching for changes unpruned common: 66f7d451a68b common heads: 66f7d451a68b % -- b -> a set comparing with a query 1; heads searching for changes taking quick initial sample searching: 2 queries query 2; still undecided: 31, sample size is: 31 2 total queries common heads: 66f7d451a68b Both many new skewed: $ testdesc '-ra' '-rb' ' > +2:f +30 :b > b tree comparing with b searching for changes unpruned common: 66f7d451a68b common heads: 66f7d451a68b % -- a -> b set comparing with b query 1; heads searching for changes taking quick initial sample searching: 2 queries query 2; still undecided: 51, sample size is: 51 2 total queries common heads: 66f7d451a68b % -- b -> a tree comparing with a searching for changes unpruned common: 66f7d451a68b common heads: 66f7d451a68b % -- b -> a set comparing with a query 1; heads searching for changes taking quick initial sample searching: 2 queries query 2; still undecided: 31, sample size is: 31 2 total queries common heads: 66f7d451a68b Both many new on top of long history: $ testdesc '-ra' '-rb' ' > +1000:f +30 :b > b tree comparing with b searching for changes unpruned common: 7ead0cba2838 common heads: 7ead0cba2838 % -- a -> b set comparing with b query 1; heads searching for changes taking quick initial sample searching: 2 queries query 2; still undecided: 1049, sample size is: 11 sampling from both directions searching: 3 queries query 3; still undecided: 31, sample size is: 31 3 
total queries common heads: 7ead0cba2838 % -- b -> a tree comparing with a searching for changes unpruned common: 7ead0cba2838 common heads: 7ead0cba2838 % -- b -> a set comparing with a query 1; heads searching for changes taking quick initial sample searching: 2 queries query 2; still undecided: 1029, sample size is: 11 sampling from both directions searching: 3 queries query 3; still undecided: 15, sample size is: 15 3 total queries common heads: 7ead0cba2838 One with >200 heads, which used to use up all of the sample: $ hg init manyheads $ cd manyheads $ echo "+300:r @a" >dagdesc $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 *r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads $ echo "*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3 
*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3*r+3" >>dagdesc # 20 heads $ echo "@b *r+3" >>dagdesc # one more head $ hg debugbuilddag > $DAEMON_PIDS $ hg -R b incoming http://localhost:$HGPORT/ -T '{node|short}\n' comparing with http://localhost:$HGPORT/ searching for changes e64a39e7da8b $ killdaemons.py $ cut -d' ' -f6- access.log | grep -v cmd=known # cmd=known uses random sampling "GET /?cmd=capabilities HTTP/1.1" 200 - "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D513314ca8b3ae4dac8eec56966265b00fcf866db "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=513314ca8b3ae4dac8eec56966265b00fcf866db&heads=e64a39e7da8b0d54bc63e81169aff001c13b3477 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases $ cat errors.log $ cd .. Issue 4438 - test coverage for 3ef893520a85 issues. 
$ mkdir issue4438 $ cd issue4438 #if false generate new bundles: $ hg init r1 $ for i in `python $TESTDIR/seq.py 101`; do hg -R r1 up -qr null && hg -R r1 branch -q b$i && hg -R r1 ci -qmb$i; done $ hg clone -q r1 r2 $ for i in `python $TESTDIR/seq.py 10`; do hg -R r1 up -qr null && hg -R r1 branch -q c$i && hg -R r1 ci -qmc$i; done $ hg -R r2 branch -q r2change && hg -R r2 ci -qmr2change $ hg -R r1 bundle -qa $TESTDIR/bundles/issue4438-r1.hg $ hg -R r2 bundle -qa $TESTDIR/bundles/issue4438-r2.hg #else use existing bundles: $ hg clone -q $TESTDIR/bundles/issue4438-r1.hg r1 $ hg clone -q $TESTDIR/bundles/issue4438-r2.hg r2 #endif Set iteration order could cause wrong and unstable results - fixed in 73cfaa348650: $ hg -R r1 outgoing r2 -T'{rev} ' comparing with r2 searching for changes 101 102 103 104 105 106 107 108 109 110 (no-eol) The case where all the 'initialsamplesize' samples already were common would give 'all remote heads known locally' without checking the remaining heads - fixed in 86c35b7ae300: $ cat >> $TESTTMP/unrandomsample.py << EOF > import random > def sample(population, k): > return sorted(population)[:k] > random.sample = sample > EOF $ cat >> r1/.hg/hgrc << EOF > [extensions] > unrandomsample = $TESTTMP/unrandomsample.py > EOF $ hg -R r1 outgoing r2 -T'{rev} ' comparing with r2 searching for changes 101 102 103 104 105 106 107 108 109 110 (no-eol) $ cd .. 
mercurial-3.7.3/tests/test-bisect.t0000644000175000017500000004114712676531525016713 0ustar mpmmpm00000000000000 $ hg init committing changes $ count=0 $ echo > a $ while test $count -lt 32 ; do > echo 'a' >> a > test $count -eq 0 && hg add > hg ci -m "msg $count" -d "$count 0" > count=`expr $count + 1` > done adding a $ hg log changeset: 31:58c80a7c8a40 tag: tip user: test date: Thu Jan 01 00:00:31 1970 +0000 summary: msg 31 changeset: 30:ed2d2f24b11c user: test date: Thu Jan 01 00:00:30 1970 +0000 summary: msg 30 changeset: 29:b5bd63375ab9 user: test date: Thu Jan 01 00:00:29 1970 +0000 summary: msg 29 changeset: 28:8e0c2264c8af user: test date: Thu Jan 01 00:00:28 1970 +0000 summary: msg 28 changeset: 27:288867a866e9 user: test date: Thu Jan 01 00:00:27 1970 +0000 summary: msg 27 changeset: 26:3efc6fd51aeb user: test date: Thu Jan 01 00:00:26 1970 +0000 summary: msg 26 changeset: 25:02a84173a97a user: test date: Thu Jan 01 00:00:25 1970 +0000 summary: msg 25 changeset: 24:10e0acd3809e user: test date: Thu Jan 01 00:00:24 1970 +0000 summary: msg 24 changeset: 23:5ec79163bff4 user: test date: Thu Jan 01 00:00:23 1970 +0000 summary: msg 23 changeset: 22:06c7993750ce user: test date: Thu Jan 01 00:00:22 1970 +0000 summary: msg 22 changeset: 21:e5db6aa3fe2a user: test date: Thu Jan 01 00:00:21 1970 +0000 summary: msg 21 changeset: 20:7128fb4fdbc9 user: test date: Thu Jan 01 00:00:20 1970 +0000 summary: msg 20 changeset: 19:52798545b482 user: test date: Thu Jan 01 00:00:19 1970 +0000 summary: msg 19 changeset: 18:86977a90077e user: test date: Thu Jan 01 00:00:18 1970 +0000 summary: msg 18 changeset: 17:03515f4a9080 user: test date: Thu Jan 01 00:00:17 1970 +0000 summary: msg 17 changeset: 16:a2e6ea4973e9 user: test date: Thu Jan 01 00:00:16 1970 +0000 summary: msg 16 changeset: 15:e7fa0811edb0 user: test date: Thu Jan 01 00:00:15 1970 +0000 summary: msg 15 changeset: 14:ce8f0998e922 user: test date: Thu Jan 01 00:00:14 1970 +0000 summary: msg 14 changeset: 
13:9d7d07bc967c user: test date: Thu Jan 01 00:00:13 1970 +0000 summary: msg 13 changeset: 12:1941b52820a5 user: test date: Thu Jan 01 00:00:12 1970 +0000 summary: msg 12 changeset: 11:7b4cd9578619 user: test date: Thu Jan 01 00:00:11 1970 +0000 summary: msg 11 changeset: 10:7c5eff49a6b6 user: test date: Thu Jan 01 00:00:10 1970 +0000 summary: msg 10 changeset: 9:eb44510ef29a user: test date: Thu Jan 01 00:00:09 1970 +0000 summary: msg 9 changeset: 8:453eb4dba229 user: test date: Thu Jan 01 00:00:08 1970 +0000 summary: msg 8 changeset: 7:03750880c6b5 user: test date: Thu Jan 01 00:00:07 1970 +0000 summary: msg 7 changeset: 6:a3d5c6fdf0d3 user: test date: Thu Jan 01 00:00:06 1970 +0000 summary: msg 6 changeset: 5:7874a09ea728 user: test date: Thu Jan 01 00:00:05 1970 +0000 summary: msg 5 changeset: 4:9b2ba8336a65 user: test date: Thu Jan 01 00:00:04 1970 +0000 summary: msg 4 changeset: 3:b53bea5e2fcb user: test date: Thu Jan 01 00:00:03 1970 +0000 summary: msg 3 changeset: 2:db07c04beaca user: test date: Thu Jan 01 00:00:02 1970 +0000 summary: msg 2 changeset: 1:5cd978ea5149 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: msg 1 changeset: 0:b99c7b9c8e11 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: msg 0 $ hg up -C 0 files updated, 0 files merged, 0 files removed, 0 files unresolved bisect test $ hg bisect -r $ hg bisect -b $ hg summary parent: 31:58c80a7c8a40 tip msg 31 branch: default commit: (clean) update: (current) phases: 32 draft $ hg bisect -g 1 Testing changeset 16:a2e6ea4973e9 (30 changesets remaining, ~4 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g Testing changeset 23:5ec79163bff4 (15 changesets remaining, ~3 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved skip $ hg bisect -s Testing changeset 24:10e0acd3809e (15 changesets remaining, ~3 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g Testing changeset 27:288867a866e9 
(7 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g Testing changeset 29:b5bd63375ab9 (4 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -b Testing changeset 28:8e0c2264c8af (2 changesets remaining, ~1 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g The first bad revision is: changeset: 29:b5bd63375ab9 user: test date: Thu Jan 01 00:00:29 1970 +0000 summary: msg 29 mark revsets instead of single revs $ hg bisect -r $ hg bisect -b "0::3" $ hg bisect -s "13::16" $ hg bisect -g "26::tip" Testing changeset 12:1941b52820a5 (23 changesets remaining, ~4 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat .hg/bisect.state bad b99c7b9c8e11558adef3fad9af211c58d46f325b bad 5cd978ea51499179507ee7b6f340d2dbaa401185 bad db07c04beaca44cf24832541e7f4a2346a95275b bad b53bea5e2fcb30d3e00bd3409507a5659ce0fd8b current 1941b52820a544549596820a8ae006842b0e2c64 good 3efc6fd51aeb8594398044c6c846ca59ae021203 good 288867a866e9adb7a29880b66936c874b80f4651 good 8e0c2264c8af790daf3585ada0669d93dee09c83 good b5bd63375ab9a290419f2024b7f4ee9ea7ce90a8 good ed2d2f24b11c368fa8aa0da9f4e1db580abade59 good 58c80a7c8a4025a94cedaf7b4a4e3124e8909a96 skip 9d7d07bc967ca98ad0600c24953fd289ad5fa991 skip ce8f0998e922c179e80819d5066fbe46e2998784 skip e7fa0811edb063f6319531f0d0a865882138e180 skip a2e6ea4973e9196ddd3386493b0c214b41fd97d3 bisect reverse test $ hg bisect -r $ hg bisect -b null $ hg bisect -g tip Testing changeset 15:e7fa0811edb0 (32 changesets remaining, ~5 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g Testing changeset 7:03750880c6b5 (16 changesets remaining, ~4 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved skip $ hg bisect -s Testing changeset 6:a3d5c6fdf0d3 (16 changesets remaining, ~4 tests) 1 files updated, 0 files 
merged, 0 files removed, 0 files unresolved $ hg bisect -g Testing changeset 2:db07c04beaca (7 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g Testing changeset 0:b99c7b9c8e11 (3 changesets remaining, ~1 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -b Testing changeset 1:5cd978ea5149 (2 changesets remaining, ~1 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g The first good revision is: changeset: 1:5cd978ea5149 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: msg 1 $ hg bisect -r $ hg bisect -g tip $ hg bisect -b tip abort: inconsistent state, 31:58c80a7c8a40 is good and bad [255] $ hg bisect -r $ hg bisect -g null $ hg bisect -bU tip Testing changeset 15:e7fa0811edb0 (32 changesets remaining, ~5 tests) $ hg id 5cd978ea5149 Issue1228: hg bisect crashes when you skip the last rev in bisection Issue1182: hg bisect exception $ hg bisect -r $ hg bisect -b 4 $ hg bisect -g 0 Testing changeset 2:db07c04beaca (4 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -s Testing changeset 1:5cd978ea5149 (4 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -s Testing changeset 3:b53bea5e2fcb (4 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -s Due to skipped revisions, the first bad revision could be any of: changeset: 1:5cd978ea5149 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: msg 1 changeset: 2:db07c04beaca user: test date: Thu Jan 01 00:00:02 1970 +0000 summary: msg 2 changeset: 3:b53bea5e2fcb user: test date: Thu Jan 01 00:00:03 1970 +0000 summary: msg 3 changeset: 4:9b2ba8336a65 user: test date: Thu Jan 01 00:00:04 1970 +0000 summary: msg 4 reproduce non converging bisect, issue1182 $ hg bisect -r $ hg bisect -g 0 $ 
hg bisect -b 2 Testing changeset 1:5cd978ea5149 (2 changesets remaining, ~1 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -s Due to skipped revisions, the first bad revision could be any of: changeset: 1:5cd978ea5149 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: msg 1 changeset: 2:db07c04beaca user: test date: Thu Jan 01 00:00:02 1970 +0000 summary: msg 2 test no action $ hg bisect -r $ hg bisect abort: cannot bisect (no known good revisions) [255] reproduce AssertionError, issue1445 $ hg bisect -r $ hg bisect -b 6 $ hg bisect -g 0 Testing changeset 3:b53bea5e2fcb (6 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -s Testing changeset 2:db07c04beaca (6 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -s Testing changeset 4:9b2ba8336a65 (6 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -s Testing changeset 1:5cd978ea5149 (6 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -s Testing changeset 5:7874a09ea728 (6 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect -g The first bad revision is: changeset: 6:a3d5c6fdf0d3 user: test date: Thu Jan 01 00:00:06 1970 +0000 summary: msg 6 $ hg log -r "bisect(good)" changeset: 0:b99c7b9c8e11 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: msg 0 changeset: 5:7874a09ea728 user: test date: Thu Jan 01 00:00:05 1970 +0000 summary: msg 5 $ hg log -r "bisect(bad)" changeset: 6:a3d5c6fdf0d3 user: test date: Thu Jan 01 00:00:06 1970 +0000 summary: msg 6 $ hg log -r "bisect(current)" changeset: 5:7874a09ea728 user: test date: Thu Jan 01 00:00:05 1970 +0000 summary: msg 5 $ hg log -r "bisect(skip)" changeset: 1:5cd978ea5149 user: test date: Thu Jan 01 00:00:01 
1970 +0000 summary: msg 1 changeset: 2:db07c04beaca user: test date: Thu Jan 01 00:00:02 1970 +0000 summary: msg 2 changeset: 3:b53bea5e2fcb user: test date: Thu Jan 01 00:00:03 1970 +0000 summary: msg 3 changeset: 4:9b2ba8336a65 user: test date: Thu Jan 01 00:00:04 1970 +0000 summary: msg 4 test legacy bisected() keyword $ hg log -r "bisected(bad)" changeset: 6:a3d5c6fdf0d3 user: test date: Thu Jan 01 00:00:06 1970 +0000 summary: msg 6 $ set +e test invalid command assuming that the shell returns 127 if command not found ... $ hg bisect -r $ hg bisect --command 'exit 127' abort: failed to execute exit 127 [255] test bisecting command $ cat > script.py < #!/usr/bin/env python > import sys > from mercurial import ui, hg > repo = hg.repository(ui.ui(), '.') > if repo['.'].rev() < 6: > sys.exit(1) > EOF $ chmod +x script.py $ hg bisect -r $ hg up -qr tip $ hg bisect --command "python \"$TESTTMP/script.py\" and some parameters" changeset 31:58c80a7c8a40: good abort: cannot bisect (no known bad revisions) [255] $ hg up -qr 0 $ hg bisect --command "python \"$TESTTMP/script.py\" and some parameters" changeset 0:b99c7b9c8e11: bad changeset 15:e7fa0811edb0: good changeset 7:03750880c6b5: good changeset 3:b53bea5e2fcb: bad changeset 5:7874a09ea728: bad changeset 6:a3d5c6fdf0d3: good The first good revision is: changeset: 6:a3d5c6fdf0d3 user: test date: Thu Jan 01 00:00:06 1970 +0000 summary: msg 6 test bisecting via a command without updating the working dir, and ensure that the bisect state file is updated before running a test command $ hg update null 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ cat > script.sh <<'EOF' > #!/bin/sh > test -n "$HG_NODE" || (echo HG_NODE missing; exit 127) > current="`hg log -r \"bisect(current)\" --template {node}`" > test "$current" = "$HG_NODE" || (echo current is bad: $current; exit 127) > rev="`hg log -r $HG_NODE --template {rev}`" > test "$rev" -ge 6 > EOF $ chmod +x script.sh $ hg bisect -r $ hg bisect --good 
tip --noupdate $ hg bisect --bad 0 --noupdate Testing changeset 15:e7fa0811edb0 (31 changesets remaining, ~4 tests) $ hg bisect --command "sh \"$TESTTMP/script.sh\" and some params" --noupdate changeset 15:e7fa0811edb0: good changeset 7:03750880c6b5: good changeset 3:b53bea5e2fcb: bad changeset 5:7874a09ea728: bad changeset 6:a3d5c6fdf0d3: good The first good revision is: changeset: 6:a3d5c6fdf0d3 user: test date: Thu Jan 01 00:00:06 1970 +0000 summary: msg 6 ensure that we still don't have a working dir $ hg parents test the same case, this time with updating $ cat > script.sh <<'EOF' > #!/bin/sh > test -n "$HG_NODE" || (echo HG_NODE missing; exit 127) > current="`hg log -r \"bisect(current)\" --template {node}`" > test "$current" = "$HG_NODE" || (echo current is bad: $current; exit 127) > rev="`hg log -r . --template {rev}`" > test "$rev" -ge 6 > EOF $ chmod +x script.sh $ hg bisect -r $ hg up -qr tip $ hg bisect --command "sh \"$TESTTMP/script.sh\" and some params" changeset 31:58c80a7c8a40: good abort: cannot bisect (no known bad revisions) [255] $ hg up -qr 0 $ hg bisect --command "sh \"$TESTTMP/script.sh\" and some params" changeset 0:b99c7b9c8e11: bad changeset 15:e7fa0811edb0: good changeset 7:03750880c6b5: good changeset 3:b53bea5e2fcb: bad changeset 5:7874a09ea728: bad changeset 6:a3d5c6fdf0d3: good The first good revision is: changeset: 6:a3d5c6fdf0d3 user: test date: Thu Jan 01 00:00:06 1970 +0000 summary: msg 6 Check that bisect does not break on obsolete changesets ========================================================= $ cat >> $HGRCPATH << EOF > [experimental] > evolution=createmarkers > EOF tip is obsolete --------------------- $ hg debugobsolete `hg id --debug -i -r tip` $ hg bisect --reset $ hg bisect --good 15 $ hg bisect --bad 30 Testing changeset 22:06c7993750ce (15 changesets remaining, ~3 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect --command true changeset 22:06c7993750ce: good changeset 
26:3efc6fd51aeb: good changeset 28:8e0c2264c8af: good changeset 29:b5bd63375ab9: good The first bad revision is: changeset: 30:ed2d2f24b11c tag: tip user: test date: Thu Jan 01 00:00:30 1970 +0000 summary: msg 30 Changeset in the bad:good range is obsolete --------------------------------------------- $ hg up 30 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo 'a' >> a $ hg ci -m "msg 32" -d "32 0" $ hg bisect --reset $ hg bisect --good . $ hg bisect --bad 25 Testing changeset 28:8e0c2264c8af (6 changesets remaining, ~2 tests) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bisect --command true changeset 28:8e0c2264c8af: good changeset 26:3efc6fd51aeb: good The first good revision is: changeset: 26:3efc6fd51aeb user: test date: Thu Jan 01 00:00:26 1970 +0000 summary: msg 26 mercurial-3.7.3/tests/test-ancestor.py.out0000644000175000017500000000125612676531525020250 0ustar mpmmpm00000000000000% lazy ancestor set for [], stoprev = 0, inclusive = False membership: [] iteration: [] % lazy ancestor set for [11, 13], stoprev = 0, inclusive = False membership: [7, 8, 3, 4, 1, 0] iteration: [3, 7, 8, 1, 4, 0, 2] % lazy ancestor set for [1, 3], stoprev = 0, inclusive = False membership: [1, 0] iteration: [0, 1] % lazy ancestor set for [11, 13], stoprev = 0, inclusive = True membership: [11, 13, 7, 8, 3, 4, 1, 0] iteration: [11, 13, 3, 7, 8, 1, 4, 0, 2] % lazy ancestor set for [11, 13], stoprev = 6, inclusive = False membership: [7, 8] iteration: [7, 8] % lazy ancestor set for [11, 13], stoprev = 6, inclusive = True membership: [11, 13, 7, 8] iteration: [11, 13, 7, 8] mercurial-3.7.3/tests/test-update-issue1456.t0000644000175000017500000000153212676531525020364 0ustar mpmmpm00000000000000#require execbit $ rm -rf a $ hg init a $ cd a $ echo foo > foo $ hg ci -qAm0 $ echo toremove > toremove $ echo todelete > todelete $ chmod +x foo toremove todelete $ hg ci -qAm1 Test that local removed/deleted, remote removed works with 
flags $ hg rm toremove $ rm todelete $ hg co -q 0 $ echo dirty > foo $ hg up -c abort: uncommitted changes [255] $ hg up -q $ cat foo dirty $ hg st -A M foo C todelete C toremove Validate update of standalone execute bit change: $ hg up -C 0 1 files updated, 0 files merged, 2 files removed, 0 files unresolved $ chmod -x foo $ hg ci -m removeexec nothing changed [1] $ hg up -C 0 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg up 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg st $ cd .. mercurial-3.7.3/tests/test-log.t0000644000175000017500000014156412676531525016227 0ustar mpmmpm00000000000000Log on empty repository: checking consistency $ hg init empty $ cd empty $ hg log $ hg log -r 1 abort: unknown revision '1'! [255] $ hg log -r -1:0 abort: unknown revision '-1'! [255] $ hg log -r 'branch(name)' abort: unknown revision 'name'! [255] $ hg log -r null -q -1:000000000000 The g is crafted to have 2 filelog topological heads in a linear changeset graph $ hg init a $ cd a $ echo a > a $ echo f > f $ hg ci -Ama -d '1 0' adding a adding f $ hg cp a b $ hg cp f g $ hg ci -mb -d '2 0' $ mkdir dir $ hg mv b dir $ echo g >> g $ echo f >> f $ hg ci -mc -d '3 0' $ hg mv a b $ hg cp -f f g $ echo a > d $ hg add d $ hg ci -md -d '4 0' $ hg mv dir/b e $ hg ci -me -d '5 0' Make sure largefiles doesn't interfere with logging a regular file $ hg --debug log a -T '{rev}: {desc}\n' --config extensions.largefiles= updated patterns: ['.hglf/a', 'a'] 0: a $ hg log a changeset: 0:9161b9aeaf16 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: a $ hg log glob:a* changeset: 3:2ca5ba701980 user: test date: Thu Jan 01 00:00:04 1970 +0000 summary: d changeset: 0:9161b9aeaf16 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: a $ hg --debug log glob:a* -T '{rev}: {desc}\n' --config extensions.largefiles= updated patterns: ['glob:.hglf/a*', 'glob:a*'] 3: d 0: a log on directory $ hg log dir changeset: 4:7e4639b4691b tag: tip user: 
test date: Thu Jan 01 00:00:05 1970 +0000 summary: e changeset: 2:f8954cd4dc1f user: test date: Thu Jan 01 00:00:03 1970 +0000 summary: c $ hg log somethingthatdoesntexist dir changeset: 4:7e4639b4691b tag: tip user: test date: Thu Jan 01 00:00:05 1970 +0000 summary: e changeset: 2:f8954cd4dc1f user: test date: Thu Jan 01 00:00:03 1970 +0000 summary: c -f, non-existent directory $ hg log -f dir abort: cannot follow file not in parent revision: "dir" [255] -f, directory $ hg up -q 3 $ hg log -f dir changeset: 2:f8954cd4dc1f user: test date: Thu Jan 01 00:00:03 1970 +0000 summary: c -f, directory with --patch $ hg log -f dir -p changeset: 2:f8954cd4dc1f user: test date: Thu Jan 01 00:00:03 1970 +0000 summary: c diff -r d89b0a12d229 -r f8954cd4dc1f dir/b --- /dev/null* (glob) +++ b/dir/b* (glob) @@ -0,0 +1,1 @@ +a -f, pattern $ hg log -f -I 'dir**' -p changeset: 2:f8954cd4dc1f user: test date: Thu Jan 01 00:00:03 1970 +0000 summary: c diff -r d89b0a12d229 -r f8954cd4dc1f dir/b --- /dev/null* (glob) +++ b/dir/b* (glob) @@ -0,0 +1,1 @@ +a $ hg up -q 4 -f, a wrong style $ hg log -f -l1 --style something abort: style 'something' not found (available styles: bisect, changelog, compact, default, phases, status, xml) [255] -f, phases style $ hg log -f -l1 --style phases changeset: 4:7e4639b4691b tag: tip phase: draft user: test date: Thu Jan 01 00:00:05 1970 +0000 summary: e $ hg log -f -l1 --style phases -q 4:7e4639b4691b -f, but no args $ hg log -f changeset: 4:7e4639b4691b tag: tip user: test date: Thu Jan 01 00:00:05 1970 +0000 summary: e changeset: 3:2ca5ba701980 user: test date: Thu Jan 01 00:00:04 1970 +0000 summary: d changeset: 2:f8954cd4dc1f user: test date: Thu Jan 01 00:00:03 1970 +0000 summary: c changeset: 1:d89b0a12d229 user: test date: Thu Jan 01 00:00:02 1970 +0000 summary: b changeset: 0:9161b9aeaf16 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: a one rename $ hg up -q 2 $ hg log -vf a changeset: 0:9161b9aeaf16 user: test date: Thu Jan 01 
00:00:01 1970 +0000 files: a f description: a many renames $ hg up -q tip $ hg log -vf e changeset: 4:7e4639b4691b tag: tip user: test date: Thu Jan 01 00:00:05 1970 +0000 files: dir/b e description: e changeset: 2:f8954cd4dc1f user: test date: Thu Jan 01 00:00:03 1970 +0000 files: b dir/b f g description: c changeset: 1:d89b0a12d229 user: test date: Thu Jan 01 00:00:02 1970 +0000 files: b g description: b changeset: 0:9161b9aeaf16 user: test date: Thu Jan 01 00:00:01 1970 +0000 files: a f description: a log -pf dir/b $ hg up -q 3 $ hg log -pf dir/b changeset: 2:f8954cd4dc1f user: test date: Thu Jan 01 00:00:03 1970 +0000 summary: c diff -r d89b0a12d229 -r f8954cd4dc1f dir/b --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dir/b Thu Jan 01 00:00:03 1970 +0000 @@ -0,0 +1,1 @@ +a changeset: 1:d89b0a12d229 user: test date: Thu Jan 01 00:00:02 1970 +0000 summary: b diff -r 9161b9aeaf16 -r d89b0a12d229 b --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/b Thu Jan 01 00:00:02 1970 +0000 @@ -0,0 +1,1 @@ +a changeset: 0:9161b9aeaf16 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: a diff -r 000000000000 -r 9161b9aeaf16 a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:01 1970 +0000 @@ -0,0 +1,1 @@ +a log -pf b inside dir $ hg --cwd=dir log -pf b changeset: 2:f8954cd4dc1f user: test date: Thu Jan 01 00:00:03 1970 +0000 summary: c diff -r d89b0a12d229 -r f8954cd4dc1f dir/b --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dir/b Thu Jan 01 00:00:03 1970 +0000 @@ -0,0 +1,1 @@ +a changeset: 1:d89b0a12d229 user: test date: Thu Jan 01 00:00:02 1970 +0000 summary: b diff -r 9161b9aeaf16 -r d89b0a12d229 b --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/b Thu Jan 01 00:00:02 1970 +0000 @@ -0,0 +1,1 @@ +a changeset: 0:9161b9aeaf16 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: a diff -r 000000000000 -r 9161b9aeaf16 a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:01 1970 +0000 @@ -0,0 +1,1 @@ +a log -pf, but no 
args $ hg log -pf changeset: 3:2ca5ba701980 user: test date: Thu Jan 01 00:00:04 1970 +0000 summary: d diff -r f8954cd4dc1f -r 2ca5ba701980 a --- a/a Thu Jan 01 00:00:03 1970 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -a diff -r f8954cd4dc1f -r 2ca5ba701980 b --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/b Thu Jan 01 00:00:04 1970 +0000 @@ -0,0 +1,1 @@ +a diff -r f8954cd4dc1f -r 2ca5ba701980 d --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/d Thu Jan 01 00:00:04 1970 +0000 @@ -0,0 +1,1 @@ +a diff -r f8954cd4dc1f -r 2ca5ba701980 g --- a/g Thu Jan 01 00:00:03 1970 +0000 +++ b/g Thu Jan 01 00:00:04 1970 +0000 @@ -1,2 +1,2 @@ f -g +f changeset: 2:f8954cd4dc1f user: test date: Thu Jan 01 00:00:03 1970 +0000 summary: c diff -r d89b0a12d229 -r f8954cd4dc1f b --- a/b Thu Jan 01 00:00:02 1970 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -a diff -r d89b0a12d229 -r f8954cd4dc1f dir/b --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/dir/b Thu Jan 01 00:00:03 1970 +0000 @@ -0,0 +1,1 @@ +a diff -r d89b0a12d229 -r f8954cd4dc1f f --- a/f Thu Jan 01 00:00:02 1970 +0000 +++ b/f Thu Jan 01 00:00:03 1970 +0000 @@ -1,1 +1,2 @@ f +f diff -r d89b0a12d229 -r f8954cd4dc1f g --- a/g Thu Jan 01 00:00:02 1970 +0000 +++ b/g Thu Jan 01 00:00:03 1970 +0000 @@ -1,1 +1,2 @@ f +g changeset: 1:d89b0a12d229 user: test date: Thu Jan 01 00:00:02 1970 +0000 summary: b diff -r 9161b9aeaf16 -r d89b0a12d229 b --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/b Thu Jan 01 00:00:02 1970 +0000 @@ -0,0 +1,1 @@ +a diff -r 9161b9aeaf16 -r d89b0a12d229 g --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/g Thu Jan 01 00:00:02 1970 +0000 @@ -0,0 +1,1 @@ +f changeset: 0:9161b9aeaf16 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: a diff -r 000000000000 -r 9161b9aeaf16 a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:01 1970 +0000 @@ -0,0 +1,1 @@ +a diff -r 000000000000 -r 9161b9aeaf16 f --- /dev/null Thu Jan 01 00:00:00 1970 
+0000 +++ b/f Thu Jan 01 00:00:01 1970 +0000 @@ -0,0 +1,1 @@ +f log -vf dir/b $ hg log -vf dir/b changeset: 2:f8954cd4dc1f user: test date: Thu Jan 01 00:00:03 1970 +0000 files: b dir/b f g description: c changeset: 1:d89b0a12d229 user: test date: Thu Jan 01 00:00:02 1970 +0000 files: b g description: b changeset: 0:9161b9aeaf16 user: test date: Thu Jan 01 00:00:01 1970 +0000 files: a f description: a -f and multiple filelog heads $ hg up -q 2 $ hg log -f g --template '{rev}\n' 2 1 0 $ hg up -q tip $ hg log -f g --template '{rev}\n' 3 2 0 log copies with --copies $ hg log -vC --template '{rev} {file_copies}\n' 4 e (dir/b) 3 b (a)g (f) 2 dir/b (b) 1 b (a)g (f) 0 log copies switch without --copies, with old filecopy template $ hg log -v --template '{rev} {file_copies_switch%filecopy}\n' 4 3 2 1 0 log copies switch with --copies $ hg log -vC --template '{rev} {file_copies_switch}\n' 4 e (dir/b) 3 b (a)g (f) 2 dir/b (b) 1 b (a)g (f) 0 log copies with hardcoded style and with --style=default $ hg log -vC -r4 changeset: 4:7e4639b4691b tag: tip user: test date: Thu Jan 01 00:00:05 1970 +0000 files: dir/b e copies: e (dir/b) description: e $ hg log -vC -r4 --style=default changeset: 4:7e4639b4691b tag: tip user: test date: Thu Jan 01 00:00:05 1970 +0000 files: dir/b e copies: e (dir/b) description: e $ hg log -vC -r4 -Tjson [ { "rev": 4, "node": "7e4639b4691b9f84b81036a8d4fb218ce3c5e3a3", "branch": "default", "phase": "draft", "user": "test", "date": [5, 0], "desc": "e", "bookmarks": [], "tags": ["tip"], "parents": ["2ca5ba7019804f1f597249caddf22a64d34df0ba"], "files": ["dir/b", "e"], "copies": {"e": "dir/b"} } ] log copies, non-linear manifest $ hg up -C 3 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg mv dir/b e $ echo foo > foo $ hg ci -Ame2 -d '6 0' adding foo created new head $ hg log -v --template '{rev} {file_copies}\n' -r 5 5 e (dir/b) log copies, execute bit set #if execbit $ chmod +x e $ hg ci -me3 -d '7 0' $ hg log -v --template '{rev} 
{file_copies}\n' -r 6 6 #endif log -p d $ hg log -pv d changeset: 3:2ca5ba701980 user: test date: Thu Jan 01 00:00:04 1970 +0000 files: a b d g description: d diff -r f8954cd4dc1f -r 2ca5ba701980 d --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/d Thu Jan 01 00:00:04 1970 +0000 @@ -0,0 +1,1 @@ +a log --removed file $ hg log --removed -v a changeset: 3:2ca5ba701980 user: test date: Thu Jan 01 00:00:04 1970 +0000 files: a b d g description: d changeset: 0:9161b9aeaf16 user: test date: Thu Jan 01 00:00:01 1970 +0000 files: a f description: a log --removed revrange file $ hg log --removed -v -r0:2 a changeset: 0:9161b9aeaf16 user: test date: Thu Jan 01 00:00:01 1970 +0000 files: a f description: a $ cd .. log --follow tests $ hg init follow $ cd follow $ echo base > base $ hg ci -Ambase -d '1 0' adding base $ echo r1 >> base $ hg ci -Amr1 -d '1 0' $ echo r2 >> base $ hg ci -Amr2 -d '1 0' $ hg up -C 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo b1 > b1 log -r "follow('set:clean()')" $ hg log -r "follow('set:clean()')" changeset: 0:67e992f2c4f3 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: base changeset: 1:3d5bf5654eda user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: r1 $ hg ci -Amb1 -d '1 0' adding b1 created new head log -f $ hg log -f changeset: 3:e62f78d544b4 tag: tip parent: 1:3d5bf5654eda user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: b1 changeset: 1:3d5bf5654eda user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: r1 changeset: 0:67e992f2c4f3 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: base log -r follow('glob:b*') $ hg log -r "follow('glob:b*')" changeset: 0:67e992f2c4f3 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: base changeset: 1:3d5bf5654eda user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: r1 changeset: 3:e62f78d544b4 tag: tip parent: 1:3d5bf5654eda user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: b1 log -f -r '1 + 4' $ hg up -C 0 1 files updated, 0 
files merged, 1 files removed, 0 files unresolved $ echo b2 > b2 $ hg ci -Amb2 -d '1 0' adding b2 created new head $ hg log -f -r '1 + 4' changeset: 4:ddb82e70d1a1 tag: tip parent: 0:67e992f2c4f3 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: b2 changeset: 1:3d5bf5654eda user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: r1 changeset: 0:67e992f2c4f3 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: base log -r "follow('set:grep(b2)')" $ hg log -r "follow('set:grep(b2)')" changeset: 4:ddb82e70d1a1 tag: tip parent: 0:67e992f2c4f3 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: b2 log -f -r null $ hg log -f -r null changeset: -1:000000000000 user: date: Thu Jan 01 00:00:00 1970 +0000 $ hg log -f -r null -G o changeset: -1:000000000000 user: date: Thu Jan 01 00:00:00 1970 +0000 log -f with null parent $ hg up -C null 0 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg log -f log -r . with two parents $ hg up -C 3 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge tip 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg log -r . changeset: 3:e62f78d544b4 parent: 1:3d5bf5654eda user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: b1 log -r . with one parent $ hg ci -mm12 -d '1 0' $ hg log -r . 
changeset: 5:302e9dd6890d tag: tip parent: 3:e62f78d544b4 parent: 4:ddb82e70d1a1 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: m12 $ echo postm >> b1 $ hg ci -Amb1.1 -d'1 0' log --follow-first $ hg log --follow-first changeset: 6:2404bbcab562 tag: tip user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: b1.1 changeset: 5:302e9dd6890d parent: 3:e62f78d544b4 parent: 4:ddb82e70d1a1 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: m12 changeset: 3:e62f78d544b4 parent: 1:3d5bf5654eda user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: b1 changeset: 1:3d5bf5654eda user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: r1 changeset: 0:67e992f2c4f3 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: base log -P 2 $ hg log -P 2 changeset: 6:2404bbcab562 tag: tip user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: b1.1 changeset: 5:302e9dd6890d parent: 3:e62f78d544b4 parent: 4:ddb82e70d1a1 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: m12 changeset: 4:ddb82e70d1a1 parent: 0:67e992f2c4f3 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: b2 changeset: 3:e62f78d544b4 parent: 1:3d5bf5654eda user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: b1 log -r tip -p --git $ hg log -r tip -p --git changeset: 6:2404bbcab562 tag: tip user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: b1.1 diff --git a/b1 b/b1 --- a/b1 +++ b/b1 @@ -1,1 +1,2 @@ b1 +postm log -r "" $ hg log -r '' hg: parse error: empty query [255] log -r $ hg log -r 1000000000000000000000000000000000000000 abort: unknown revision '1000000000000000000000000000000000000000'! 
[255] log -k r1 $ hg log -k r1 changeset: 1:3d5bf5654eda user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: r1 log -p -l2 --color=always $ hg --config extensions.color= --config color.mode=ansi \ > log -p -l2 --color=always \x1b[0;33mchangeset: 6:2404bbcab562\x1b[0m (esc) tag: tip user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: b1.1 \x1b[0;1mdiff -r 302e9dd6890d -r 2404bbcab562 b1\x1b[0m (esc) \x1b[0;31;1m--- a/b1 Thu Jan 01 00:00:01 1970 +0000\x1b[0m (esc) \x1b[0;32;1m+++ b/b1 Thu Jan 01 00:00:01 1970 +0000\x1b[0m (esc) \x1b[0;35m@@ -1,1 +1,2 @@\x1b[0m (esc) b1 \x1b[0;32m+postm\x1b[0m (esc) \x1b[0;33mchangeset: 5:302e9dd6890d\x1b[0m (esc) parent: 3:e62f78d544b4 parent: 4:ddb82e70d1a1 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: m12 \x1b[0;1mdiff -r e62f78d544b4 -r 302e9dd6890d b2\x1b[0m (esc) \x1b[0;31;1m--- /dev/null Thu Jan 01 00:00:00 1970 +0000\x1b[0m (esc) \x1b[0;32;1m+++ b/b2 Thu Jan 01 00:00:01 1970 +0000\x1b[0m (esc) \x1b[0;35m@@ -0,0 +1,1 @@\x1b[0m (esc) \x1b[0;32m+b2\x1b[0m (esc) log -r tip --stat $ hg log -r tip --stat changeset: 6:2404bbcab562 tag: tip user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: b1.1 b1 | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) $ cd .. 
Test that log should respect the order of -rREV even if multiple OR conditions are specified (issue5100): $ hg init revorder $ cd revorder $ hg branch -q b0 $ echo 0 >> f0 $ hg ci -qAm k0 -u u0 $ hg branch -q b1 $ echo 1 >> f1 $ hg ci -qAm k1 -u u1 $ hg branch -q b2 $ echo 2 >> f2 $ hg ci -qAm k2 -u u2 $ hg update -q b2 $ echo 3 >> f2 $ hg ci -qAm k2 -u u2 $ hg update -q b1 $ echo 4 >> f1 $ hg ci -qAm k1 -u u1 $ hg update -q b0 $ echo 5 >> f0 $ hg ci -qAm k0 -u u0 summary of revisions: $ hg log -G -T '{rev} {branch} {author} {desc} {files}\n' @ 5 b0 u0 k0 f0 | | o 4 b1 u1 k1 f1 | | | | o 3 b2 u2 k2 f2 | | | | | o 2 b2 u2 k2 f2 | |/ | o 1 b1 u1 k1 f1 |/ o 0 b0 u0 k0 f0 log -b BRANCH in ascending order: $ hg log -r0:tip -T '{rev} {branch}\n' -b b0 -b b1 0 b0 1 b1 4 b1 5 b0 $ hg log -r0:tip -T '{rev} {branch}\n' -b b1 -b b0 0 b0 1 b1 4 b1 5 b0 log --only-branch BRANCH in descending order: $ hg log -rtip:0 -T '{rev} {branch}\n' --only-branch b1 --only-branch b2 4 b1 3 b2 2 b2 1 b1 $ hg log -rtip:0 -T '{rev} {branch}\n' --only-branch b2 --only-branch b1 4 b1 3 b2 2 b2 1 b1 log -u USER in ascending order, against compound set: $ hg log -r'::head()' -T '{rev} {author}\n' -u u0 -u u2 0 u0 2 u2 3 u2 5 u0 $ hg log -r'::head()' -T '{rev} {author}\n' -u u2 -u u0 0 u0 2 u2 3 u2 5 u0 log -k TEXT in descending order, against compound set: $ hg log -r'5 + reverse(::3)' -T '{rev} {desc}\n' -k k0 -k k1 -k k2 5 k0 3 k2 2 k2 1 k1 0 k0 $ hg log -r'5 + reverse(::3)' -T '{rev} {desc}\n' -k k2 -k k1 -k k0 5 k0 3 k2 2 k2 1 k1 0 k0 log FILE in ascending order, against dagrange: $ hg log -r1:: -T '{rev} {files}\n' f1 f2 1 f1 2 f2 3 f2 4 f1 $ hg log -r1:: -T '{rev} {files}\n' f2 f1 1 f1 2 f2 3 f2 4 f1 $ cd .. 
User $ hg init usertest $ cd usertest $ echo a > a $ hg ci -A -m "a" -u "User One " adding a $ echo b > b $ hg ci -A -m "b" -u "User Two " adding b $ hg log -u "User One " changeset: 0:29a4c94f1924 user: User One date: Thu Jan 01 00:00:00 1970 +0000 summary: a $ hg log -u "user1" -u "user2" changeset: 1:e834b5e69c0e tag: tip user: User Two date: Thu Jan 01 00:00:00 1970 +0000 summary: b changeset: 0:29a4c94f1924 user: User One date: Thu Jan 01 00:00:00 1970 +0000 summary: a $ hg log -u "user3" $ cd .. $ hg init branches $ cd branches $ echo a > a $ hg ci -A -m "commit on default" adding a $ hg branch test marked working directory as branch test (branches are permanent and global, did you want a bookmark?) $ echo b > b $ hg ci -A -m "commit on test" adding b $ hg up default 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo c > c $ hg ci -A -m "commit on default" adding c $ hg up test 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo c > c $ hg ci -A -m "commit on test" adding c log -b default $ hg log -b default changeset: 2:c3a4f03cc9a7 parent: 0:24427303d56f user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit on default changeset: 0:24427303d56f user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit on default log -b test $ hg log -b test changeset: 3:f5d8de11c2e2 branch: test tag: tip parent: 1:d32277701ccb user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit on test changeset: 1:d32277701ccb branch: test user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit on test log -b dummy $ hg log -b dummy abort: unknown revision 'dummy'! [255] log -b . $ hg log -b . 
changeset: 3:f5d8de11c2e2 branch: test tag: tip parent: 1:d32277701ccb user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit on test changeset: 1:d32277701ccb branch: test user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit on test log -b default -b test $ hg log -b default -b test changeset: 3:f5d8de11c2e2 branch: test tag: tip parent: 1:d32277701ccb user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit on test changeset: 2:c3a4f03cc9a7 parent: 0:24427303d56f user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit on default changeset: 1:d32277701ccb branch: test user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit on test changeset: 0:24427303d56f user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit on default log -b default -b . $ hg log -b default -b . changeset: 3:f5d8de11c2e2 branch: test tag: tip parent: 1:d32277701ccb user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit on test changeset: 2:c3a4f03cc9a7 parent: 0:24427303d56f user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit on default changeset: 1:d32277701ccb branch: test user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit on test changeset: 0:24427303d56f user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit on default log -b . -b test $ hg log -b . -b test changeset: 3:f5d8de11c2e2 branch: test tag: tip parent: 1:d32277701ccb user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit on test changeset: 1:d32277701ccb branch: test user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit on test log -b 2 $ hg log -b 2 changeset: 2:c3a4f03cc9a7 parent: 0:24427303d56f user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit on default changeset: 0:24427303d56f user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit on default #if gettext Test that all log names are translated (e.g. 
branches, bookmarks, tags): $ hg bookmark babar -r tip $ HGENCODING=UTF-8 LANGUAGE=de hg log -r tip \xc3\x84nderung: 3:f5d8de11c2e2 (esc) Zweig: test Lesezeichen: babar Marke: tip Vorg\xc3\xa4nger: 1:d32277701ccb (esc) Nutzer: test Datum: Thu Jan 01 00:00:00 1970 +0000 Zusammenfassung: commit on test $ hg bookmark -d babar #endif log -p --cwd dir (in subdir) $ mkdir dir $ hg log -p --cwd dir changeset: 3:f5d8de11c2e2 branch: test tag: tip parent: 1:d32277701ccb user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit on test diff -r d32277701ccb -r f5d8de11c2e2 c --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/c Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +c changeset: 2:c3a4f03cc9a7 parent: 0:24427303d56f user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit on default diff -r 24427303d56f -r c3a4f03cc9a7 c --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/c Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +c changeset: 1:d32277701ccb branch: test user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit on test diff -r 24427303d56f -r d32277701ccb b --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/b Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +b changeset: 0:24427303d56f user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit on default diff -r 000000000000 -r 24427303d56f a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +a log -p -R repo $ cd dir $ hg log -p -R .. ../a changeset: 0:24427303d56f user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit on default diff -r 000000000000 -r 24427303d56f a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +a $ cd ../.. 
$ hg init follow2 $ cd follow2 # Build the following history: # tip - o - x - o - x - x # \ / # o - o - o - x # \ / # o # # Where "o" is a revision containing "foo" and # "x" is a revision without "foo" $ touch init $ hg ci -A -m "init, unrelated" adding init $ echo 'foo' > init $ hg ci -m "change, unrelated" $ echo 'foo' > foo $ hg ci -A -m "add unrelated old foo" adding foo $ hg rm foo $ hg ci -m "delete foo, unrelated" $ echo 'related' > foo $ hg ci -A -m "add foo, related" adding foo $ hg up 0 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ touch branch $ hg ci -A -m "first branch, unrelated" adding branch created new head $ touch foo $ hg ci -A -m "create foo, related" adding foo $ echo 'change' > foo $ hg ci -m "change foo, related" $ hg up 6 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo 'change foo in branch' > foo $ hg ci -m "change foo in branch, related" created new head $ hg merge 7 merging foo warning: conflicts while merging foo! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ echo 'merge 1' > foo $ hg resolve -m foo (no more unresolved files) $ hg ci -m "First merge, related" $ hg merge 4 merging foo warning: conflicts while merging foo! (edit, then use 'hg resolve --mark') 1 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' 
to abandon [1] $ echo 'merge 2' > foo $ hg resolve -m foo (no more unresolved files) $ hg ci -m "Last merge, related" $ hg log --graph @ changeset: 10:4dae8563d2c5 |\ tag: tip | | parent: 9:7b35701b003e | | parent: 4:88176d361b69 | | user: test | | date: Thu Jan 01 00:00:00 1970 +0000 | | summary: Last merge, related | | | o changeset: 9:7b35701b003e | |\ parent: 8:e5416ad8a855 | | | parent: 7:87fe3144dcfa | | | user: test | | | date: Thu Jan 01 00:00:00 1970 +0000 | | | summary: First merge, related | | | | | o changeset: 8:e5416ad8a855 | | | parent: 6:dc6c325fe5ee | | | user: test | | | date: Thu Jan 01 00:00:00 1970 +0000 | | | summary: change foo in branch, related | | | | o | changeset: 7:87fe3144dcfa | |/ user: test | | date: Thu Jan 01 00:00:00 1970 +0000 | | summary: change foo, related | | | o changeset: 6:dc6c325fe5ee | | user: test | | date: Thu Jan 01 00:00:00 1970 +0000 | | summary: create foo, related | | | o changeset: 5:73db34516eb9 | | parent: 0:e87515fd044a | | user: test | | date: Thu Jan 01 00:00:00 1970 +0000 | | summary: first branch, unrelated | | o | changeset: 4:88176d361b69 | | user: test | | date: Thu Jan 01 00:00:00 1970 +0000 | | summary: add foo, related | | o | changeset: 3:dd78ae4afb56 | | user: test | | date: Thu Jan 01 00:00:00 1970 +0000 | | summary: delete foo, unrelated | | o | changeset: 2:c4c64aedf0f7 | | user: test | | date: Thu Jan 01 00:00:00 1970 +0000 | | summary: add unrelated old foo | | o | changeset: 1:e5faa7440653 |/ user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: change, unrelated | o changeset: 0:e87515fd044a user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: init, unrelated $ hg --traceback log -f foo changeset: 10:4dae8563d2c5 tag: tip parent: 9:7b35701b003e parent: 4:88176d361b69 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Last merge, related changeset: 9:7b35701b003e parent: 8:e5416ad8a855 parent: 7:87fe3144dcfa user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: First 
merge, related changeset: 8:e5416ad8a855 parent: 6:dc6c325fe5ee user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: change foo in branch, related changeset: 7:87fe3144dcfa user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: change foo, related changeset: 6:dc6c325fe5ee user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: create foo, related changeset: 4:88176d361b69 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add foo, related Also check when maxrev < lastrevfilelog $ hg --traceback log -f -r4 foo changeset: 4:88176d361b69 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add foo, related changeset: 2:c4c64aedf0f7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add unrelated old foo $ cd .. Issue2383: hg log showing _less_ differences than hg diff $ hg init issue2383 $ cd issue2383 Create a test repo: $ echo a > a $ hg ci -Am0 adding a $ echo b > b $ hg ci -Am1 adding b $ hg co 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo b > a $ hg ci -m2 created new head Merge: $ hg merge 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) Make sure there's a file listed in the merge to trigger the bug: $ echo c > a $ hg ci -m3 Two files shown here in diff: $ hg diff --rev 2:3 diff -r b09be438c43a -r 8e07aafe1edc a --- a/a Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,1 @@ -b +c diff -r b09be438c43a -r 8e07aafe1edc b --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/b Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +b Diff here should be the same: $ hg log -vpr 3 changeset: 3:8e07aafe1edc tag: tip parent: 2:b09be438c43a parent: 1:925d80f479bb user: test date: Thu Jan 01 00:00:00 1970 +0000 files: a description: 3 diff -r b09be438c43a -r 8e07aafe1edc a --- a/a Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,1 @@ -b +c diff -r b09be438c43a -r 8e07aafe1edc b --- /dev/null Thu Jan 01 
00:00:00 1970 +0000 +++ b/b Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +b $ cd .. 'hg log -r rev fn' when last(filelog(fn)) != rev $ hg init simplelog $ cd simplelog $ echo f > a $ hg ci -Am'a' -d '0 0' adding a $ echo f >> a $ hg ci -Am'a bis' -d '1 0' $ hg log -r0 a changeset: 0:9f758d63dcde user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a enable obsolete to test hidden feature $ cat >> $HGRCPATH << EOF > [experimental] > evolution=createmarkers > EOF $ hg log --template='{rev}:{node}\n' 1:a765632148dc55d38c35c4f247c618701886cb2f 0:9f758d63dcde62d547ebfb08e1e7ee96535f2b05 $ hg debugobsolete a765632148dc55d38c35c4f247c618701886cb2f $ hg up null -q $ hg log --template='{rev}:{node}\n' 0:9f758d63dcde62d547ebfb08e1e7ee96535f2b05 $ hg log --template='{rev}:{node}\n' --hidden 1:a765632148dc55d38c35c4f247c618701886cb2f 0:9f758d63dcde62d547ebfb08e1e7ee96535f2b05 $ hg log -r a abort: hidden revision 'a'! (use --hidden to access hidden revisions) [255] test that parent prevent a changeset to be hidden $ hg up 1 -q --hidden $ hg log --template='{rev}:{node}\n' 1:a765632148dc55d38c35c4f247c618701886cb2f 0:9f758d63dcde62d547ebfb08e1e7ee96535f2b05 test that second parent prevent a changeset to be hidden too $ hg debugsetparents 0 1 # nothing suitable to merge here $ hg log --template='{rev}:{node}\n' 1:a765632148dc55d38c35c4f247c618701886cb2f 0:9f758d63dcde62d547ebfb08e1e7ee96535f2b05 $ hg debugsetparents 1 $ hg up -q null bookmarks prevent a changeset being hidden $ hg bookmark --hidden -r 1 X $ hg log --template '{rev}:{node}\n' 1:a765632148dc55d38c35c4f247c618701886cb2f 0:9f758d63dcde62d547ebfb08e1e7ee96535f2b05 $ hg bookmark -d X divergent bookmarks are not hidden $ hg bookmark --hidden -r 1 X@foo $ hg log --template '{rev}:{node}\n' 1:a765632148dc55d38c35c4f247c618701886cb2f 0:9f758d63dcde62d547ebfb08e1e7ee96535f2b05 clear extensions configuration $ echo '[extensions]' >> $HGRCPATH $ echo "obs=!" >> $HGRCPATH $ cd .. 
test -u/-k for problematic encoding # unicode: cp932: # u30A2 0x83 0x41(= 'A') # u30C2 0x83 0x61(= 'a') $ hg init problematicencoding $ cd problematicencoding $ python > setup.sh < print u''' > echo a > text > hg add text > hg --encoding utf-8 commit -u '\u30A2' -m none > echo b > text > hg --encoding utf-8 commit -u '\u30C2' -m none > echo c > text > hg --encoding utf-8 commit -u none -m '\u30A2' > echo d > text > hg --encoding utf-8 commit -u none -m '\u30C2' > '''.encode('utf-8') > EOF $ sh < setup.sh test in problematic encoding $ python > test.sh < print u''' > hg --encoding cp932 log --template '{rev}\\n' -u '\u30A2' > echo ==== > hg --encoding cp932 log --template '{rev}\\n' -u '\u30C2' > echo ==== > hg --encoding cp932 log --template '{rev}\\n' -k '\u30A2' > echo ==== > hg --encoding cp932 log --template '{rev}\\n' -k '\u30C2' > '''.encode('cp932') > EOF $ sh < test.sh 0 ==== 1 ==== 2 0 ==== 3 1 $ cd .. test hg log on non-existent files and on directories $ hg init issue1340 $ cd issue1340 $ mkdir d1; mkdir D2; mkdir D3.i; mkdir d4.hg; mkdir d5.d; mkdir .d6 $ echo 1 > d1/f1 $ echo 1 > D2/f1 $ echo 1 > D3.i/f1 $ echo 1 > d4.hg/f1 $ echo 1 > d5.d/f1 $ echo 1 > .d6/f1 $ hg -q add . $ hg commit -m "a bunch of weird directories" $ hg log -l1 d1/f1 | grep changeset changeset: 0:65624cd9070a $ hg log -l1 f1 $ hg log -l1 . 
| grep changeset changeset: 0:65624cd9070a $ hg log -l1 ./ | grep changeset changeset: 0:65624cd9070a $ hg log -l1 d1 | grep changeset changeset: 0:65624cd9070a $ hg log -l1 D2 | grep changeset changeset: 0:65624cd9070a $ hg log -l1 D2/f1 | grep changeset changeset: 0:65624cd9070a $ hg log -l1 D3.i | grep changeset changeset: 0:65624cd9070a $ hg log -l1 D3.i/f1 | grep changeset changeset: 0:65624cd9070a $ hg log -l1 d4.hg | grep changeset changeset: 0:65624cd9070a $ hg log -l1 d4.hg/f1 | grep changeset changeset: 0:65624cd9070a $ hg log -l1 d5.d | grep changeset changeset: 0:65624cd9070a $ hg log -l1 d5.d/f1 | grep changeset changeset: 0:65624cd9070a $ hg log -l1 .d6 | grep changeset changeset: 0:65624cd9070a $ hg log -l1 .d6/f1 | grep changeset changeset: 0:65624cd9070a issue3772: hg log -r :null showing revision 0 as well $ hg log -r :null changeset: 0:65624cd9070a tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a bunch of weird directories changeset: -1:000000000000 user: date: Thu Jan 01 00:00:00 1970 +0000 $ hg log -r null:null changeset: -1:000000000000 user: date: Thu Jan 01 00:00:00 1970 +0000 working-directory revision requires special treatment $ hg log -r 'wdir()' changeset: 2147483647:ffffffffffff parent: 0:65624cd9070a user: test date: [A-Za-z0-9:+ ]+ (re) $ hg log -r 'wdir()' -q 2147483647:ffffffffffff $ hg log -r 'wdir()' --debug changeset: 2147483647:ffffffffffffffffffffffffffffffffffffffff phase: draft parent: 0:65624cd9070a035fa7191a54f2b8af39f16b0c08 parent: -1:0000000000000000000000000000000000000000 user: test date: [A-Za-z0-9:+ ]+ (re) extra: branch=default $ hg log -r 'wdir()' -Tjson [ { "rev": null, "node": null, "branch": "default", "phase": "draft", "user": "test", "date": [*, 0], (glob) "desc": "", "bookmarks": [], "tags": [], "parents": ["65624cd9070a035fa7191a54f2b8af39f16b0c08"] } ] $ hg log -r 'wdir()' -Tjson -q [ { "rev": null, "node": null } ] $ hg log -r 'wdir()' -Tjson --debug [ { "rev": null, "node": null, 
"branch": "default", "phase": "draft", "user": "test", "date": [*, 0], (glob) "desc": "", "bookmarks": [], "tags": [], "parents": ["65624cd9070a035fa7191a54f2b8af39f16b0c08"], "manifest": null, "extra": {"branch": "default"}, "modified": [], "added": [], "removed": [] } ] Check that adding an arbitrary name shows up in log automatically $ cat > ../names.py < """A small extension to test adding arbitrary names to a repo""" > from mercurial.namespaces import namespace > > def reposetup(ui, repo): > foo = {'foo': repo[0].node()} > names = lambda r: foo.keys() > namemap = lambda r, name: foo.get(name) > nodemap = lambda r, node: [name for name, n in foo.iteritems() > if n == node] > ns = namespace("bars", templatename="bar", logname="barlog", > colorname="barcolor", listnames=names, namemap=namemap, > nodemap=nodemap) > > repo.names.addnamespace(ns) > EOF $ hg --config extensions.names=../names.py log -r 0 changeset: 0:65624cd9070a tag: tip barlog: foo user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a bunch of weird directories $ hg --config extensions.names=../names.py \ > --config extensions.color= --config color.log.barcolor=red \ > --color=always log -r 0 \x1b[0;33mchangeset: 0:65624cd9070a\x1b[0m (esc) tag: tip \x1b[0;31mbarlog: foo\x1b[0m (esc) user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a bunch of weird directories $ hg --config extensions.names=../names.py log -r 0 --template '{bars}\n' foo $ cd .. hg log -f dir across branches $ hg init acrossbranches $ cd acrossbranches $ mkdir d $ echo a > d/a && hg ci -Aqm a $ echo b > d/a && hg ci -Aqm b $ hg up -q 0 $ echo b > d/a && hg ci -Aqm c $ hg log -f d -T '{desc}' -G @ c | o a Ensure that largefiles doesn't interfere with following a normal file $ hg --config extensions.largefiles= log -f d -T '{desc}' -G @ c | o a $ hg log -f d/a -T '{desc}' -G @ c | o a $ cd .. 
hg log -f with linkrev pointing to another branch ------------------------------------------------- create history with a filerev whose linkrev points to another branch $ hg init branchedlinkrev $ cd branchedlinkrev $ echo 1 > a $ hg commit -Am 'content1' adding a $ echo 2 > a $ hg commit -m 'content2' $ hg up --rev 'desc(content1)' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo unrelated > unrelated $ hg commit -Am 'unrelated' adding unrelated created new head $ hg graft -r 'desc(content2)' grafting 1:2294ae80ad84 "content2" $ echo 3 > a $ hg commit -m 'content3' $ hg log -G @ changeset: 4:50b9b36e9c5d | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: content3 | o changeset: 3:15b2327059e5 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: content2 | o changeset: 2:2029acd1168c | parent: 0:ae0a3c9f9e95 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: unrelated | | o changeset: 1:2294ae80ad84 |/ user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: content2 | o changeset: 0:ae0a3c9f9e95 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: content1 log -f on the file should list the graft result. 
$ hg log -Gf a @ changeset: 4:50b9b36e9c5d | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: content3 | o changeset: 3:15b2327059e5 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: content2 | o changeset: 0:ae0a3c9f9e95 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: content1 plain log lists the original version (XXX we should probably list both) $ hg log -G a @ changeset: 4:50b9b36e9c5d | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: content3 | | o changeset: 1:2294ae80ad84 |/ user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: content2 | o changeset: 0:ae0a3c9f9e95 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: content1 hg log -f from the grafted changeset (The bootstrap should properly take the topology in account) $ hg up 'desc(content3)^' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -Gf a @ changeset: 3:15b2327059e5 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: content2 | o changeset: 0:ae0a3c9f9e95 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: content1 Test that we use the first non-hidden changeset in that case. (hide the changeset) $ hg log -T '{node}\n' -r 1 2294ae80ad8447bc78383182eeac50cb049df623 $ hg debugobsolete 2294ae80ad8447bc78383182eeac50cb049df623 $ hg log -G o changeset: 4:50b9b36e9c5d | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: content3 | @ changeset: 3:15b2327059e5 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: content2 | o changeset: 2:2029acd1168c | parent: 0:ae0a3c9f9e95 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: unrelated | o changeset: 0:ae0a3c9f9e95 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: content1 Check that log on the file does not drop the file revision. 
$ hg log -G a o changeset: 4:50b9b36e9c5d | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: content3 | @ changeset: 3:15b2327059e5 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: content2 | o changeset: 0:ae0a3c9f9e95 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: content1 Even when a head revision is linkrev-shadowed. $ hg log -T '{node}\n' -r 4 50b9b36e9c5df2c6fc6dcefa8ad0da929e84aed2 $ hg debugobsolete 50b9b36e9c5df2c6fc6dcefa8ad0da929e84aed2 $ hg log -G a @ changeset: 3:15b2327059e5 | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: content2 | o changeset: 0:ae0a3c9f9e95 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: content1 $ cd .. Even when the file revision is missing from some head: $ hg init issue4490 $ cd issue4490 $ echo '[experimental]' >> .hg/hgrc $ echo 'evolution=createmarkers' >> .hg/hgrc $ echo a > a $ hg ci -Am0 adding a $ echo b > b $ hg ci -Am1 adding b $ echo B > b $ hg ci --amend -m 1 $ hg up 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo c > c $ hg ci -Am2 adding c created new head $ hg up 'head() and not .' 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg log -G o changeset: 4:db815d6d32e6 | tag: tip | parent: 0:f7b1eb17ad24 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: 2 | | @ changeset: 3:9bc8ce7f9356 |/ parent: 0:f7b1eb17ad24 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: 1 | o changeset: 0:f7b1eb17ad24 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0 $ hg log -f -G b @ changeset: 3:9bc8ce7f9356 | parent: 0:f7b1eb17ad24 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: 1 | $ hg log -G b @ changeset: 3:9bc8ce7f9356 | parent: 0:f7b1eb17ad24 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: 1 | $ cd .. 
Check proper report when the manifest changes but not the file issue4499 ------------------------------------------------------------------------ $ hg init issue4499 $ cd issue4499 $ for f in A B C D F E G H I J K L M N O P Q R S T U; do > echo 1 > $f; > hg add $f; > done $ hg commit -m 'A1B1C1' $ echo 2 > A $ echo 2 > B $ echo 2 > C $ hg commit -m 'A2B2C2' $ hg up 0 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo 3 > A $ echo 2 > B $ echo 2 > C $ hg commit -m 'A3B2C2' created new head $ hg log -G @ changeset: 2:fe5fc3d0eb17 | tag: tip | parent: 0:abf4f0e38563 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: A3B2C2 | | o changeset: 1:07dcc6b312c0 |/ user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: A2B2C2 | o changeset: 0:abf4f0e38563 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: A1B1C1 Log -f on B should reports current changesets $ hg log -fG B @ changeset: 2:fe5fc3d0eb17 | tag: tip | parent: 0:abf4f0e38563 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: A3B2C2 | o changeset: 0:abf4f0e38563 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: A1B1C1 $ cd .. mercurial-3.7.3/tests/test-convert-p4.t0000644000175000017500000001002512676531525017432 0ustar mpmmpm00000000000000#require p4 $ echo "[extensions]" >> $HGRCPATH $ echo "convert = " >> $HGRCPATH create p4 depot $ P4ROOT=`pwd`/depot; export P4ROOT $ P4AUDIT=$P4ROOT/audit; export P4AUDIT $ P4JOURNAL=$P4ROOT/journal; export P4JOURNAL $ P4LOG=$P4ROOT/log; export P4LOG $ P4PORT=localhost:$HGPORT; export P4PORT $ P4DEBUG=1; export P4DEBUG start the p4 server $ [ ! -d $P4ROOT ] && mkdir $P4ROOT $ p4d -f -J off >$P4ROOT/stdout 2>$P4ROOT/stderr & $ echo $! >> $DAEMON_PIDS $ trap "echo stopping the p4 server ; p4 admin stop" EXIT $ # wait for the server to initialize $ while ! p4 ; do > sleep 1 > done >/dev/null 2>/dev/null create a client spec $ P4CLIENT=hg-p4-import; export P4CLIENT $ DEPOTPATH=//depot/test-mercurial-import/... 
$ p4 client -o | sed '/^View:/,$ d' >p4client $ echo View: >>p4client $ echo " $DEPOTPATH //$P4CLIENT/..." >>p4client $ p4 client -i a $ mkdir b $ echo c > b/c $ p4 add a b/c //depot/test-mercurial-import/a#1 - opened for add //depot/test-mercurial-import/b/c#1 - opened for add $ p4 submit -d initial Submitting change 1. Locking 2 files ... add //depot/test-mercurial-import/a#1 add //depot/test-mercurial-import/b/c#1 Change 1 submitted. change some files $ p4 edit a //depot/test-mercurial-import/a#1 - opened for edit $ echo aa >> a $ p4 submit -d "change a" Submitting change 2. Locking 1 files ... edit //depot/test-mercurial-import/a#2 Change 2 submitted. $ p4 edit b/c //depot/test-mercurial-import/b/c#1 - opened for edit $ echo cc >> b/c $ p4 submit -d "change b/c" Submitting change 3. Locking 1 files ... edit //depot/test-mercurial-import/b/c#2 Change 3 submitted. convert $ hg convert -s p4 $DEPOTPATH dst initializing destination dst repository reading p4 views collecting p4 changelists 1 initial 2 change a 3 change b/c scanning source... sorting... converting... 2 initial 1 change a 0 change b/c $ hg -R dst log --template 'rev={rev} desc="{desc}" tags="{tags}" files="{files}"\n' rev=2 desc="change b/c" tags="tip" files="b/c" rev=1 desc="change a" tags="" files="a" rev=0 desc="initial" tags="" files="a b/c" change some files $ p4 edit a b/c //depot/test-mercurial-import/a#2 - opened for edit //depot/test-mercurial-import/b/c#2 - opened for edit $ echo aaa >> a $ echo ccc >> b/c $ p4 submit -d "change a b/c" Submitting change 4. Locking 2 files ... edit //depot/test-mercurial-import/a#3 edit //depot/test-mercurial-import/b/c#3 Change 4 submitted. convert again $ hg convert -s p4 $DEPOTPATH dst reading p4 views collecting p4 changelists 1 initial 2 change a 3 change b/c 4 change a b/c scanning source... sorting... converting... 
0 change a b/c $ hg -R dst log --template 'rev={rev} desc="{desc}" tags="{tags}" files="{files}"\n' rev=3 desc="change a b/c" tags="tip" files="a b/c" rev=2 desc="change b/c" tags="" files="b/c" rev=1 desc="change a" tags="" files="a" rev=0 desc="initial" tags="" files="a b/c" interesting names $ echo dddd > "d d" $ mkdir " e" $ echo fff >" e/ f" $ p4 add "d d" " e/ f" //depot/test-mercurial-import/d d#1 - opened for add //depot/test-mercurial-import/ e/ f#1 - opened for add $ p4 submit -d "add d e f" Submitting change 5. Locking 2 files ... add //depot/test-mercurial-import/ e/ f#1 add //depot/test-mercurial-import/d d#1 Change 5 submitted. convert again $ hg convert -s p4 $DEPOTPATH dst reading p4 views collecting p4 changelists 1 initial 2 change a 3 change b/c 4 change a b/c 5 add d e f scanning source... sorting... converting... 0 add d e f $ hg -R dst log --template 'rev={rev} desc="{desc}" tags="{tags}" files="{files}"\n' rev=4 desc="add d e f" tags="tip" files=" e/ f d d" rev=3 desc="change a b/c" tags="" files="a b/c" rev=2 desc="change b/c" tags="" files="b/c" rev=1 desc="change a" tags="" files="a" rev=0 desc="initial" tags="" files="a b/c" exit trap: stopping the p4 server mercurial-3.7.3/tests/test-branch-tag-confict.t0000644000175000017500000000311412676531525021063 0ustar mpmmpm00000000000000Initial setup. $ hg init repo $ cd repo $ touch thefile $ hg ci -A -m 'Initial commit.' adding thefile Create a tag. $ hg tag branchortag Create a branch with the same name as the tag. $ hg branch branchortag marked working directory as branch branchortag (branches are permanent and global, did you want a bookmark?) $ hg ci -m 'Create a branch with the same name as a tag.' This is what we have: $ hg log changeset: 2:10519b3f489a branch: branchortag tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Create a branch with the same name as a tag. 
changeset: 1:2635c45ca99b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Added tag branchortag for changeset f57387372b5d changeset: 0:f57387372b5d tag: branchortag user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Initial commit. Update to the tag: $ hg up 'tag(branchortag)' 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg parents changeset: 0:f57387372b5d tag: branchortag user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Initial commit. Updating to the branch: $ hg up 'branch(branchortag)' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg parents changeset: 2:10519b3f489a branch: branchortag tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Create a branch with the same name as a tag. $ cd .. mercurial-3.7.3/tests/test-eolfilename.t0000644000175000017500000000272712676531525017723 0ustar mpmmpm00000000000000#require eol-in-paths https://bz.mercurial-scm.org/352 test issue352 $ hg init foo $ cd foo $ A=`printf 'he\rllo'` $ echo foo > "$A" $ hg add adding he\r (no-eol) (esc) llo abort: '\n' and '\r' disallowed in filenames: 'he\rllo' [255] $ hg ci -A -m m adding he\r (no-eol) (esc) llo abort: '\n' and '\r' disallowed in filenames: 'he\rllo' [255] $ rm "$A" $ echo foo > "hell > o" $ hg add adding hell o abort: '\n' and '\r' disallowed in filenames: 'hell\no' [255] $ hg ci -A -m m adding hell o abort: '\n' and '\r' disallowed in filenames: 'hell\no' [255] $ echo foo > "$A" $ hg debugwalk f he\r (no-eol) (esc) llo he\r (no-eol) (esc) llo f hell o hell o $ echo bla > quickfox $ hg add quickfox $ hg ci -m 2 $ A=`printf 'quick\rfox'` $ hg cp quickfox "$A" abort: '\n' and '\r' disallowed in filenames: 'quick\rfox' [255] $ hg mv quickfox "$A" abort: '\n' and '\r' disallowed in filenames: 'quick\rfox' [255] https://bz.mercurial-scm.org/2036 $ cd .. 
test issue2039 $ hg init bar $ cd bar $ cat <> $HGRCPATH > [extensions] > color = > [color] > mode = ansi > EOF $ A=`printf 'foo\nbar'` $ B=`printf 'foo\nbar.baz'` $ touch "$A" $ touch "$B" $ hg status --color=always \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mfoo\x1b[0m (esc) \x1b[0;35;1;4mbar\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mfoo\x1b[0m (esc) \x1b[0;35;1;4mbar.baz\x1b[0m (esc) $ cd .. mercurial-3.7.3/tests/test-filelog.py.out0000644000175000017500000000012612676531525020046 0ustar mpmmpm00000000000000ERROR: FIXME: This is a known failure of filelog.size for data starting with \1\n OK. mercurial-3.7.3/tests/test-username-newline.t0000644000175000017500000000076712676531525020723 0ustar mpmmpm00000000000000 $ hg init $ touch a $ unset HGUSER $ echo "[ui]" >> .hg/hgrc $ echo "username= foo" >> .hg/hgrc $ echo " bar1" >> .hg/hgrc $ hg ci -Am m adding a abort: username 'foo\nbar1' contains a newline [255] $ rm .hg/hgrc $ HGUSER=`(echo foo; echo bar2)` hg ci -Am m abort: username 'foo\nbar2' contains a newline [255] $ hg ci -Am m -u "`(echo foo; echo bar3)`" transaction abort! rollback completed abort: username 'foo\nbar3' contains a newline! [255] mercurial-3.7.3/tests/test-patchbomb.t0000644000175000017500000025354112676531525017404 0ustar mpmmpm00000000000000Note for future hackers of patchbomb: this file is a bit heavy on wildcards in test expectations due to how many things like hostnames tend to make it into outputs. 
As a result, you may need to perform the following regular expression substitutions: @$HOSTNAME> -> @*> (glob) Mercurial-patchbomb/.* -> Mercurial-patchbomb/* (glob) /mixed; boundary="===+[0-9]+==" -> /mixed; boundary="===*== (glob)" --===+[0-9]+=+--$ -> --===*=-- (glob) --===+[0-9]+=+$ -> --===*= (glob) $ cat > prune-blank-after-boundary.py < import sys > skipblank = False > trim = lambda x: x.strip(' \r\n') > for l in sys.stdin: > if trim(l).endswith('=--') or trim(l).endswith('=='): > skipblank = True > print l, > continue > if not trim(l) and skipblank: > continue > skipblank = False > print l, > EOF $ FILTERBOUNDARY="python `pwd`/prune-blank-after-boundary.py" $ echo "[format]" >> $HGRCPATH $ echo "usegeneraldelta=yes" >> $HGRCPATH $ echo "[extensions]" >> $HGRCPATH $ echo "patchbomb=" >> $HGRCPATH $ hg init t $ cd t $ echo a > a $ hg commit -Ama -d '1 0' adding a $ hg email --date '1970-1-1 0:1' -n -f quux -t foo -c bar -r tip this patch series consists of 1 patches. displaying [PATCH] a ... Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH] a X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: <8580ff50825a50c8f716.60@*> (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.60@*> (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar # HG changeset patch # User test # Date 1 0 # Thu Jan 01 00:00:01 1970 +0000 # Node ID 8580ff50825a50c8f716709acdf8de0deddcd6ab # Parent 0000000000000000000000000000000000000000 a diff -r 000000000000 -r 8580ff50825a a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:01 1970 +0000 @@ -0,0 +1,1 @@ +a $ hg --config ui.interactive=1 email --confirm -n -f quux -t foo -c bar -r tip< n > EOF this patch series consists of 1 patches. 
Final summary: From: quux To: foo Cc: bar Subject: [PATCH] a a | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) are you sure you want to send (yn)? n abort: patchbomb canceled [255] $ hg --config ui.interactive=1 --config patchbomb.confirm=true email -n -f quux -t foo -c bar -r tip< n > EOF this patch series consists of 1 patches. Final summary: From: quux To: foo Cc: bar Subject: [PATCH] a a | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) are you sure you want to send (yn)? n abort: patchbomb canceled [255] Test diff.git is respected $ hg --config diff.git=True email --date '1970-1-1 0:1' -n -f quux -t foo -c bar -r tip this patch series consists of 1 patches. displaying [PATCH] a ... Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH] a X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: <8580ff50825a50c8f716.60@*> (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.60@*> (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar # HG changeset patch # User test # Date 1 0 # Thu Jan 01 00:00:01 1970 +0000 # Node ID 8580ff50825a50c8f716709acdf8de0deddcd6ab # Parent 0000000000000000000000000000000000000000 a diff --git a/a b/a new file mode 100644 --- /dev/null +++ b/a @@ -0,0 +1,1 @@ +a Test breaking format changes aren't $ hg --config diff.noprefix=True email --date '1970-1-1 0:1' -n -f quux -t foo -c bar -r tip this patch series consists of 1 patches. displaying [PATCH] a ... 
Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH] a X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: <8580ff50825a50c8f716.60@*> (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.60@*> (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar # HG changeset patch # User test # Date 1 0 # Thu Jan 01 00:00:01 1970 +0000 # Node ID 8580ff50825a50c8f716709acdf8de0deddcd6ab # Parent 0000000000000000000000000000000000000000 a diff -r 000000000000 -r 8580ff50825a a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:01 1970 +0000 @@ -0,0 +1,1 @@ +a $ echo b > b $ hg commit -Amb -d '2 0' adding b $ hg email --date '1970-1-1 0:2' -n -f quux -t foo -c bar -s test -r 0:tip this patch series consists of 2 patches. Write the introductory message for the patch series. displaying [PATCH 0 of 2] test ... Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 0 of 2] test Message-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:02:00 +0000 From: quux To: foo Cc: bar displaying [PATCH 1 of 2] a ... 
Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 1 of 2] a X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 2 Message-Id: <8580ff50825a50c8f716.121@*> (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.121@*> (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:02:01 +0000 From: quux To: foo Cc: bar # HG changeset patch # User test # Date 1 0 # Thu Jan 01 00:00:01 1970 +0000 # Node ID 8580ff50825a50c8f716709acdf8de0deddcd6ab # Parent 0000000000000000000000000000000000000000 a diff -r 000000000000 -r 8580ff50825a a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:01 1970 +0000 @@ -0,0 +1,1 @@ +a displaying [PATCH 2 of 2] b ... Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 2 of 2] b X-Mercurial-Node: 97d72e5f12c7e84f85064aa72e5a297142c36ed9 X-Mercurial-Series-Index: 2 X-Mercurial-Series-Total: 2 Message-Id: <97d72e5f12c7e84f8506.122@*> (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.121@*> (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:02:02 +0000 From: quux To: foo Cc: bar # HG changeset patch # User test # Date 2 0 # Thu Jan 01 00:00:02 1970 +0000 # Node ID 97d72e5f12c7e84f85064aa72e5a297142c36ed9 # Parent 8580ff50825a50c8f716709acdf8de0deddcd6ab b diff -r 8580ff50825a -r 97d72e5f12c7 b --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/b Thu Jan 01 00:00:02 1970 +0000 @@ -0,0 +1,1 @@ +b .hg/last-email.txt $ cat > editor.sh << '__EOF__' > echo "a precious introductory message" > "$1" > __EOF__ $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg email -n -t foo -s test -r 0:tip > /dev/null $ cat .hg/last-email.txt a precious introductory message $ hg email -m test.mbox -f quux -t foo -c bar -s test 0:tip \ > --config extensions.progress= 
--config progress.assume-tty=1 \ > --config progress.delay=0 --config progress.refresh=0 \ > --config progress.width=60 this patch series consists of 2 patches. Write the introductory message for the patch series. \r (no-eol) (esc) sending [ ] 0/3\r (no-eol) (esc) \r (no-eol) (esc) \r (no-eol) (esc) sending [==============> ] 1/3\r (no-eol) (esc) \r (no-eol) (esc) \r (no-eol) (esc) sending [=============================> ] 2/3\r (no-eol) (esc) \r (esc) sending [PATCH 0 of 2] test ... sending [PATCH 1 of 2] a ... sending [PATCH 2 of 2] b ... $ cd .. $ hg clone -q t t2 $ cd t2 $ echo c > c $ hg commit -Amc -d '3 0' adding c $ cat > description < a multiline > > description > EOF test bundle and description: $ hg email --date '1970-1-1 0:3' -n -f quux -t foo \ > -c bar -s test -r tip -b --desc description | $FILTERBOUNDARY searching for changes 1 changesets found displaying test ... Content-Type: multipart/mixed; boundary="===*==" (glob) MIME-Version: 1.0 Subject: test Message-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:03:00 +0000 From: quux To: foo Cc: bar --===*= (glob) Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit a multiline description --===*= (glob) Content-Type: application/x-mercurial-bundle MIME-Version: 1.0 Content-Disposition: attachment; filename="bundle.hg" Content-Transfer-Encoding: base64 SEcyMAAAAA5Db21wcmVzc2lvbj1CWkJaaDkxQVkmU1lCZFwPAAAKf//7nFYSWD1/4H7R09C/I70I Ak0E4peoSIYIgQCgGUQOcLABGY2hqoAAAaBMTTAAAahgTCZoAAAAAMQaqn5GmapojQ00DEGI/VGJ kDAJoGTDUAAyM0QaAEqalPTUaMhoyDIDR6IxAGEGmgAehMRhDRsoyB6TYTC8JyLN+jTGqitRAgRJ b3SRlhd8/+VxlAUqAilLoKPEEyxFQkaEGo+DzItFeNiFAo8NMMweVtvXJFIMhjoKC18DeYwjLKBz wrMcs86qJrctDNJorwBMuLcqvTVWHh1IlsIaaaYSUIP2IZsogT1+pSSZS+bSTJrgfKsO9go/f0HF uW4Yr2vXpxDreOgSIAdK/xC8Yay48SLpxIuqc/BZ6rVZCgG21rr0zhCaEgXOTqNaYEvANvg0B0Qo dgtqAs1FDcZgzYitwJh6ZAG0C4mA7FPrp9b7h0h/A44Xgd+0it1gvF0mFE/CCPwymXS+OisOOCAF mDUDAC1pBvsXckU4UJBCZFwP --===============*==-- (glob) 
with a specific bundle type (binary part must be different) $ hg email --date '1970-1-1 0:3' -n -f quux -t foo \ > -c bar -s test -r tip -b --desc description \ > --config patchbomb.bundletype=gzip-v1 | $FILTERBOUNDARY searching for changes 1 changesets found displaying test ... Content-Type: multipart/mixed; boundary="===*==" (glob) MIME-Version: 1.0 Subject: test Message-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:03:00 +0000 From: quux To: foo Cc: bar --===*= (glob) Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit a multiline description --===*= (glob) Content-Type: application/x-mercurial-bundle MIME-Version: 1.0 Content-Disposition: attachment; filename="bundle.hg" Content-Transfer-Encoding: base64 SEcxMEdaeJxjYGBY8V9n/iLGbtFfJZuNk/euDCpWfrRy/vTrevFCx1/4t7J5LdeL0ix0Opx3kwEL wKYXKqUJwqnG5sYWSWmmJsaWlqYWaRaWJpaWiWamZpYWRgZGxolJiabmSQbmZqlcQMV6QGwCxGzG CgZcySARUyA2A2LGZKiZ3Y+Lu786z4z4MWXmsrAZCsqrl1az5y21PMcjpbThzWeXGT+/nutbmvvz zXYS3BoGxdrJDIYmlimJJiZpRokmqYYmaSYWFknmSSkmhqbmliamiZYWxuYmBhbJBgZcUBNZQe5K Epm7xF/LT+RLx/a9juFTomaYO/Rgsx4rwBN+IMCUDLOKAQBrsmti --===============*==-- (glob) utf-8 patch: $ $PYTHON -c 'fp = open("utf", "wb"); fp.write("h\xC3\xB6mma!\n"); fp.close();' $ hg commit -A -d '4 0' -m 'utf-8 content' adding description adding utf no mime encoding for email --test: $ hg email --date '1970-1-1 0:4' -f quux -t foo -c bar -r tip -n this patch series consists of 1 patches. displaying [PATCH] utf-8 content ... 
Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 8bit Subject: [PATCH] utf-8 content X-Mercurial-Node: 909a00e13e9d78b575aeee23dddbada46d5a143f X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: <909a00e13e9d78b575ae.240@*> (glob) X-Mercurial-Series-Id: <909a00e13e9d78b575ae.240@*> (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:04:00 +0000 From: quux To: foo Cc: bar # HG changeset patch # User test # Date 4 0 # Thu Jan 01 00:00:04 1970 +0000 # Node ID 909a00e13e9d78b575aeee23dddbada46d5a143f # Parent ff2c9fa2018b15fa74b33363bda9527323e2a99f utf-8 content diff -r ff2c9fa2018b -r 909a00e13e9d description --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/description Thu Jan 01 00:00:04 1970 +0000 @@ -0,0 +1,3 @@ +a multiline + +description diff -r ff2c9fa2018b -r 909a00e13e9d utf --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/utf Thu Jan 01 00:00:04 1970 +0000 @@ -0,0 +1,1 @@ +h\xc3\xb6mma! (esc) mime encoded mbox (base64): $ hg email --date '1970-1-1 0:4' -f 'Q ' -t foo -c bar -r tip -m mbox this patch series consists of 1 patches. sending [PATCH] utf-8 content ... $ cat mbox From quux ... ... .. ..:..:.. .... 
(re) Content-Type: text/plain; charset="utf-8" MIME-Version: 1.0 Content-Transfer-Encoding: base64 Subject: [PATCH] utf-8 content X-Mercurial-Node: 909a00e13e9d78b575aeee23dddbada46d5a143f X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: <909a00e13e9d78b575ae.240@*> (glob) X-Mercurial-Series-Id: <909a00e13e9d78b575ae.240@*> (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:04:00 +0000 From: Q To: foo Cc: bar IyBIRyBjaGFuZ2VzZXQgcGF0Y2gKIyBVc2VyIHRlc3QKIyBEYXRlIDQgMAojICAgICAgVGh1IEph biAwMSAwMDowMDowNCAxOTcwICswMDAwCiMgTm9kZSBJRCA5MDlhMDBlMTNlOWQ3OGI1NzVhZWVl MjNkZGRiYWRhNDZkNWExNDNmCiMgUGFyZW50ICBmZjJjOWZhMjAxOGIxNWZhNzRiMzMzNjNiZGE5 NTI3MzIzZTJhOTlmCnV0Zi04IGNvbnRlbnQKCmRpZmYgLXIgZmYyYzlmYTIwMThiIC1yIDkwOWEw MGUxM2U5ZCBkZXNjcmlwdGlvbgotLS0gL2Rldi9udWxsCVRodSBKYW4gMDEgMDA6MDA6MDAgMTk3 MCArMDAwMAorKysgYi9kZXNjcmlwdGlvbglUaHUgSmFuIDAxIDAwOjAwOjA0IDE5NzAgKzAwMDAK QEAgLTAsMCArMSwzIEBACithIG11bHRpbGluZQorCitkZXNjcmlwdGlvbgpkaWZmIC1yIGZmMmM5 ZmEyMDE4YiAtciA5MDlhMDBlMTNlOWQgdXRmCi0tLSAvZGV2L251bGwJVGh1IEphbiAwMSAwMDow MDowMCAxOTcwICswMDAwCisrKyBiL3V0ZglUaHUgSmFuIDAxIDAwOjAwOjA0IDE5NzAgKzAwMDAK QEAgLTAsMCArMSwxIEBACitow7ZtbWEhCg== $ $PYTHON -c 'print open("mbox").read().split("\n\n")[1].decode("base64")' # HG changeset patch # User test # Date 4 0 # Thu Jan 01 00:00:04 1970 +0000 # Node ID 909a00e13e9d78b575aeee23dddbada46d5a143f # Parent ff2c9fa2018b15fa74b33363bda9527323e2a99f utf-8 content diff -r ff2c9fa2018b -r 909a00e13e9d description --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/description Thu Jan 01 00:00:04 1970 +0000 @@ -0,0 +1,3 @@ +a multiline + +description diff -r ff2c9fa2018b -r 909a00e13e9d utf --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/utf Thu Jan 01 00:00:04 1970 +0000 @@ -0,0 +1,1 @@ +h\xc3\xb6mma! 
(esc) $ rm mbox mime encoded mbox (quoted-printable): $ $PYTHON -c 'fp = open("long", "wb"); fp.write("%s\nfoo\n\nbar\n" % ("x" * 1024)); fp.close();' $ hg commit -A -d '4 0' -m 'long line' adding long no mime encoding for email --test: $ hg email --date '1970-1-1 0:4' -f quux -t foo -c bar -r tip -n this patch series consists of 1 patches. displaying [PATCH] long line ... Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: quoted-printable Subject: [PATCH] long line X-Mercurial-Node: a2ea8fc83dd8b93cfd86ac97b28287204ab806e1 X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: (glob) X-Mercurial-Series-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:04:00 +0000 From: quux To: foo Cc: bar # HG changeset patch # User test # Date 4 0 # Thu Jan 01 00:00:04 1970 +0000 # Node ID a2ea8fc83dd8b93cfd86ac97b28287204ab806e1 # Parent 909a00e13e9d78b575aeee23dddbada46d5a143f long line diff -r 909a00e13e9d -r a2ea8fc83dd8 long --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/long Thu Jan 01 00:00:04 1970 +0000 @@ -0,0 +1,4 @@ +xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= 
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx +foo + +bar mime encoded mbox (quoted-printable): $ hg email --date '1970-1-1 0:4' -f quux -t foo -c bar -r tip -m mbox this patch series consists of 1 patches. sending [PATCH] long line ... $ cat mbox From quux ... ... .. ..:..:.. .... (re) Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: quoted-printable Subject: [PATCH] long line X-Mercurial-Node: a2ea8fc83dd8b93cfd86ac97b28287204ab806e1 X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: (glob) X-Mercurial-Series-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:04:00 +0000 From: quux To: foo Cc: bar # HG changeset patch # User test # Date 4 0 # Thu Jan 01 00:00:04 1970 +0000 # Node ID a2ea8fc83dd8b93cfd86ac97b28287204ab806e1 # Parent 909a00e13e9d78b575aeee23dddbada46d5a143f long line diff -r 909a00e13e9d -r a2ea8fc83dd8 long --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/long Thu Jan 01 00:00:04 1970 +0000 @@ -0,0 +1,4 @@ +xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= 
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx +foo + +bar $ rm mbox iso-8859-1 patch: $ $PYTHON -c 'fp = open("isolatin", "wb"); fp.write("h\xF6mma!\n"); fp.close();' $ hg commit -A -d '5 0' -m 'isolatin 8-bit encoding' adding isolatin fake ascii mbox: $ hg email --date '1970-1-1 0:5' -f quux -t foo -c bar -r tip -m mbox this patch series consists of 1 patches. sending [PATCH] isolatin 8-bit encoding ... $ cat mbox From quux ... ... .. ..:..:.. .... (re) Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 8bit Subject: [PATCH] isolatin 8-bit encoding X-Mercurial-Node: 240fb913fc1b7ff15ddb9f33e73d82bf5277c720 X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: <240fb913fc1b7ff15ddb.300@*> (glob) X-Mercurial-Series-Id: <240fb913fc1b7ff15ddb.300@*> (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:05:00 +0000 From: quux To: foo Cc: bar # HG changeset patch # User test # Date 5 0 # Thu Jan 01 00:00:05 1970 +0000 # Node ID 240fb913fc1b7ff15ddb9f33e73d82bf5277c720 # Parent a2ea8fc83dd8b93cfd86ac97b28287204ab806e1 isolatin 8-bit encoding diff -r a2ea8fc83dd8 -r 240fb913fc1b isolatin --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/isolatin Thu Jan 01 00:00:05 1970 +0000 @@ -0,0 +1,1 @@ +h\xf6mma! (esc) test diffstat for single patch: $ hg email --date '1970-1-1 0:1' -n -f quux -t foo -c bar -s test -d -y -r 2 this patch series consists of 1 patches. Final summary: From: quux To: foo Cc: bar Subject: [PATCH] test c | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) are you sure you want to send (yn)? y displaying [PATCH] test ... 
Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH] test X-Mercurial-Node: ff2c9fa2018b15fa74b33363bda9527323e2a99f X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: (glob) X-Mercurial-Series-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar c | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) # HG changeset patch # User test # Date 3 0 # Thu Jan 01 00:00:03 1970 +0000 # Node ID ff2c9fa2018b15fa74b33363bda9527323e2a99f # Parent 97d72e5f12c7e84f85064aa72e5a297142c36ed9 c diff -r 97d72e5f12c7 -r ff2c9fa2018b c --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/c Thu Jan 01 00:00:03 1970 +0000 @@ -0,0 +1,1 @@ +c test diffstat for multiple patches: $ hg email --date '1970-1-1 0:1' -n -f quux -t foo -c bar -s test -d -y \ > -r 0:1 this patch series consists of 2 patches. Write the introductory message for the patch series. Final summary: From: quux To: foo Cc: bar Subject: [PATCH 0 of 2] test a | 1 + b | 1 + 2 files changed, 2 insertions(+), 0 deletions(-) Subject: [PATCH 1 of 2] a a | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) Subject: [PATCH 2 of 2] b b | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) are you sure you want to send (yn)? y displaying [PATCH 0 of 2] test ... Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 0 of 2] test Message-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar a | 1 + b | 1 + 2 files changed, 2 insertions(+), 0 deletions(-) displaying [PATCH 1 of 2] a ... 
Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 1 of 2] a X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 2 Message-Id: <8580ff50825a50c8f716.61@*> (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.61@*> (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:01 +0000 From: quux To: foo Cc: bar a | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) # HG changeset patch # User test # Date 1 0 # Thu Jan 01 00:00:01 1970 +0000 # Node ID 8580ff50825a50c8f716709acdf8de0deddcd6ab # Parent 0000000000000000000000000000000000000000 a diff -r 000000000000 -r 8580ff50825a a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:01 1970 +0000 @@ -0,0 +1,1 @@ +a displaying [PATCH 2 of 2] b ... Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 2 of 2] b X-Mercurial-Node: 97d72e5f12c7e84f85064aa72e5a297142c36ed9 X-Mercurial-Series-Index: 2 X-Mercurial-Series-Total: 2 Message-Id: <97d72e5f12c7e84f8506.62@*> (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.61@*> (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:02 +0000 From: quux To: foo Cc: bar b | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) # HG changeset patch # User test # Date 2 0 # Thu Jan 01 00:00:02 1970 +0000 # Node ID 97d72e5f12c7e84f85064aa72e5a297142c36ed9 # Parent 8580ff50825a50c8f716709acdf8de0deddcd6ab b diff -r 8580ff50825a -r 97d72e5f12c7 b --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/b Thu Jan 01 00:00:02 1970 +0000 @@ -0,0 +1,1 @@ +b test inline for single patch: $ hg email --date '1970-1-1 0:1' -n -f quux -t foo -c bar -s test -i -r 2 | $FILTERBOUNDARY this patch series consists of 1 patches. displaying [PATCH] test ... 
Content-Type: multipart/mixed; boundary="===*==" (glob) MIME-Version: 1.0 Subject: [PATCH] test X-Mercurial-Node: ff2c9fa2018b15fa74b33363bda9527323e2a99f X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: (glob) X-Mercurial-Series-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar --===*= (glob) Content-Type: text/x-patch; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Content-Disposition: inline; filename=t2.patch # HG changeset patch # User test # Date 3 0 # Thu Jan 01 00:00:03 1970 +0000 # Node ID ff2c9fa2018b15fa74b33363bda9527323e2a99f # Parent 97d72e5f12c7e84f85064aa72e5a297142c36ed9 c diff -r 97d72e5f12c7 -r ff2c9fa2018b c --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/c Thu Jan 01 00:00:03 1970 +0000 @@ -0,0 +1,1 @@ +c --===*=-- (glob) test inline for single patch (quoted-printable): $ hg email --date '1970-1-1 0:1' -n -f quux -t foo -c bar -s test -i -r 4 | $FILTERBOUNDARY this patch series consists of 1 patches. displaying [PATCH] test ... 
Content-Type: multipart/mixed; boundary="===*==" (glob) MIME-Version: 1.0 Subject: [PATCH] test X-Mercurial-Node: a2ea8fc83dd8b93cfd86ac97b28287204ab806e1 X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: (glob) X-Mercurial-Series-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar --===*= (glob) Content-Type: text/x-patch; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: quoted-printable Content-Disposition: inline; filename=t2.patch # HG changeset patch # User test # Date 4 0 # Thu Jan 01 00:00:04 1970 +0000 # Node ID a2ea8fc83dd8b93cfd86ac97b28287204ab806e1 # Parent 909a00e13e9d78b575aeee23dddbada46d5a143f long line diff -r 909a00e13e9d -r a2ea8fc83dd8 long --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/long Thu Jan 01 00:00:04 1970 +0000 @@ -0,0 +1,4 @@ +xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx +foo + +bar --===*=-- (glob) test inline for multiple patches: $ hg email 
--date '1970-1-1 0:1' -n -f quux -t foo -c bar -s test -i \ > -r 0:1 -r 4 | $FILTERBOUNDARY this patch series consists of 3 patches. Write the introductory message for the patch series. displaying [PATCH 0 of 3] test ... Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 0 of 3] test Message-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar displaying [PATCH 1 of 3] a ... Content-Type: multipart/mixed; boundary="===*==" (glob) MIME-Version: 1.0 Subject: [PATCH 1 of 3] a X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 3 Message-Id: <8580ff50825a50c8f716.61@*> (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.61@*> (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:01 +0000 From: quux To: foo Cc: bar --===*= (glob) Content-Type: text/x-patch; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Content-Disposition: inline; filename=t2-1.patch # HG changeset patch # User test # Date 1 0 # Thu Jan 01 00:00:01 1970 +0000 # Node ID 8580ff50825a50c8f716709acdf8de0deddcd6ab # Parent 0000000000000000000000000000000000000000 a diff -r 000000000000 -r 8580ff50825a a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:01 1970 +0000 @@ -0,0 +1,1 @@ +a --===*=-- (glob) displaying [PATCH 2 of 3] b ... 
Content-Type: multipart/mixed; boundary="===*==" (glob) MIME-Version: 1.0 Subject: [PATCH 2 of 3] b X-Mercurial-Node: 97d72e5f12c7e84f85064aa72e5a297142c36ed9 X-Mercurial-Series-Index: 2 X-Mercurial-Series-Total: 3 Message-Id: <97d72e5f12c7e84f8506.62@*> (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.61@*> (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:02 +0000 From: quux To: foo Cc: bar --===*= (glob) Content-Type: text/x-patch; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Content-Disposition: inline; filename=t2-2.patch # HG changeset patch # User test # Date 2 0 # Thu Jan 01 00:00:02 1970 +0000 # Node ID 97d72e5f12c7e84f85064aa72e5a297142c36ed9 # Parent 8580ff50825a50c8f716709acdf8de0deddcd6ab b diff -r 8580ff50825a -r 97d72e5f12c7 b --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/b Thu Jan 01 00:00:02 1970 +0000 @@ -0,0 +1,1 @@ +b --===*=-- (glob) displaying [PATCH 3 of 3] long line ... 
Content-Type: multipart/mixed; boundary="===*==" (glob) MIME-Version: 1.0 Subject: [PATCH 3 of 3] long line X-Mercurial-Node: a2ea8fc83dd8b93cfd86ac97b28287204ab806e1 X-Mercurial-Series-Index: 3 X-Mercurial-Series-Total: 3 Message-Id: (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.61@*> (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:03 +0000 From: quux To: foo Cc: bar --===*= (glob) Content-Type: text/x-patch; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: quoted-printable Content-Disposition: inline; filename=t2-3.patch # HG changeset patch # User test # Date 4 0 # Thu Jan 01 00:00:04 1970 +0000 # Node ID a2ea8fc83dd8b93cfd86ac97b28287204ab806e1 # Parent 909a00e13e9d78b575aeee23dddbada46d5a143f long line diff -r 909a00e13e9d -r a2ea8fc83dd8 long --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/long Thu Jan 01 00:00:04 1970 +0000 @@ -0,0 +1,4 @@ +xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= 
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx +foo + +bar --===*=-- (glob) test attach for single patch: $ hg email --date '1970-1-1 0:1' -n -f quux -t foo -c bar -s test -a -r 2 | $FILTERBOUNDARY this patch series consists of 1 patches. displaying [PATCH] test ... Content-Type: multipart/mixed; boundary="===*==" (glob) MIME-Version: 1.0 Subject: [PATCH] test X-Mercurial-Node: ff2c9fa2018b15fa74b33363bda9527323e2a99f X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: (glob) X-Mercurial-Series-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar --===*= (glob) Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Patch subject is complete summary. --===*= (glob) Content-Type: text/x-patch; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Content-Disposition: attachment; filename=t2.patch # HG changeset patch # User test # Date 3 0 # Thu Jan 01 00:00:03 1970 +0000 # Node ID ff2c9fa2018b15fa74b33363bda9527323e2a99f # Parent 97d72e5f12c7e84f85064aa72e5a297142c36ed9 c diff -r 97d72e5f12c7 -r ff2c9fa2018b c --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/c Thu Jan 01 00:00:03 1970 +0000 @@ -0,0 +1,1 @@ +c --===*=-- (glob) test attach for single patch (quoted-printable): $ hg email --date '1970-1-1 0:1' -n -f quux -t foo -c bar -s test -a -r 4 | $FILTERBOUNDARY this patch series consists of 1 patches. displaying [PATCH] test ... 
Content-Type: multipart/mixed; boundary="===*==" (glob) MIME-Version: 1.0 Subject: [PATCH] test X-Mercurial-Node: a2ea8fc83dd8b93cfd86ac97b28287204ab806e1 X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: (glob) X-Mercurial-Series-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar --===*= (glob) Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Patch subject is complete summary. --===*= (glob) Content-Type: text/x-patch; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: quoted-printable Content-Disposition: attachment; filename=t2.patch # HG changeset patch # User test # Date 4 0 # Thu Jan 01 00:00:04 1970 +0000 # Node ID a2ea8fc83dd8b93cfd86ac97b28287204ab806e1 # Parent 909a00e13e9d78b575aeee23dddbada46d5a143f long line diff -r 909a00e13e9d -r a2ea8fc83dd8 long --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/long Thu Jan 01 00:00:04 1970 +0000 @@ -0,0 +1,4 @@ +xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= 
xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx +foo + +bar --===*=-- (glob) test attach and body for single patch: $ hg email --date '1970-1-1 0:1' -n -f quux -t foo -c bar -s test -a --body -r 2 | $FILTERBOUNDARY this patch series consists of 1 patches. displaying [PATCH] test ... Content-Type: multipart/mixed; boundary="===*==" (glob) MIME-Version: 1.0 Subject: [PATCH] test X-Mercurial-Node: ff2c9fa2018b15fa74b33363bda9527323e2a99f X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: (glob) X-Mercurial-Series-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar --===*= (glob) Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit # HG changeset patch # User test # Date 3 0 # Thu Jan 01 00:00:03 1970 +0000 # Node ID ff2c9fa2018b15fa74b33363bda9527323e2a99f # Parent 97d72e5f12c7e84f85064aa72e5a297142c36ed9 c diff -r 97d72e5f12c7 -r ff2c9fa2018b c --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/c Thu Jan 01 00:00:03 1970 +0000 @@ -0,0 +1,1 @@ +c --===*= (glob) Content-Type: text/x-patch; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Content-Disposition: attachment; filename=t2.patch # HG changeset patch # User test # Date 3 0 # Thu Jan 01 00:00:03 1970 +0000 # Node ID ff2c9fa2018b15fa74b33363bda9527323e2a99f # Parent 97d72e5f12c7e84f85064aa72e5a297142c36ed9 c diff -r 97d72e5f12c7 -r ff2c9fa2018b c --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/c Thu Jan 01 00:00:03 1970 +0000 @@ -0,0 +1,1 @@ +c --===*=-- (glob) test attach for multiple patches: $ hg email --date '1970-1-1 0:1' -n -f quux -t foo -c bar -s test -a \ > -r 0:1 -r 4 | $FILTERBOUNDARY this patch series consists of 3 patches. Write the introductory message for the patch series. displaying [PATCH 0 of 3] test ... 
Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 0 of 3] test Message-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar displaying [PATCH 1 of 3] a ... Content-Type: multipart/mixed; boundary="===*==" (glob) MIME-Version: 1.0 Subject: [PATCH 1 of 3] a X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 3 Message-Id: <8580ff50825a50c8f716.61@*> (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.61@*> (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:01 +0000 From: quux To: foo Cc: bar --===*= (glob) Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Patch subject is complete summary. --===*= (glob) Content-Type: text/x-patch; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Content-Disposition: attachment; filename=t2-1.patch # HG changeset patch # User test # Date 1 0 # Thu Jan 01 00:00:01 1970 +0000 # Node ID 8580ff50825a50c8f716709acdf8de0deddcd6ab # Parent 0000000000000000000000000000000000000000 a diff -r 000000000000 -r 8580ff50825a a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:01 1970 +0000 @@ -0,0 +1,1 @@ +a --===*=-- (glob) displaying [PATCH 2 of 3] b ... 
Content-Type: multipart/mixed; boundary="===*==" (glob) MIME-Version: 1.0 Subject: [PATCH 2 of 3] b X-Mercurial-Node: 97d72e5f12c7e84f85064aa72e5a297142c36ed9 X-Mercurial-Series-Index: 2 X-Mercurial-Series-Total: 3 Message-Id: <97d72e5f12c7e84f8506.62@*> (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.61@*> (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:02 +0000 From: quux To: foo Cc: bar --===*= (glob) Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Patch subject is complete summary. --===*= (glob) Content-Type: text/x-patch; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Content-Disposition: attachment; filename=t2-2.patch # HG changeset patch # User test # Date 2 0 # Thu Jan 01 00:00:02 1970 +0000 # Node ID 97d72e5f12c7e84f85064aa72e5a297142c36ed9 # Parent 8580ff50825a50c8f716709acdf8de0deddcd6ab b diff -r 8580ff50825a -r 97d72e5f12c7 b --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/b Thu Jan 01 00:00:02 1970 +0000 @@ -0,0 +1,1 @@ +b --===*=-- (glob) displaying [PATCH 3 of 3] long line ... Content-Type: multipart/mixed; boundary="===*==" (glob) MIME-Version: 1.0 Subject: [PATCH 3 of 3] long line X-Mercurial-Node: a2ea8fc83dd8b93cfd86ac97b28287204ab806e1 X-Mercurial-Series-Index: 3 X-Mercurial-Series-Total: 3 Message-Id: (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.61@*> (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:03 +0000 From: quux To: foo Cc: bar --===*= (glob) Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Patch subject is complete summary. 
--===*= (glob) Content-Type: text/x-patch; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: quoted-printable Content-Disposition: attachment; filename=t2-3.patch # HG changeset patch # User test # Date 4 0 # Thu Jan 01 00:00:04 1970 +0000 # Node ID a2ea8fc83dd8b93cfd86ac97b28287204ab806e1 # Parent 909a00e13e9d78b575aeee23dddbada46d5a143f long line diff -r 909a00e13e9d -r a2ea8fc83dd8 long --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/long Thu Jan 01 00:00:04 1970 +0000 @@ -0,0 +1,4 @@ +xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx +foo + +bar --===*=-- (glob) test intro for single patch: $ hg email --date '1970-1-1 0:1' -n --intro -f quux -t foo -c bar -s test \ > -r 2 this patch series consists of 1 patches. Write the introductory message for the patch series. displaying [PATCH 0 of 1] test ... 
Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 0 of 1] test Message-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar displaying [PATCH 1 of 1] c ... Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 1 of 1] c X-Mercurial-Node: ff2c9fa2018b15fa74b33363bda9527323e2a99f X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: (glob) X-Mercurial-Series-Id: (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:01 +0000 From: quux To: foo Cc: bar # HG changeset patch # User test # Date 3 0 # Thu Jan 01 00:00:03 1970 +0000 # Node ID ff2c9fa2018b15fa74b33363bda9527323e2a99f # Parent 97d72e5f12c7e84f85064aa72e5a297142c36ed9 c diff -r 97d72e5f12c7 -r ff2c9fa2018b c --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/c Thu Jan 01 00:00:03 1970 +0000 @@ -0,0 +1,1 @@ +c test --desc without --intro for a single patch: $ echo foo > intro.text $ hg email --date '1970-1-1 0:1' -n --desc intro.text -f quux -t foo -c bar \ > -s test -r 2 this patch series consists of 1 patches. displaying [PATCH 0 of 1] test ... Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 0 of 1] test Message-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar foo displaying [PATCH 1 of 1] c ... 
Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 1 of 1] c X-Mercurial-Node: ff2c9fa2018b15fa74b33363bda9527323e2a99f X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: (glob) X-Mercurial-Series-Id: (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:01 +0000 From: quux To: foo Cc: bar # HG changeset patch # User test # Date 3 0 # Thu Jan 01 00:00:03 1970 +0000 # Node ID ff2c9fa2018b15fa74b33363bda9527323e2a99f # Parent 97d72e5f12c7e84f85064aa72e5a297142c36ed9 c diff -r 97d72e5f12c7 -r ff2c9fa2018b c --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/c Thu Jan 01 00:00:03 1970 +0000 @@ -0,0 +1,1 @@ +c test intro for multiple patches: $ hg email --date '1970-1-1 0:1' -n --intro -f quux -t foo -c bar -s test \ > -r 0:1 this patch series consists of 2 patches. Write the introductory message for the patch series. displaying [PATCH 0 of 2] test ... Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 0 of 2] test Message-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar displaying [PATCH 1 of 2] a ... 
Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 1 of 2] a X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 2 Message-Id: <8580ff50825a50c8f716.61@*> (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.61@*> (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:01 +0000 From: quux To: foo Cc: bar # HG changeset patch # User test # Date 1 0 # Thu Jan 01 00:00:01 1970 +0000 # Node ID 8580ff50825a50c8f716709acdf8de0deddcd6ab # Parent 0000000000000000000000000000000000000000 a diff -r 000000000000 -r 8580ff50825a a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:01 1970 +0000 @@ -0,0 +1,1 @@ +a displaying [PATCH 2 of 2] b ... Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 2 of 2] b X-Mercurial-Node: 97d72e5f12c7e84f85064aa72e5a297142c36ed9 X-Mercurial-Series-Index: 2 X-Mercurial-Series-Total: 2 Message-Id: <97d72e5f12c7e84f8506.62@*> (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.61@*> (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:02 +0000 From: quux To: foo Cc: bar # HG changeset patch # User test # Date 2 0 # Thu Jan 01 00:00:02 1970 +0000 # Node ID 97d72e5f12c7e84f85064aa72e5a297142c36ed9 # Parent 8580ff50825a50c8f716709acdf8de0deddcd6ab b diff -r 8580ff50825a -r 97d72e5f12c7 b --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/b Thu Jan 01 00:00:02 1970 +0000 @@ -0,0 +1,1 @@ +b test reply-to via config: $ hg email --date '1970-1-1 0:1' -n -f quux -t foo -c bar -s test -r 2 \ > --config patchbomb.reply-to='baz@example.com' this patch series consists of 1 patches. displaying [PATCH] test ... 
Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH] test X-Mercurial-Node: ff2c9fa2018b15fa74b33363bda9527323e2a99f X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: (glob) X-Mercurial-Series-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar Reply-To: baz@example.com # HG changeset patch # User test # Date 3 0 # Thu Jan 01 00:00:03 1970 +0000 # Node ID ff2c9fa2018b15fa74b33363bda9527323e2a99f # Parent 97d72e5f12c7e84f85064aa72e5a297142c36ed9 c diff -r 97d72e5f12c7 -r ff2c9fa2018b c --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/c Thu Jan 01 00:00:03 1970 +0000 @@ -0,0 +1,1 @@ +c test reply-to via command line: $ hg email --date '1970-1-1 0:1' -n -f quux -t foo -c bar -s test -r 2 \ > --reply-to baz --reply-to fred this patch series consists of 1 patches. displaying [PATCH] test ... Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH] test X-Mercurial-Node: ff2c9fa2018b15fa74b33363bda9527323e2a99f X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: (glob) X-Mercurial-Series-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar Reply-To: baz, fred # HG changeset patch # User test # Date 3 0 # Thu Jan 01 00:00:03 1970 +0000 # Node ID ff2c9fa2018b15fa74b33363bda9527323e2a99f # Parent 97d72e5f12c7e84f85064aa72e5a297142c36ed9 c diff -r 97d72e5f12c7 -r ff2c9fa2018b c --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/c Thu Jan 01 00:00:03 1970 +0000 @@ -0,0 +1,1 @@ +c tagging csets: $ hg tag -r0 zero zero.foo $ hg tag -r1 one one.patch $ hg tag -r2 two two.diff test inline for single named patch: $ hg email --date '1970-1-1 0:1' -n -f quux -t foo -c bar -s test -i \ > -r 2 | $FILTERBOUNDARY this patch series consists of 1 patches. displaying [PATCH] test ... 
Content-Type: multipart/mixed; boundary="===*==" (glob) MIME-Version: 1.0 Subject: [PATCH] test X-Mercurial-Node: ff2c9fa2018b15fa74b33363bda9527323e2a99f X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: (glob) X-Mercurial-Series-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar --===*= (glob) Content-Type: text/x-patch; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Content-Disposition: inline; filename=two.diff # HG changeset patch # User test # Date 3 0 # Thu Jan 01 00:00:03 1970 +0000 # Node ID ff2c9fa2018b15fa74b33363bda9527323e2a99f # Parent 97d72e5f12c7e84f85064aa72e5a297142c36ed9 c diff -r 97d72e5f12c7 -r ff2c9fa2018b c --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/c Thu Jan 01 00:00:03 1970 +0000 @@ -0,0 +1,1 @@ +c --===*=-- (glob) test inline for multiple named/unnamed patches: $ hg email --date '1970-1-1 0:1' -n -f quux -t foo -c bar -s test -i \ > -r 0:1 | $FILTERBOUNDARY this patch series consists of 2 patches. Write the introductory message for the patch series. displaying [PATCH 0 of 2] test ... Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 0 of 2] test Message-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar displaying [PATCH 1 of 2] a ... 
Content-Type: multipart/mixed; boundary="===*==" (glob) MIME-Version: 1.0 Subject: [PATCH 1 of 2] a X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 2 Message-Id: <8580ff50825a50c8f716.61@*> (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.61@*> (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:01 +0000 From: quux To: foo Cc: bar --===*= (glob) Content-Type: text/x-patch; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Content-Disposition: inline; filename=t2-1.patch # HG changeset patch # User test # Date 1 0 # Thu Jan 01 00:00:01 1970 +0000 # Node ID 8580ff50825a50c8f716709acdf8de0deddcd6ab # Parent 0000000000000000000000000000000000000000 a diff -r 000000000000 -r 8580ff50825a a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:01 1970 +0000 @@ -0,0 +1,1 @@ +a --===*=-- (glob) displaying [PATCH 2 of 2] b ... Content-Type: multipart/mixed; boundary="===*==" (glob) MIME-Version: 1.0 Subject: [PATCH 2 of 2] b X-Mercurial-Node: 97d72e5f12c7e84f85064aa72e5a297142c36ed9 X-Mercurial-Series-Index: 2 X-Mercurial-Series-Total: 2 Message-Id: <97d72e5f12c7e84f8506.62@*> (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.61@*> (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:02 +0000 From: quux To: foo Cc: bar --===*= (glob) Content-Type: text/x-patch; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Content-Disposition: inline; filename=one.patch # HG changeset patch # User test # Date 2 0 # Thu Jan 01 00:00:02 1970 +0000 # Node ID 97d72e5f12c7e84f85064aa72e5a297142c36ed9 # Parent 8580ff50825a50c8f716709acdf8de0deddcd6ab b diff -r 8580ff50825a -r 97d72e5f12c7 b --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/b Thu Jan 01 00:00:02 1970 +0000 @@ -0,0 +1,1 @@ +b --===*=-- (glob) test inreplyto: $ hg email --date 
'1970-1-1 0:1' -n -f quux -t foo -c bar --in-reply-to baz \ > -r tip this patch series consists of 1 patches. displaying [PATCH] Added tag two, two.diff for changeset ff2c9fa2018b ... Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH] Added tag two, two.diff for changeset ff2c9fa2018b X-Mercurial-Node: 7aead2484924c445ad8ce2613df91f52f9e502ed X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: <7aead2484924c445ad8c.60@*> (glob) X-Mercurial-Series-Id: <7aead2484924c445ad8c.60@*> (glob) In-Reply-To: References: User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar # HG changeset patch # User test # Date 0 0 # Thu Jan 01 00:00:00 1970 +0000 # Node ID 7aead2484924c445ad8ce2613df91f52f9e502ed # Parent 045ca29b1ea20e4940411e695e20e521f2f0f98e Added tag two, two.diff for changeset ff2c9fa2018b diff -r 045ca29b1ea2 -r 7aead2484924 .hgtags --- a/.hgtags Thu Jan 01 00:00:00 1970 +0000 +++ b/.hgtags Thu Jan 01 00:00:00 1970 +0000 @@ -2,3 +2,5 @@ 8580ff50825a50c8f716709acdf8de0deddcd6ab zero.foo 97d72e5f12c7e84f85064aa72e5a297142c36ed9 one 97d72e5f12c7e84f85064aa72e5a297142c36ed9 one.patch +ff2c9fa2018b15fa74b33363bda9527323e2a99f two +ff2c9fa2018b15fa74b33363bda9527323e2a99f two.diff no intro message in non-interactive mode $ hg email --date '1970-1-1 0:1' -n -f quux -t foo -c bar --in-reply-to baz \ > -r 0:1 this patch series consists of 2 patches. (optional) Subject: [PATCH 0 of 2] displaying [PATCH 1 of 2] a ... 
Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 1 of 2] a X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 2 Message-Id: <8580ff50825a50c8f716.60@*> (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.60@*> (glob) In-Reply-To: References: User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar # HG changeset patch # User test # Date 1 0 # Thu Jan 01 00:00:01 1970 +0000 # Node ID 8580ff50825a50c8f716709acdf8de0deddcd6ab # Parent 0000000000000000000000000000000000000000 a diff -r 000000000000 -r 8580ff50825a a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:01 1970 +0000 @@ -0,0 +1,1 @@ +a displaying [PATCH 2 of 2] b ... Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 2 of 2] b X-Mercurial-Node: 97d72e5f12c7e84f85064aa72e5a297142c36ed9 X-Mercurial-Series-Index: 2 X-Mercurial-Series-Total: 2 Message-Id: <97d72e5f12c7e84f8506.61@*> (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.60@*> (glob) In-Reply-To: References: User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:01 +0000 From: quux To: foo Cc: bar # HG changeset patch # User test # Date 2 0 # Thu Jan 01 00:00:02 1970 +0000 # Node ID 97d72e5f12c7e84f85064aa72e5a297142c36ed9 # Parent 8580ff50825a50c8f716709acdf8de0deddcd6ab b diff -r 8580ff50825a -r 97d72e5f12c7 b --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/b Thu Jan 01 00:00:02 1970 +0000 @@ -0,0 +1,1 @@ +b $ hg email --date '1970-1-1 0:1' -n -f quux -t foo -c bar --in-reply-to baz \ > -s test -r 0:1 this patch series consists of 2 patches. Write the introductory message for the patch series. displaying [PATCH 0 of 2] test ... 
Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 0 of 2] test Message-Id: (glob) In-Reply-To: References: User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar displaying [PATCH 1 of 2] a ... Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 1 of 2] a X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 2 Message-Id: <8580ff50825a50c8f716.61@*> (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.61@*> (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:01 +0000 From: quux To: foo Cc: bar # HG changeset patch # User test # Date 1 0 # Thu Jan 01 00:00:01 1970 +0000 # Node ID 8580ff50825a50c8f716709acdf8de0deddcd6ab # Parent 0000000000000000000000000000000000000000 a diff -r 000000000000 -r 8580ff50825a a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:01 1970 +0000 @@ -0,0 +1,1 @@ +a displaying [PATCH 2 of 2] b ... 
Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 2 of 2] b X-Mercurial-Node: 97d72e5f12c7e84f85064aa72e5a297142c36ed9 X-Mercurial-Series-Index: 2 X-Mercurial-Series-Total: 2 Message-Id: <97d72e5f12c7e84f8506.62@*> (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.61@*> (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:02 +0000 From: quux To: foo Cc: bar # HG changeset patch # User test # Date 2 0 # Thu Jan 01 00:00:02 1970 +0000 # Node ID 97d72e5f12c7e84f85064aa72e5a297142c36ed9 # Parent 8580ff50825a50c8f716709acdf8de0deddcd6ab b diff -r 8580ff50825a -r 97d72e5f12c7 b --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/b Thu Jan 01 00:00:02 1970 +0000 @@ -0,0 +1,1 @@ +b test single flag for single patch (and no warning when not mailing dirty rev): $ hg up -qr1 $ echo dirt > a $ hg email --date '1970-1-1 0:1' -n --flag fooFlag -f quux -t foo -c bar -s test \ > -r 2 | $FILTERBOUNDARY this patch series consists of 1 patches. displaying [PATCH fooFlag] test ... 
Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH fooFlag] test X-Mercurial-Node: ff2c9fa2018b15fa74b33363bda9527323e2a99f X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: (glob) X-Mercurial-Series-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar # HG changeset patch # User test # Date 3 0 # Thu Jan 01 00:00:03 1970 +0000 # Node ID ff2c9fa2018b15fa74b33363bda9527323e2a99f # Parent 97d72e5f12c7e84f85064aa72e5a297142c36ed9 c diff -r 97d72e5f12c7 -r ff2c9fa2018b c --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/c Thu Jan 01 00:00:03 1970 +0000 @@ -0,0 +1,1 @@ +c test single flag for multiple patches (and warning when mailing dirty rev): $ hg email --date '1970-1-1 0:1' -n --flag fooFlag -f quux -t foo -c bar -s test \ > -r 0:1 warning: working directory has uncommitted changes this patch series consists of 2 patches. Write the introductory message for the patch series. displaying [PATCH 0 of 2 fooFlag] test ... Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 0 of 2 fooFlag] test Message-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar displaying [PATCH 1 of 2 fooFlag] a ... 
Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 1 of 2 fooFlag] a X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 2 Message-Id: <8580ff50825a50c8f716.61@*> (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.61@*> (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:01 +0000 From: quux To: foo Cc: bar # HG changeset patch # User test # Date 1 0 # Thu Jan 01 00:00:01 1970 +0000 # Node ID 8580ff50825a50c8f716709acdf8de0deddcd6ab # Parent 0000000000000000000000000000000000000000 a diff -r 000000000000 -r 8580ff50825a a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:01 1970 +0000 @@ -0,0 +1,1 @@ +a displaying [PATCH 2 of 2 fooFlag] b ... Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 2 of 2 fooFlag] b X-Mercurial-Node: 97d72e5f12c7e84f85064aa72e5a297142c36ed9 X-Mercurial-Series-Index: 2 X-Mercurial-Series-Total: 2 Message-Id: <97d72e5f12c7e84f8506.62@*> (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.61@*> (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:02 +0000 From: quux To: foo Cc: bar # HG changeset patch # User test # Date 2 0 # Thu Jan 01 00:00:02 1970 +0000 # Node ID 97d72e5f12c7e84f85064aa72e5a297142c36ed9 # Parent 8580ff50825a50c8f716709acdf8de0deddcd6ab b diff -r 8580ff50825a -r 97d72e5f12c7 b --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/b Thu Jan 01 00:00:02 1970 +0000 @@ -0,0 +1,1 @@ +b $ hg revert --no-b a $ hg up -q test multiple flags for single patch: $ hg email --date '1970-1-1 0:1' -n --flag fooFlag --flag barFlag -f quux -t foo \ > -c bar -s test -r 2 this patch series consists of 1 patches. displaying [PATCH fooFlag barFlag] test ... 
Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH fooFlag barFlag] test X-Mercurial-Node: ff2c9fa2018b15fa74b33363bda9527323e2a99f X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: (glob) X-Mercurial-Series-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar # HG changeset patch # User test # Date 3 0 # Thu Jan 01 00:00:03 1970 +0000 # Node ID ff2c9fa2018b15fa74b33363bda9527323e2a99f # Parent 97d72e5f12c7e84f85064aa72e5a297142c36ed9 c diff -r 97d72e5f12c7 -r ff2c9fa2018b c --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/c Thu Jan 01 00:00:03 1970 +0000 @@ -0,0 +1,1 @@ +c test multiple flags for multiple patches: $ hg email --date '1970-1-1 0:1' -n --flag fooFlag --flag barFlag -f quux -t foo \ > -c bar -s test -r 0:1 this patch series consists of 2 patches. Write the introductory message for the patch series. displaying [PATCH 0 of 2 fooFlag barFlag] test ... Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 0 of 2 fooFlag barFlag] test Message-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:00 +0000 From: quux To: foo Cc: bar displaying [PATCH 1 of 2 fooFlag barFlag] a ... 
Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 1 of 2 fooFlag barFlag] a X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 2 Message-Id: <8580ff50825a50c8f716.61@*> (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.61@*> (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:01 +0000 From: quux To: foo Cc: bar # HG changeset patch # User test # Date 1 0 # Thu Jan 01 00:00:01 1970 +0000 # Node ID 8580ff50825a50c8f716709acdf8de0deddcd6ab # Parent 0000000000000000000000000000000000000000 a diff -r 000000000000 -r 8580ff50825a a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:01 1970 +0000 @@ -0,0 +1,1 @@ +a displaying [PATCH 2 of 2 fooFlag barFlag] b ... Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 2 of 2 fooFlag barFlag] b X-Mercurial-Node: 97d72e5f12c7e84f85064aa72e5a297142c36ed9 X-Mercurial-Series-Index: 2 X-Mercurial-Series-Total: 2 Message-Id: <97d72e5f12c7e84f8506.62@*> (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.61@*> (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Thu, 01 Jan 1970 00:01:02 +0000 From: quux To: foo Cc: bar # HG changeset patch # User test # Date 2 0 # Thu Jan 01 00:00:02 1970 +0000 # Node ID 97d72e5f12c7e84f85064aa72e5a297142c36ed9 # Parent 8580ff50825a50c8f716709acdf8de0deddcd6ab b diff -r 8580ff50825a -r 97d72e5f12c7 b --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/b Thu Jan 01 00:00:02 1970 +0000 @@ -0,0 +1,1 @@ +b test multi-address parsing: $ hg email --date '1980-1-1 0:1' -m tmp.mbox -f quux -t 'spam' \ > -t toast -c 'foo,bar@example.com' -c '"A, B <>" ' -s test -r 0 \ > --config email.bcc='"Quux, A." ' this patch series consists of 1 patches. sending [PATCH] test ... $ cat < tmp.mbox From quux ... ... .. 
..:..:.. .... (re) Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH] test X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: <8580ff50825a50c8f716.315532860@*> (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.315532860@*> (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Tue, 01 Jan 1980 00:01:00 +0000 From: quux To: spam , eggs, toast Cc: foo, bar@example.com, "A, B <>" Bcc: "Quux, A." # HG changeset patch # User test # Date 1 0 # Thu Jan 01 00:00:01 1970 +0000 # Node ID 8580ff50825a50c8f716709acdf8de0deddcd6ab # Parent 0000000000000000000000000000000000000000 a diff -r 000000000000 -r 8580ff50825a a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:01 1970 +0000 @@ -0,0 +1,1 @@ +a test multi-byte domain parsing: $ UUML=`$PYTHON -c 'import sys; sys.stdout.write("\374")'` $ HGENCODING=iso-8859-1 $ export HGENCODING $ hg email --date '1980-1-1 0:1' -m tmp.mbox -f quux -t "bar@${UUML}nicode.com" -s test -r 0 this patch series consists of 1 patches. Cc: sending [PATCH] test ... $ cat tmp.mbox From quux ... ... .. ..:..:.. .... 
(re) Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH] test X-Mercurial-Node: 8580ff50825a50c8f716709acdf8de0deddcd6ab X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: <8580ff50825a50c8f716.315532860@*> (glob) X-Mercurial-Series-Id: <8580ff50825a50c8f716.315532860@*> (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Tue, 01 Jan 1980 00:01:00 +0000 From: quux To: bar@xn--nicode-2ya.com # HG changeset patch # User test # Date 1 0 # Thu Jan 01 00:00:01 1970 +0000 # Node ID 8580ff50825a50c8f716709acdf8de0deddcd6ab # Parent 0000000000000000000000000000000000000000 a diff -r 000000000000 -r 8580ff50825a a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:01 1970 +0000 @@ -0,0 +1,1 @@ +a test outgoing: $ hg up 1 0 files updated, 0 files merged, 6 files removed, 0 files unresolved $ hg branch test marked working directory as branch test (branches are permanent and global, did you want a bookmark?) $ echo d > d $ hg add d $ hg ci -md -d '4 0' $ echo d >> d $ hg ci -mdd -d '5 0' $ hg log -G --template "{rev}:{node|short} {desc|firstline}\n" @ 10:3b6f1ec9dde9 dd | o 9:2f9fa9b998c5 d | | o 8:7aead2484924 Added tag two, two.diff for changeset ff2c9fa2018b | | | o 7:045ca29b1ea2 Added tag one, one.patch for changeset 97d72e5f12c7 | | | o 6:5d5ef15dfe5e Added tag zero, zero.foo for changeset 8580ff50825a | | | o 5:240fb913fc1b isolatin 8-bit encoding | | | o 4:a2ea8fc83dd8 long line | | | o 3:909a00e13e9d utf-8 content | | | o 2:ff2c9fa2018b c |/ o 1:97d72e5f12c7 b | o 0:8580ff50825a a $ hg phase --force --secret -r 10 $ hg email --date '1980-1-1 0:1' -n -t foo -s test -o ../t -r 'rev(10) or rev(6)' comparing with ../t From [test]: test this patch series consists of 6 patches. Write the introductory message for the patch series. Cc: displaying [PATCH 0 of 6] test ... 
Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 0 of 6] test Message-Id: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Tue, 01 Jan 1980 00:01:00 +0000 From: test To: foo displaying [PATCH 1 of 6] c ... Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 1 of 6] c X-Mercurial-Node: ff2c9fa2018b15fa74b33363bda9527323e2a99f X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 6 Message-Id: (glob) X-Mercurial-Series-Id: (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Tue, 01 Jan 1980 00:01:01 +0000 From: test To: foo # HG changeset patch # User test # Date 3 0 # Thu Jan 01 00:00:03 1970 +0000 # Node ID ff2c9fa2018b15fa74b33363bda9527323e2a99f # Parent 97d72e5f12c7e84f85064aa72e5a297142c36ed9 c diff -r 97d72e5f12c7 -r ff2c9fa2018b c --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/c Thu Jan 01 00:00:03 1970 +0000 @@ -0,0 +1,1 @@ +c displaying [PATCH 2 of 6] utf-8 content ... 
Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 8bit Subject: [PATCH 2 of 6] utf-8 content X-Mercurial-Node: 909a00e13e9d78b575aeee23dddbada46d5a143f X-Mercurial-Series-Index: 2 X-Mercurial-Series-Total: 6 Message-Id: <909a00e13e9d78b575ae.315532862@*> (glob) X-Mercurial-Series-Id: (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Tue, 01 Jan 1980 00:01:02 +0000 From: test To: foo # HG changeset patch # User test # Date 4 0 # Thu Jan 01 00:00:04 1970 +0000 # Node ID 909a00e13e9d78b575aeee23dddbada46d5a143f # Parent ff2c9fa2018b15fa74b33363bda9527323e2a99f utf-8 content diff -r ff2c9fa2018b -r 909a00e13e9d description --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/description Thu Jan 01 00:00:04 1970 +0000 @@ -0,0 +1,3 @@ +a multiline + +description diff -r ff2c9fa2018b -r 909a00e13e9d utf --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/utf Thu Jan 01 00:00:04 1970 +0000 @@ -0,0 +1,1 @@ +h\xc3\xb6mma! (esc) displaying [PATCH 3 of 6] long line ... 
Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: quoted-printable Subject: [PATCH 3 of 6] long line X-Mercurial-Node: a2ea8fc83dd8b93cfd86ac97b28287204ab806e1 X-Mercurial-Series-Index: 3 X-Mercurial-Series-Total: 6 Message-Id: (glob) X-Mercurial-Series-Id: (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Tue, 01 Jan 1980 00:01:03 +0000 From: test To: foo # HG changeset patch # User test # Date 4 0 # Thu Jan 01 00:00:04 1970 +0000 # Node ID a2ea8fc83dd8b93cfd86ac97b28287204ab806e1 # Parent 909a00e13e9d78b575aeee23dddbada46d5a143f long line diff -r 909a00e13e9d -r a2ea8fc83dd8 long --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/long Thu Jan 01 00:00:04 1970 +0000 @@ -0,0 +1,4 @@ +xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx= xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx +foo + +bar displaying [PATCH 4 of 6] isolatin 8-bit encoding ... 
Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 8bit Subject: [PATCH 4 of 6] isolatin 8-bit encoding X-Mercurial-Node: 240fb913fc1b7ff15ddb9f33e73d82bf5277c720 X-Mercurial-Series-Index: 4 X-Mercurial-Series-Total: 6 Message-Id: <240fb913fc1b7ff15ddb.315532864@*> (glob) X-Mercurial-Series-Id: (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Tue, 01 Jan 1980 00:01:04 +0000 From: test To: foo # HG changeset patch # User test # Date 5 0 # Thu Jan 01 00:00:05 1970 +0000 # Node ID 240fb913fc1b7ff15ddb9f33e73d82bf5277c720 # Parent a2ea8fc83dd8b93cfd86ac97b28287204ab806e1 isolatin 8-bit encoding diff -r a2ea8fc83dd8 -r 240fb913fc1b isolatin --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/isolatin Thu Jan 01 00:00:05 1970 +0000 @@ -0,0 +1,1 @@ +h\xf6mma! (esc) displaying [PATCH 5 of 6] Added tag zero, zero.foo for changeset 8580ff50825a ... Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 5 of 6] Added tag zero, zero.foo for changeset 8580ff50825a X-Mercurial-Node: 5d5ef15dfe5e7bd3a4ee154b5fff76c7945ec433 X-Mercurial-Series-Index: 5 X-Mercurial-Series-Total: 6 Message-Id: <5d5ef15dfe5e7bd3a4ee.315532865@*> (glob) X-Mercurial-Series-Id: (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Tue, 01 Jan 1980 00:01:05 +0000 From: test To: foo # HG changeset patch # User test # Date 0 0 # Thu Jan 01 00:00:00 1970 +0000 # Node ID 5d5ef15dfe5e7bd3a4ee154b5fff76c7945ec433 # Parent 240fb913fc1b7ff15ddb9f33e73d82bf5277c720 Added tag zero, zero.foo for changeset 8580ff50825a diff -r 240fb913fc1b -r 5d5ef15dfe5e .hgtags --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/.hgtags Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,2 @@ +8580ff50825a50c8f716709acdf8de0deddcd6ab zero +8580ff50825a50c8f716709acdf8de0deddcd6ab zero.foo displaying [PATCH 6 of 6] d ... 
Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH 6 of 6] d X-Mercurial-Node: 2f9fa9b998c5fe3ac2bd9a2b14bfcbeecbc7c268 X-Mercurial-Series-Index: 6 X-Mercurial-Series-Total: 6 Message-Id: <2f9fa9b998c5fe3ac2bd.315532866@*> (glob) X-Mercurial-Series-Id: (glob) In-Reply-To: (glob) References: (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Tue, 01 Jan 1980 00:01:06 +0000 From: test To: foo # HG changeset patch # User test # Date 4 0 # Thu Jan 01 00:00:04 1970 +0000 # Branch test # Node ID 2f9fa9b998c5fe3ac2bd9a2b14bfcbeecbc7c268 # Parent 97d72e5f12c7e84f85064aa72e5a297142c36ed9 d diff -r 97d72e5f12c7 -r 2f9fa9b998c5 d --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/d Thu Jan 01 00:00:04 1970 +0000 @@ -0,0 +1,1 @@ +d Don't prompt for a CC header. $ echo "[email]" >> $HGRCPATH $ echo "cc=" >> $HGRCPATH dest#branch URIs: $ hg email --date '1980-1-1 0:1' -n -t foo -s test -o ../t#test comparing with ../t From [test]: test this patch series consists of 1 patches. displaying [PATCH] test ... 
Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH] test X-Mercurial-Node: 2f9fa9b998c5fe3ac2bd9a2b14bfcbeecbc7c268 X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: <2f9fa9b998c5fe3ac2bd.315532860@*> (glob) X-Mercurial-Series-Id: <2f9fa9b998c5fe3ac2bd.315532860@*> (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Tue, 01 Jan 1980 00:01:00 +0000 From: test To: foo # HG changeset patch # User test # Date 4 0 # Thu Jan 01 00:00:04 1970 +0000 # Branch test # Node ID 2f9fa9b998c5fe3ac2bd9a2b14bfcbeecbc7c268 # Parent 97d72e5f12c7e84f85064aa72e5a297142c36ed9 d diff -r 97d72e5f12c7 -r 2f9fa9b998c5 d --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/d Thu Jan 01 00:00:04 1970 +0000 @@ -0,0 +1,1 @@ +d Set up a fake sendmail program $ cat > pretendmail.sh << 'EOF' > #!/bin/sh > echo "$@" > cat > EOF $ chmod +x pretendmail.sh $ echo '[email]' >> $HGRCPATH $ echo "method=`pwd`/pretendmail.sh" >> $HGRCPATH Test introduction configuration ================================= $ echo '[patchbomb]' >> $HGRCPATH "auto" setting ---------------- $ echo 'intro=auto' >> $HGRCPATH single rev $ hg email --date '1980-1-1 0:1' -t foo -s test -r '10' | grep "Write the introductory message for the patch series." [1] single rev + flag $ hg email --date '1980-1-1 0:1' -t foo -s test -r '10' --intro | grep "Write the introductory message for the patch series." Write the introductory message for the patch series. Multi rev $ hg email --date '1980-1-1 0:1' -t foo -s test -r '9::' | grep "Write the introductory message for the patch series." Write the introductory message for the patch series. "never" setting ----------------- $ echo 'intro=never' >> $HGRCPATH single rev $ hg email --date '1980-1-1 0:1' -t foo -s test -r '10' | grep "Write the introductory message for the patch series." 
[1] single rev + flag $ hg email --date '1980-1-1 0:1' -t foo -s test -r '10' --intro | grep "Write the introductory message for the patch series." Write the introductory message for the patch series. Multi rev $ hg email --date '1980-1-1 0:1' -t foo -s test -r '9::' | grep "Write the introductory message for the patch series." [1] Multi rev + flag $ hg email --date '1980-1-1 0:1' -t foo -s test -r '9::' --intro | grep "Write the introductory message for the patch series." Write the introductory message for the patch series. "always" setting ----------------- $ echo 'intro=always' >> $HGRCPATH single rev $ hg email --date '1980-1-1 0:1' -t foo -s test -r '10' | grep "Write the introductory message for the patch series." Write the introductory message for the patch series. single rev + flag $ hg email --date '1980-1-1 0:1' -t foo -s test -r '10' --intro | grep "Write the introductory message for the patch series." Write the introductory message for the patch series. Multi rev $ hg email --date '1980-1-1 0:1' -t foo -s test -r '9::' | grep "Write the introductory message for the patch series." Write the introductory message for the patch series. Multi rev + flag $ hg email --date '1980-1-1 0:1' -t foo -s test -r '9::' --intro | grep "Write the introductory message for the patch series." Write the introductory message for the patch series. bad value setting ----------------- $ echo 'intro=mpmwearaclownnose' >> $HGRCPATH single rev $ hg email --date '1980-1-1 0:1' -v -t foo -s test -r '10' From [test]: test this patch series consists of 1 patches. 
warning: invalid patchbomb.intro value "mpmwearaclownnose" (should be one of always, never, auto) -f test foo Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Subject: [PATCH] test X-Mercurial-Node: 3b6f1ec9dde933a40a115a7990f8b320477231af X-Mercurial-Series-Index: 1 X-Mercurial-Series-Total: 1 Message-Id: <3b6f1ec9dde933a40a11*> (glob) X-Mercurial-Series-Id: <3b6f1ec9dde933a40a11.*> (glob) User-Agent: Mercurial-patchbomb/* (glob) Date: Tue, 01 Jan 1980 00:01:00 +0000 From: test To: foo # HG changeset patch # User test # Date 5 0 # Thu Jan 01 00:00:05 1970 +0000 # Branch test # Node ID 3b6f1ec9dde933a40a115a7990f8b320477231af # Parent 2f9fa9b998c5fe3ac2bd9a2b14bfcbeecbc7c268 dd diff -r 2f9fa9b998c5 -r 3b6f1ec9dde9 d --- a/d Thu Jan 01 00:00:04 1970 +0000 +++ b/d Thu Jan 01 00:00:05 1970 +0000 @@ -1,1 +1,2 @@ d +d sending [PATCH] test ... sending mail: $TESTTMP/t2/pretendmail.sh -f test foo Test pull url header ================================= basic version $ echo 'intro=auto' >> $HGRCPATH $ echo "publicurl=$TESTTMP/t2" >> $HGRCPATH $ hg email --date '1980-1-1 0:1' -t foo -s test -r '10' | grep '^#' abort: public url $TESTTMP/t2 is missing 3b6f1ec9dde9 (use "hg push $TESTTMP/t2 -r 3b6f1ec9dde9") [1] remote missing $ echo 'publicurl=$TESTTMP/missing' >> $HGRCPATH $ hg email --date '1980-1-1 0:1' -t foo -s test -r '10' unable to access public repo: $TESTTMP/missing abort: repository $TESTTMP/missing not found! [255] node missing at remote $ hg clone -r '9' . 
../t3 adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 3 files updating to branch test 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo 'publicurl=$TESTTMP/t3' >> $HGRCPATH $ hg email --date '1980-1-1 0:1' -t foo -s test -r '10' abort: public url $TESTTMP/t3 is missing 3b6f1ec9dde9 (use "hg push $TESTTMP/t3 -r 3b6f1ec9dde9") [255] mercurial-3.7.3/tests/test-largefiles-cache.t0000644000175000017500000001536012676531525020616 0ustar mpmmpm00000000000000Create user cache directory $ USERCACHE=`pwd`/cache; export USERCACHE $ cat <> ${HGRCPATH} > [extensions] > hgext.largefiles= > [largefiles] > usercache=${USERCACHE} > EOF $ mkdir -p ${USERCACHE} Create source repo, and commit adding largefile. $ hg init src $ cd src $ echo large > large $ hg add --large large $ hg commit -m 'add largefile' $ hg rm large $ hg commit -m 'branchhead without largefile' large $ hg up -qr 0 $ rm large $ echo "0000000000000000000000000000000000000000" > .hglf/large $ hg commit -m 'commit missing file with corrupt standin' large abort: large: file not found! [255] $ hg up -Cqr 0 $ cd .. Discard all cached largefiles in USERCACHE $ rm -rf ${USERCACHE} Create mirror repo, and pull from source without largefile: "pull" is used instead of "clone" for suppression of (1) updating to tip (= caching largefile from source repo), and (2) recording source repo as "default" path in .hg/hgrc. $ hg init mirror $ cd mirror $ hg pull ../src pulling from ../src requesting all changes adding changesets adding manifests adding file changes added 2 changesets with 1 changes to 1 files (run 'hg update' to get a working copy) Update working directory to "tip", which requires largefile("large"), but there is no cache file for it. So, hg must treat it as "missing"(!) file. 
$ hg update -r0 getting changed largefiles large: largefile 7f7097b041ccf68cc5561e9600da4655d21c6d18 not available from file:/*/$TESTTMP/mirror (glob) 0 largefiles updated, 0 removed 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg status ! large Update working directory to null: this cleanup .hg/largefiles/dirstate $ hg update null getting changed largefiles 0 largefiles updated, 0 removed 0 files updated, 0 files merged, 1 files removed, 0 files unresolved Update working directory to tip, again. $ hg update -r0 getting changed largefiles large: largefile 7f7097b041ccf68cc5561e9600da4655d21c6d18 not available from file:/*/$TESTTMP/mirror (glob) 0 largefiles updated, 0 removed 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg status ! large $ cd .. Verify that largefiles from pulled branchheads are fetched, also to an empty repo $ hg init mirror2 $ hg -R mirror2 pull src -r0 pulling from src adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (run 'hg update' to get a working copy) #if unix-permissions Portable way to print file permissions: $ cat > ls-l.py < #!/usr/bin/env python > import sys, os > path = sys.argv[1] > print '%03o' % (os.lstat(path).st_mode & 0777) > EOF $ chmod +x ls-l.py Test that files in .hg/largefiles inherit mode from .hg/store, not from file in working copy: $ cd src $ chmod 750 .hg/store $ chmod 660 large $ echo change >> large $ hg commit -m change created new head $ ../ls-l.py .hg/largefiles/e151b474069de4ca6898f67ce2f2a7263adf8fea 640 Test permission of with files in .hg/largefiles created by update: $ cd ../mirror $ rm -r "$USERCACHE" .hg/largefiles # avoid links $ chmod 750 .hg/store $ hg pull ../src --update -q $ ../ls-l.py .hg/largefiles/e151b474069de4ca6898f67ce2f2a7263adf8fea 640 Test permission of files created by push: $ hg serve -R ../src -d -p $HGPORT --pid-file hg.pid \ > --config "web.allow_push=*" --config web.push_ssl=no $ cat 
hg.pid >> $DAEMON_PIDS $ echo change >> large $ hg commit -m change $ rm -r "$USERCACHE" $ hg push -q http://localhost:$HGPORT/ $ ../ls-l.py ../src/.hg/largefiles/b734e14a0971e370408ab9bce8d56d8485e368a9 640 $ cd .. #endif Test issue 4053 (remove --after on a deleted, uncommitted file shouldn't say it is missing, but a remove on a nonexistent unknown file still should. Same for a forget.) $ cd src $ touch x $ hg add x $ mv x y $ hg remove -A x y ENOENT ENOENT: * (glob) not removing y: file is untracked [1] $ hg add y $ mv y z $ hg forget y z ENOENT ENOENT: * (glob) not removing z: file is already untracked [1] Largefiles are accessible from the share's store $ cd .. $ hg share -q src share_dst --config extensions.share= $ hg -R share_dst update -r0 getting changed largefiles 1 largefiles updated, 0 removed 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo modified > share_dst/large $ hg -R share_dst ci -m modified created new head Only dirstate is in the local store for the share, and the largefile is in the share source's local store. Avoid the extra largefiles added in the unix conditional above. $ hash=`hg -R share_dst cat share_dst/.hglf/large` $ echo $hash e2fb5f2139d086ded2cb600d5a91a196e76bf020 $ find share_dst/.hg/largefiles/* | sort share_dst/.hg/largefiles/dirstate $ find src/.hg/largefiles/* | egrep "(dirstate|$hash)" | sort src/.hg/largefiles/dirstate src/.hg/largefiles/e2fb5f2139d086ded2cb600d5a91a196e76bf020 Inject corruption into the largefiles store and see how update handles that: $ cd src $ hg up -qC $ cat large modified $ rm large $ cat .hglf/large e2fb5f2139d086ded2cb600d5a91a196e76bf020 $ mv .hg/largefiles/e2fb5f2139d086ded2cb600d5a91a196e76bf020 .. 
$ echo corruption > .hg/largefiles/e2fb5f2139d086ded2cb600d5a91a196e76bf020 $ hg up -C getting changed largefiles large: data corruption in $TESTTMP/src/.hg/largefiles/e2fb5f2139d086ded2cb600d5a91a196e76bf020 with hash 6a7bb2556144babe3899b25e5428123735bb1e27 (glob) 0 largefiles updated, 0 removed 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg st ! large ? z $ rm .hg/largefiles/e2fb5f2139d086ded2cb600d5a91a196e76bf020 #if serve Test coverage of error handling from putlfile: $ mkdir $TESTTMP/mirrorcache $ hg serve -R ../mirror -d -p $HGPORT1 --pid-file hg.pid --config largefiles.usercache=$TESTTMP/mirrorcache $ cat hg.pid >> $DAEMON_PIDS $ hg push http://localhost:$HGPORT1 -f --config files.usercache=nocache pushing to http://localhost:$HGPORT1/ searching for changes abort: remotestore: could not open file $TESTTMP/src/.hg/largefiles/e2fb5f2139d086ded2cb600d5a91a196e76bf020: HTTP Error 403: ssl required [255] $ rm .hg/largefiles/e2fb5f2139d086ded2cb600d5a91a196e76bf020 Test coverage of 'missing from store': $ hg serve -R ../mirror -d -p $HGPORT2 --pid-file hg.pid --config largefiles.usercache=$TESTTMP/mirrorcache --config "web.allow_push=*" --config web.push_ssl=no $ cat hg.pid >> $DAEMON_PIDS $ hg push http://localhost:$HGPORT2 -f --config largefiles.usercache=nocache pushing to http://localhost:$HGPORT2/ searching for changes abort: largefile e2fb5f2139d086ded2cb600d5a91a196e76bf020 missing from store (needs to be uploaded) [255] #endif mercurial-3.7.3/tests/test-merge4.t0000644000175000017500000000121612676531525016616 0ustar mpmmpm00000000000000 $ hg init $ echo This is file a1 > a $ hg add a $ hg commit -m "commit #0" $ echo This is file b1 > b $ hg add b $ hg commit -m "commit #1" $ hg update 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo This is file c1 > c $ hg add c $ hg commit -m "commit #2" created new head $ hg merge 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch 
merge, don't forget to commit) $ rm b $ echo This is file c22 > c Test hg behaves when committing with a missing file added by a merge $ hg commit -m "commit #3" abort: cannot commit merge with missing files [255] mercurial-3.7.3/tests/test-http-branchmap.t0000644000175000017500000000563612676531525020355 0ustar mpmmpm00000000000000#require killdaemons $ hgserve() { > hg serve -a localhost -p $HGPORT1 -d --pid-file=hg.pid \ > -E errors.log -v $@ > startup.log > # Grepping hg serve stdout would hang on Windows > grep -v 'listening at' startup.log > cat hg.pid >> "$DAEMON_PIDS" > } $ hg init a $ hg --encoding utf-8 -R a branch æ marked working directory as branch \xc3\xa6 (esc) (branches are permanent and global, did you want a bookmark?) $ echo foo > a/foo $ hg -R a ci -Am foo adding foo $ hgserve -R a --config web.push_ssl=False --config web.allow_push=* --encoding latin1 $ hg --encoding utf-8 clone http://localhost:$HGPORT1 b requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch \xc3\xa6 (esc) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg --encoding utf-8 -R b log changeset: 0:867c11ce77b8 branch: \xc3\xa6 (esc) tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: foo $ echo bar >> b/foo $ hg -R b ci -m bar $ hg --encoding utf-8 -R b push pushing to http://localhost:$HGPORT1/ searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files $ hg -R a --encoding utf-8 log changeset: 1:58e7c90d67cb branch: \xc3\xa6 (esc) tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: bar changeset: 0:867c11ce77b8 branch: \xc3\xa6 (esc) user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: foo $ killdaemons.py hg.pid verify 7e7d56fe4833 (encoding fallback in branchmap to maintain compatibility with 1.3.x) $ cat < oldhg > import sys > from mercurial 
import ui, hg, commands > > class StdoutWrapper(object): > def __init__(self, stdout): > self._file = stdout > > def write(self, data): > if data == '47\n': > # latin1 encoding is one %xx (3 bytes) shorter > data = '44\n' > elif data.startswith('%C3%A6 '): > # translate to latin1 encoding > data = '%%E6 %s' % data[7:] > self._file.write(data) > > def __getattr__(self, name): > return getattr(self._file, name) > > sys.stdout = StdoutWrapper(sys.stdout) > sys.stderr = StdoutWrapper(sys.stderr) > > myui = ui.ui() > repo = hg.repository(myui, 'a') > commands.serve(myui, repo, stdio=True, cmdserver=False) > EOF $ echo baz >> b/foo $ hg -R b ci -m baz $ hg push -R b -e 'python oldhg' ssh://dummy/ --encoding latin1 pushing to ssh://dummy/ searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files mercurial-3.7.3/tests/test-mq-symlinks.t0000644000175000017500000000337612676531525017730 0ustar mpmmpm00000000000000#require symlink $ echo "[extensions]" >> $HGRCPATH $ echo "mq=" >> $HGRCPATH $ hg init $ hg qinit $ hg qnew base.patch $ echo aaa > a $ echo bbb > b $ echo ccc > c $ hg add a b c $ hg qrefresh $ readlink.py a a -> a not a symlink test replacing a file with a symlink $ hg qnew symlink.patch $ rm a $ ln -s b a $ hg qrefresh --git $ readlink.py a a -> b $ hg qpop popping symlink.patch now at: base.patch $ hg qpush applying symlink.patch now at: symlink.patch $ readlink.py a a -> b test updating a symlink $ rm a $ ln -s c a $ hg qnew --git -f updatelink $ readlink.py a a -> c $ hg qpop popping updatelink now at: symlink.patch $ hg qpush --debug applying updatelink patching file a committing files: a committing manifest committing changelog now at: updatelink $ readlink.py a a -> c $ hg st test replacing a symlink with a file $ ln -s c s $ hg add s $ hg qnew --git -f addlink $ rm s $ echo sss > s $ hg qnew --git -f replacelinkwithfile $ hg qpop popping replacelinkwithfile now 
at: addlink $ hg qpush applying replacelinkwithfile now at: replacelinkwithfile $ cat s sss $ hg st test symlink removal $ hg qnew removesl.patch $ hg rm a $ hg qrefresh --git $ hg qpop popping removesl.patch now at: replacelinkwithfile $ hg qpush applying removesl.patch now at: removesl.patch $ hg st -c C b C c C s replace broken symlink with another broken symlink $ ln -s linka linka $ hg add linka $ hg qnew link $ hg mv linka linkb $ rm linkb $ ln -s linkb linkb $ hg qnew movelink $ hg qpop popping movelink now at: link $ hg qpush applying movelink now at: movelink $ readlink.py linkb linkb -> linkb mercurial-3.7.3/tests/test-convert-cvsnt-mergepoints.t0000644000175000017500000001005112676531525022575 0ustar mpmmpm00000000000000#require cvs $ filterpath() > { > eval "$@" | sed "s:$CVSROOT:*REPO*:g" > } $ cvscall() > { > cvs -f "$@" > } output of 'cvs ci' varies unpredictably, so discard most of it -- just keep the part that matters $ cvsci() > { > cvs -f ci -f "$@" > /dev/null > } $ hgcat() > { > hg --cwd src-hg cat -r tip "$1" > } $ echo "[extensions]" >> $HGRCPATH $ echo "convert = " >> $HGRCPATH create cvs repository $ mkdir cvsmaster $ cd cvsmaster $ CVSROOT=`pwd` $ export CVSROOT $ CVS_OPTIONS=-f $ export CVS_OPTIONS $ cd .. $ rmdir cvsmaster $ filterpath cvscall -Q -d "$CVSROOT" init checkout #1: add foo.txt $ cvscall -Q checkout -d cvsworktmp . $ cd cvsworktmp $ mkdir foo $ cvscall -Q add foo $ cd foo $ echo foo > foo.txt $ cvscall -Q add foo.txt $ cvsci -m "add foo.txt" foo.txt $ cd ../.. 
$ rm -rf cvsworktmp checkout #2: create MYBRANCH1 and modify foo.txt on it $ cvscall -Q checkout -d cvswork foo $ cd cvswork $ cvscall -q rtag -b -R MYBRANCH1 foo $ cvscall -Q update -P -r MYBRANCH1 $ echo bar > foo.txt $ cvsci -m "bar" foo.txt $ echo baz > foo.txt $ cvsci -m "baz" foo.txt create MYBRANCH1_2 and modify foo.txt some more $ cvscall -q rtag -b -R -r MYBRANCH1 MYBRANCH1_2 foo $ cvscall -Q update -P -r MYBRANCH1_2 $ echo bazzie > foo.txt $ cvsci -m "bazzie" foo.txt create MYBRANCH1_1 and modify foo.txt yet again $ cvscall -q rtag -b -R MYBRANCH1_1 foo $ cvscall -Q update -P -r MYBRANCH1_1 $ echo quux > foo.txt $ cvsci -m "quux" foo.txt merge MYBRANCH1 to MYBRANCH1_1 $ filterpath cvscall -Q update -P -jMYBRANCH1 rcsmerge: warning: conflicts during merge RCS file: *REPO*/foo/foo.txt,v retrieving revision 1.1 retrieving revision 1.1.2.2 Merging differences between 1.1 and 1.1.2.2 into foo.txt carefully placed sleep to dodge cvs bug (optimization?) where it sometimes ignores a "commit" command if it comes too fast (the -f option in cvsci seems to work for all the other commits in this script) $ sleep 1 $ echo xyzzy > foo.txt $ cvsci -m "merge1+clobber" foo.txt #if unix-permissions return to trunk and merge MYBRANCH1_2 $ cvscall -Q update -P -A $ filterpath cvscall -Q update -P -jMYBRANCH1_2 RCS file: *REPO*/foo/foo.txt,v retrieving revision 1.1 retrieving revision 1.1.2.2.2.1 Merging differences between 1.1 and 1.1.2.2.2.1 into foo.txt $ cvsci -m "merge2" foo.txt $ REALCVS=`which cvs` $ echo "for x in \$*; do if [ \"\$x\" = \"rlog\" ]; then echo \"RCS file: $CVSROOT/foo/foo.txt,v\"; cat \"$TESTDIR/test-convert-cvsnt-mergepoints.rlog\"; exit 0; fi; done; $REALCVS \$*" > ../cvs $ chmod +x ../cvs $ PATH=..:${PATH} hg debugcvsps --parents foo collecting CVS rlog 7 log entries creating changesets 7 changeset entries --------------------- PatchSet 1 Date: * (glob) Author: user Branch: HEAD Tag: (none) Branchpoints: MYBRANCH1, MYBRANCH1_1 Log: foo.txt Members: 
foo.txt:INITIAL->1.1 --------------------- PatchSet 2 Date: * (glob) Author: user Branch: MYBRANCH1 Tag: (none) Parent: 1 Log: bar Members: foo.txt:1.1->1.1.2.1 --------------------- PatchSet 3 Date: * (glob) Author: user Branch: MYBRANCH1 Tag: (none) Branchpoints: MYBRANCH1_2 Parent: 2 Log: baz Members: foo.txt:1.1.2.1->1.1.2.2 --------------------- PatchSet 4 Date: * (glob) Author: user Branch: MYBRANCH1_1 Tag: (none) Parent: 1 Log: quux Members: foo.txt:1.1->1.1.4.1 --------------------- PatchSet 5 Date: * (glob) Author: user Branch: MYBRANCH1_2 Tag: (none) Parent: 3 Log: bazzie Members: foo.txt:1.1.2.2->1.1.2.2.2.1 --------------------- PatchSet 6 Date: * (glob) Author: user Branch: HEAD Tag: (none) Parents: 1,5 Log: merge Members: foo.txt:1.1->1.2 --------------------- PatchSet 7 Date: * (glob) Author: user Branch: MYBRANCH1_1 Tag: (none) Parents: 4,3 Log: merge Members: foo.txt:1.1.4.1->1.1.4.2 #endif $ cd .. mercurial-3.7.3/tests/test-clone-cgi.t0000644000175000017500000000220212676531525017267 0ustar mpmmpm00000000000000#require no-msys # MSYS will translate web paths as if they were file paths This is a test of the wire protocol over CGI-based hgweb. initialize repository $ hg init test $ cd test $ echo a > a $ hg ci -Ama adding a $ cd .. $ cat >hgweb.cgi < # > # An example CGI script to use hgweb, edit as necessary > import cgitb > cgitb.enable() > from mercurial import demandimport; demandimport.enable() > from mercurial.hgweb import hgweb > from mercurial.hgweb import wsgicgi > application = hgweb("test", "Empty test repository") > wsgicgi.launch(application) > HGWEB $ chmod 755 hgweb.cgi try hgweb request $ . 
"$TESTDIR/cgienv" $ QUERY_STRING="cmd=changegroup&roots=0000000000000000000000000000000000000000"; export QUERY_STRING $ python hgweb.cgi >page1 2>&1 $ python "$TESTDIR/md5sum.py" page1 1f424bb22ec05c3c6bc866b6e67efe43 page1 make sure headers are sent even when there is no body $ QUERY_STRING="cmd=listkeys&namespace=nosuchnamespace" python hgweb.cgi Status: 200 Script output follows\r (esc) Content-Type: application/mercurial-0.1\r (esc) Content-Length: 0\r (esc) \r (esc) mercurial-3.7.3/tests/test-merge-symlinks.t0000644000175000017500000000300512676531525020377 0ustar mpmmpm00000000000000 $ cat > echo.py < #!/usr/bin/env python > import os, sys > try: > import msvcrt > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY) > msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY) > except ImportError: > pass > > for k in ('HG_FILE', 'HG_MY_ISLINK', 'HG_OTHER_ISLINK', 'HG_BASE_ISLINK'): > print k, os.environ[k] > EOF Create 2 heads containing the same file, once as a file, once as a link. Bundle was generated with: # hg init t # cd t # echo a > a # hg ci -qAm t0 -d '0 0' # echo l > l # hg ci -qAm t1 -d '1 0' # hg up -C 0 # ln -s a l # hg ci -qAm t2 -d '2 0' # echo l2 > l2 # hg ci -qAm t3 -d '3 0' $ hg init t $ cd t $ hg -q pull "$TESTDIR/bundles/test-merge-symlinks.hg" $ hg up -C 3 3 files updated, 0 files merged, 0 files removed, 0 files unresolved Merge them and display *_ISLINK vars merge heads $ hg merge --tool="python ../echo.py" merging l HG_FILE l HG_MY_ISLINK 1 HG_OTHER_ISLINK 0 HG_BASE_ISLINK 0 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) Test working directory symlink bit calculation wrt copies, especially on non-supporting systems. 
merge working directory $ hg up -C 2 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg copy l l2 $ HGMERGE="python ../echo.py" hg up 3 merging l2 HG_FILE l2 HG_MY_ISLINK 1 HG_OTHER_ISLINK 0 HG_BASE_ISLINK 0 0 files updated, 1 files merged, 0 files removed, 0 files unresolved $ cd .. mercurial-3.7.3/tests/test-serve.t0000644000175000017500000000341712676531525016564 0ustar mpmmpm00000000000000#require serve $ hgserve() > { > hg serve -a localhost -d --pid-file=hg.pid -E errors.log -v $@ \ > | sed -e "s/:$HGPORT1\\([^0-9]\\)/:HGPORT1\1/g" \ > -e "s/:$HGPORT2\\([^0-9]\\)/:HGPORT2\1/g" \ > -e 's/http:\/\/[^/]*\//http:\/\/localhost\//' > cat hg.pid >> "$DAEMON_PIDS" > echo % errors > cat errors.log > killdaemons.py hg.pid > } $ hg init test $ cd test $ echo '[web]' > .hg/hgrc $ echo 'accesslog = access.log' >> .hg/hgrc $ echo "port = $HGPORT1" >> .hg/hgrc Without -v $ hg serve -a localhost -p $HGPORT -d --pid-file=hg.pid -E errors.log $ cat hg.pid >> "$DAEMON_PIDS" $ if [ -f access.log ]; then > echo 'access log created - .hg/hgrc respected' > fi access log created - .hg/hgrc respected errors $ cat errors.log With -v $ hgserve listening at http://localhost/ (bound to 127.0.0.1:HGPORT1) % errors With -v and -p HGPORT2 $ hgserve -p "$HGPORT2" listening at http://localhost/ (bound to 127.0.0.1:HGPORT2) % errors With -v and -p daytime (should fail because low port) #if no-root $ KILLQUIETLY=Y $ hgserve -p daytime abort: cannot start server at 'localhost:13': Permission denied abort: child process failed to start % errors $ KILLQUIETLY=N #endif With --prefix foo $ hgserve --prefix foo listening at http://localhost/foo/ (bound to 127.0.0.1:HGPORT1) % errors With --prefix /foo $ hgserve --prefix /foo listening at http://localhost/foo/ (bound to 127.0.0.1:HGPORT1) % errors With --prefix foo/ $ hgserve --prefix foo/ listening at http://localhost/foo/ (bound to 127.0.0.1:HGPORT1) % errors With --prefix /foo/ $ hgserve --prefix /foo/ listening at 
http://localhost/foo/ (bound to 127.0.0.1:HGPORT1) % errors $ cd .. mercurial-3.7.3/tests/test-convert-svn-move.t0000644000175000017500000001774412676531525020700 0ustar mpmmpm00000000000000#require svn svn-bindings $ cat >> $HGRCPATH < [extensions] > convert = > EOF $ svnadmin create svn-repo $ svnadmin load -q svn-repo < "$TESTDIR/svn/move.svndump" $ SVNREPOPATH=`pwd`/svn-repo #if windows $ SVNREPOURL=file:///`$PYTHON -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` #else $ SVNREPOURL=file://`$PYTHON -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` #endif Convert trunk and branches $ hg convert --datesort "$SVNREPOURL"/subproject A-hg initializing destination A-hg repository scanning source... sorting... converting... 13 createtrunk 12 moved1 11 moved1 10 moved2 9 changeb and rm d2 8 changeb and rm d2 7 moved1again 6 moved1again 5 copyfilefrompast 4 copydirfrompast 3 add d3 2 copy dir and remove subdir 1 add d4old 0 rename d4old into d4new $ cd A-hg $ hg log -G --template '{rev} {desc|firstline} files: {files}\n' o 13 rename d4old into d4new files: d4new/g d4old/g | o 12 add d4old files: d4old/g | o 11 copy dir and remove subdir files: d3/d31/e d4/d31/e d4/f | o 10 add d3 files: d3/d31/e d3/f | o 9 copydirfrompast files: d2/d | o 8 copyfilefrompast files: d | o 7 moved1again files: d1/b d1/c | | o 6 moved1again files: | | o | 5 changeb and rm d2 files: d1/b d2/d | | | o 4 changeb and rm d2 files: b | | o | 3 moved2 files: d2/d | | o | 2 moved1 files: d1/b d1/c | | | o 1 moved1 files: b c | o 0 createtrunk files: Check move copy records $ hg st --rev 12:13 --copies A d4new/g d4old/g R d4old/g Check branches $ hg branches default 13:* (glob) d1 6:* (glob) $ cd .. 
$ mkdir test-replace $ cd test-replace $ svnadmin create svn-repo $ svnadmin load -q svn-repo < "$TESTDIR/svn/replace.svndump" Convert files being replaced by directories $ hg convert svn-repo hg-repo initializing destination hg-repo repository scanning source... sorting... converting... 6 initial 5 clobber symlink 4 clobber1 3 clobber2 2 adddb 1 clobberdir 0 branch $ cd hg-repo Manifest before $ hg -v manifest -r 1 644 a 644 d/b 644 d2/a 644 @ dlink 644 @ dlink2 644 dlink3 Manifest after clobber1 $ hg -v manifest -r 2 644 a/b 644 d/b 644 d2/a 644 dlink/b 644 @ dlink2 644 dlink3 Manifest after clobber2 $ hg -v manifest -r 3 644 a/b 644 d/b 644 d2/a 644 dlink/b 644 @ dlink2 644 @ dlink3 Manifest after clobberdir $ hg -v manifest -r 6 644 a/b 644 d/b 644 d2/a 644 d2/c 644 dlink/b 644 @ dlink2 644 @ dlink3 Try updating $ hg up -qC default $ cd .. Test convert progress bar $ cat >> $HGRCPATH < [extensions] > progress = > [progress] > assume-tty = 1 > delay = 0 > changedelay = 0 > format = topic bar number > refresh = 0 > width = 60 > EOF $ hg convert svn-repo hg-progress \r (no-eol) (esc) scanning [=====> ] 1/7\r (no-eol) (esc) scanning [===========> ] 2/7\r (no-eol) (esc) scanning [=================> ] 3/7\r (no-eol) (esc) scanning [========================> ] 4/7\r (no-eol) (esc) scanning [==============================> ] 5/7\r (no-eol) (esc) scanning [====================================> ] 6/7\r (no-eol) (esc) scanning [===========================================>] 7/7\r (no-eol) (esc) \r (no-eol) (esc) \r (no-eol) (esc) converting [ ] 0/7\r (no-eol) (esc) getting files [=====> ] 1/6\r (no-eol) (esc) getting files [============> ] 2/6\r (no-eol) (esc) getting files [==================> ] 3/6\r (no-eol) (esc) getting files [=========================> ] 4/6\r (no-eol) (esc) getting files [===============================> ] 5/6\r (no-eol) (esc) getting files [======================================>] 6/6\r (no-eol) (esc) \r (no-eol) (esc) \r (no-eol) (esc) converting 
[=====> ] 1/7\r (no-eol) (esc) scanning paths [ ] 0/1\r (no-eol) (esc) getting files [======================================>] 1/1\r (no-eol) (esc) \r (no-eol) (esc) \r (no-eol) (esc) converting [===========> ] 2/7\r (no-eol) (esc) scanning paths [ ] 0/2\r (no-eol) (esc) scanning paths [==================> ] 1/2\r (no-eol) (esc) getting files [========> ] 1/4\r (no-eol) (esc) getting files [==================> ] 2/4\r (no-eol) (esc) getting files [============================> ] 3/4\r (no-eol) (esc) getting files [======================================>] 4/4\r (no-eol) (esc) \r (no-eol) (esc) \r (no-eol) (esc) converting [=================> ] 3/7\r (no-eol) (esc) scanning paths [ ] 0/1\r (no-eol) (esc) getting files [======================================>] 1/1\r (no-eol) (esc) \r (no-eol) (esc) \r (no-eol) (esc) converting [=======================> ] 4/7\r (no-eol) (esc) scanning paths [ ] 0/1\r (no-eol) (esc) getting files [======================================>] 1/1\r (no-eol) (esc) \r (no-eol) (esc) \r (no-eol) (esc) converting [=============================> ] 5/7\r (no-eol) (esc) scanning paths [ ] 0/1\r (no-eol) (esc) getting files [===> ] 1/8\r (no-eol) (esc) getting files [========> ] 2/8\r (no-eol) (esc) getting files [=============> ] 3/8\r (no-eol) (esc) getting files [==================> ] 4/8\r (no-eol) (esc) getting files [=======================> ] 5/8\r (no-eol) (esc) getting files [============================> ] 6/8\r (no-eol) (esc) getting files [=================================> ] 7/8\r (no-eol) (esc) getting files [======================================>] 8/8\r (no-eol) (esc) \r (no-eol) (esc) \r (no-eol) (esc) converting [===================================> ] 6/7\r (no-eol) (esc) scanning paths [ ] 0/3\r (no-eol) (esc) scanning paths [===========> ] 1/3\r (no-eol) (esc) scanning paths [========================> ] 2/3\r (no-eol) (esc) getting files [===> ] 1/8\r (no-eol) (esc) getting files [========> ] 2/8\r (no-eol) (esc) getting files 
[=============> ] 3/8\r (no-eol) (esc) getting files [==================> ] 4/8\r (no-eol) (esc) getting files [=======================> ] 5/8\r (no-eol) (esc) getting files [============================> ] 6/8\r (no-eol) (esc) getting files [=================================> ] 7/8\r (no-eol) (esc) getting files [======================================>] 8/8\r (no-eol) (esc) \r (no-eol) (esc) initializing destination hg-progress repository scanning source... sorting... converting... 6 initial 5 clobber symlink 4 clobber1 3 clobber2 2 adddb 1 clobberdir 0 branch $ cd .. mercurial-3.7.3/tests/test-rebase-issue-noparam-single-rev.t0000644000175000017500000000320012676531525023521 0ustar mpmmpm00000000000000 $ cat >> $HGRCPATH < [extensions] > rebase= > > [phases] > publish=False > > [alias] > tglog = log -G --template "{rev}: '{desc}' {branches}\n" > EOF $ hg init a $ cd a $ echo c1 > c1 $ hg ci -Am c1 adding c1 $ echo c2 > c2 $ hg ci -Am c2 adding c2 $ echo l1 > l1 $ hg ci -Am l1 adding l1 $ hg up -q -C 1 $ echo r1 > r1 $ hg ci -Am r1 adding r1 created new head $ echo r2 > r2 $ hg ci -Am r2 adding r2 $ hg tglog @ 4: 'r2' | o 3: 'r1' | | o 2: 'l1' |/ o 1: 'c2' | o 0: 'c1' Rebase with no arguments - single revision in source branch: $ hg up -q -C 2 $ hg rebase rebasing 2:87c180a611f2 "l1" saved backup bundle to $TESTTMP/a/.hg/strip-backup/87c180a611f2-a5be192d-backup.hg (glob) $ hg tglog @ 4: 'l1' | o 3: 'r2' | o 2: 'r1' | o 1: 'c2' | o 0: 'c1' $ cd .. 
$ hg init b $ cd b $ echo c1 > c1 $ hg ci -Am c1 adding c1 $ echo c2 > c2 $ hg ci -Am c2 adding c2 $ echo l1 > l1 $ hg ci -Am l1 adding l1 $ echo l2 > l2 $ hg ci -Am l2 adding l2 $ hg up -q -C 1 $ echo r1 > r1 $ hg ci -Am r1 adding r1 created new head $ hg tglog @ 4: 'r1' | | o 3: 'l2' | | | o 2: 'l1' |/ o 1: 'c2' | o 0: 'c1' Rebase with no arguments - single revision in target branch: $ hg up -q -C 3 $ hg rebase rebasing 2:87c180a611f2 "l1" rebasing 3:1ac923b736ef "l2" saved backup bundle to $TESTTMP/b/.hg/strip-backup/87c180a611f2-b980535c-backup.hg (glob) $ hg tglog @ 4: 'l2' | o 3: 'l1' | o 2: 'r1' | o 1: 'c2' | o 0: 'c1' $ cd .. mercurial-3.7.3/tests/test-fetch.t0000644000175000017500000003061012676531525016524 0ustar mpmmpm00000000000000#require serve $ echo "[extensions]" >> $HGRCPATH $ echo "fetch=" >> $HGRCPATH test fetch with default branches only $ hg init a $ echo a > a/a $ hg --cwd a commit -Ama adding a $ hg clone a b updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg clone a c updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo b > a/b $ hg --cwd a commit -Amb adding b $ hg --cwd a parents -q 1:d2ae7f538514 should pull one change $ hg --cwd b fetch ../a pulling from ../a searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg --cwd b parents -q 1:d2ae7f538514 $ echo c > c/c $ hg --cwd c commit -Amc adding c $ hg clone c d updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg clone c e updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved We cannot use the default commit message if fetching from a local repo, because the path of the repo will be included in the commit message, making every commit appear different. 
should merge c into a $ hg --cwd c fetch -d '0 0' -m 'automated merge' ../a pulling from ../a searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) updating to 2:d2ae7f538514 1 files updated, 0 files merged, 1 files removed, 0 files unresolved merging with 1:d36c0562f908 1 files updated, 0 files merged, 0 files removed, 0 files unresolved new changeset 3:a323a0c43ec4 merges remote changes with local $ ls c a b c $ hg --cwd a serve -a localhost -p $HGPORT -d --pid-file=hg.pid $ cat a/hg.pid >> "$DAEMON_PIDS" fetch over http, no auth (this also tests that editor is invoked if '--edit' is specified) $ HGEDITOR=cat hg --cwd d fetch --edit http://localhost:$HGPORT/ pulling from http://localhost:$HGPORT/ searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) updating to 2:d2ae7f538514 1 files updated, 0 files merged, 1 files removed, 0 files unresolved merging with 1:d36c0562f908 1 files updated, 0 files merged, 0 files removed, 0 files unresolved Automated merge with http://localhost:$HGPORT/ HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. 
HG: -- HG: user: test HG: branch merge HG: branch 'default' HG: changed c new changeset 3:* merges remote changes with local (glob) $ hg --cwd d tip --template '{desc}\n' Automated merge with http://localhost:$HGPORT/ $ hg --cwd d status --rev 'tip^1' --rev tip A c $ hg --cwd d status --rev 'tip^2' --rev tip A b fetch over http with auth (should be hidden in desc) (this also tests that editor is not invoked if '--edit' is not specified, even though commit message is not specified explicitly) $ HGEDITOR=cat hg --cwd e fetch http://user:password@localhost:$HGPORT/ pulling from http://user:***@localhost:$HGPORT/ searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) updating to 2:d2ae7f538514 1 files updated, 0 files merged, 1 files removed, 0 files unresolved merging with 1:d36c0562f908 1 files updated, 0 files merged, 0 files removed, 0 files unresolved new changeset 3:* merges remote changes with local (glob) $ hg --cwd e tip --template '{desc}\n' Automated merge with http://localhost:$HGPORT/ $ hg clone a f updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg clone a g updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo f > f/f $ hg --cwd f ci -Amf adding f $ echo g > g/g $ hg --cwd g ci -Amg adding g $ hg clone -q f h $ hg clone -q g i should merge f into g $ hg --cwd g fetch -d '0 0' --switch -m 'automated merge' ../f pulling from ../f searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) 0 files updated, 0 files merged, 0 files removed, 0 files unresolved merging with 3:6343ca3eff20 1 files updated, 0 files merged, 0 files removed, 0 files unresolved new changeset 4:f7faa0b7d3c6 merges remote changes with local $ rm i/g should abort, because i is modified $ hg --cwd i fetch ../h abort: uncommitted changes 
[255] test fetch with named branches $ hg init nbase $ echo base > nbase/a $ hg -R nbase ci -Am base adding a $ hg -R nbase branch a marked working directory as branch a (branches are permanent and global, did you want a bookmark?) $ echo a > nbase/a $ hg -R nbase ci -m a $ hg -R nbase up -C 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R nbase branch b marked working directory as branch b $ echo b > nbase/b $ hg -R nbase ci -Am b adding b pull in change on foreign branch $ hg clone nbase n1 updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg clone nbase n2 updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R n1 up -C a 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo aa > n1/a $ hg -R n1 ci -m a1 $ hg -R n2 up -C b 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R n2 fetch -m 'merge' n1 pulling from n1 searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files parent should be 2 (no automatic update) $ hg -R n2 parents --template '{rev}\n' 2 $ rm -fr n1 n2 pull in changes on both foreign and local branches $ hg clone nbase n1 updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg clone nbase n2 updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R n1 up -C a 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo aa > n1/a $ hg -R n1 ci -m a1 $ hg -R n1 up -C b 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo bb > n1/b $ hg -R n1 ci -m b1 $ hg -R n2 up -C b 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R n2 fetch -m 'merge' n1 pulling from n1 searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes 
to 2 files 1 files updated, 0 files merged, 0 files removed, 0 files unresolved parent should be 4 (fast forward) $ hg -R n2 parents --template '{rev}\n' 4 $ rm -fr n1 n2 pull changes on foreign (2 new heads) and local (1 new head) branches with a local change $ hg clone nbase n1 updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg clone nbase n2 updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R n1 up -C a 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo a1 > n1/a $ hg -R n1 ci -m a1 $ hg -R n1 up -C b 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo bb > n1/b $ hg -R n1 ci -m b1 $ hg -R n1 up -C 1 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo a2 > n1/a $ hg -R n1 ci -m a2 created new head $ hg -R n2 up -C b 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo change >> n2/c $ hg -R n2 ci -A -m local adding c $ hg -R n2 fetch -d '0 0' -m 'merge' n1 pulling from n1 searching for changes adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 2 files (+2 heads) updating to 5:3c4a837a864f 1 files updated, 0 files merged, 1 files removed, 0 files unresolved merging with 3:1267f84a9ea5 1 files updated, 0 files merged, 0 files removed, 0 files unresolved new changeset 7:2cf2a1261f21 merges remote changes with local parent should be 7 (new merge changeset) $ hg -R n2 parents --template '{rev}\n' 7 $ rm -fr n1 n2 pull in changes on foreign (merge of local branch) and local (2 new heads) with a local change $ hg clone nbase n1 updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg clone nbase n2 updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R n1 up -C a 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R n1 
merge b 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg -R n1 ci -m merge $ hg -R n1 up -C 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo c > n1/a $ hg -R n1 ci -m c $ hg -R n1 up -C 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo cc > n1/a $ hg -R n1 ci -m cc created new head $ hg -R n2 up -C b 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo change >> n2/b $ hg -R n2 ci -A -m local $ hg -R n2 fetch -m 'merge' n1 pulling from n1 searching for changes adding changesets adding manifests adding file changes added 3 changesets with 2 changes to 1 files (+2 heads) not merging with 1 other new branch heads (use "hg heads ." and "hg merge" to merge them) [1] parent should be 3 (fetch did not merge anything) $ hg -R n2 parents --template '{rev}\n' 3 $ rm -fr n1 n2 pull in change on different branch than dirstate $ hg init n1 $ echo a > n1/a $ hg -R n1 ci -Am initial adding a $ hg clone n1 n2 updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo b > n1/a $ hg -R n1 ci -m next $ hg -R n2 branch topic marked working directory as branch topic (branches are permanent and global, did you want a bookmark?) $ hg -R n2 fetch -m merge n1 abort: working directory not at branch tip (use "hg update" to check out branch tip) [255] parent should be 0 (fetch did not update or merge anything) $ hg -R n2 parents --template '{rev}\n' 0 $ rm -fr n1 n2 test fetch with inactive branches $ hg init ib1 $ echo a > ib1/a $ hg --cwd ib1 ci -Am base adding a $ hg --cwd ib1 branch second marked working directory as branch second (branches are permanent and global, did you want a bookmark?) 
$ echo b > ib1/b $ hg --cwd ib1 ci -Am onsecond adding b $ hg --cwd ib1 branch -f default marked working directory as branch default $ echo c > ib1/c $ hg --cwd ib1 ci -Am newdefault adding c created new head $ hg clone ib1 ib2 updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved fetch should succeed $ hg --cwd ib2 fetch ../ib1 pulling from ../ib1 searching for changes no changes found $ rm -fr ib1 ib2 test issue1726 $ hg init i1726r1 $ echo a > i1726r1/a $ hg --cwd i1726r1 ci -Am base adding a $ hg clone i1726r1 i1726r2 updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo b > i1726r1/a $ hg --cwd i1726r1 ci -m second $ echo c > i1726r2/a $ hg --cwd i1726r2 ci -m third $ HGMERGE=true hg --cwd i1726r2 fetch ../i1726r1 pulling from ../i1726r1 searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) updating to 2:7837755a2789 1 files updated, 0 files merged, 0 files removed, 0 files unresolved merging with 1:d1f0c6c48ebd merging a 0 files updated, 1 files merged, 0 files removed, 0 files unresolved new changeset 3:* merges remote changes with local (glob) $ hg --cwd i1726r2 heads default --template '{rev}\n' 3 test issue2047 $ hg -q init i2047a $ cd i2047a $ echo a > a $ hg -q ci -Am a $ hg -q branch stable $ echo b > b $ hg -q ci -Am b $ cd .. $ hg -q clone -r 0 i2047a i2047b $ cd i2047b $ hg fetch ../i2047a pulling from ../i2047a searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files $ cd .. 
mercurial-3.7.3/tests/test-merge-commit.t0000644000175000017500000001306412676531525020024 0ustar mpmmpm00000000000000Check that renames are correctly saved by a commit after a merge Test with the merge on 3 having the rename on the local parent $ hg init a $ cd a $ echo line1 > foo $ hg add foo $ hg ci -m '0: add foo' $ echo line2 >> foo $ hg ci -m '1: change foo' $ hg up -C 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg mv foo bar $ rm bar $ echo line0 > bar $ echo line1 >> bar $ hg ci -m '2: mv foo bar; change bar' created new head $ hg merge 1 merging bar and foo to bar 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat bar line0 line1 line2 $ hg ci -m '3: merge with local rename' $ hg debugindex bar rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 77 ..... 2 d35118874825 000000000000 000000000000 (re) 1 77 76 ..... 3 5345f5ab8abd 000000000000 d35118874825 (re) $ hg debugrename bar bar renamed from foo:9e25c27b87571a1edee5ae4dddee5687746cc8e2 $ hg debugindex foo rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 7 ..... 0 690b295714ae 000000000000 000000000000 (re) 1 7 13 ..... 
1 9e25c27b8757 690b295714ae 000000000000 (re) Revert the content change from rev 2: $ hg up -C 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm bar $ echo line1 > bar $ hg ci -m '4: revert content change from rev 2' created new head $ hg log --template '{rev}:{node|short} {parents}\n' 4:2263c1be0967 2:0f2ff26688b9 3:0555950ead28 2:0f2ff26688b9 1:5cd961e4045d 2:0f2ff26688b9 0:2665aaee66e9 1:5cd961e4045d 0:2665aaee66e9 This should use bar@rev2 as the ancestor: $ hg --debug merge 3 searching for copies back to rev 1 resolving manifests branchmerge: True, force: False, partial: False ancestor: 0f2ff26688b9, local: 2263c1be0967+, remote: 0555950ead28 preserving bar for resolve of bar bar: versions differ -> m (premerge) picked tool ':merge' for bar (binary False symlink False changedelete False) merging bar my bar@2263c1be0967+ other bar@0555950ead28 ancestor bar@0f2ff26688b9 premerge successful 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat bar line1 line2 $ hg ci -m '5: merge' $ hg debugindex bar rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 77 ..... 2 d35118874825 000000000000 000000000000 (re) 1 77 76 ..... 3 5345f5ab8abd 000000000000 d35118874825 (re) 2 153 7 ..... 4 ff4b45017382 d35118874825 000000000000 (re) 3 160 13 ..... 5 3701b4893544 ff4b45017382 5345f5ab8abd (re) Same thing, but with the merge on 3 having the rename on the remote parent: $ cd .. $ hg clone -U -r 1 -r 2 a b adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 2 files (+1 heads) $ cd b $ hg up -C 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge 2 merging foo and bar to bar 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat bar line0 line1 line2 $ hg ci -m '3: merge with remote rename' $ hg debugindex bar rev offset length ..... 
linkrev nodeid p1 p2 (re) 0 0 77 ..... 2 d35118874825 000000000000 000000000000 (re) 1 77 76 ..... 3 5345f5ab8abd 000000000000 d35118874825 (re) $ hg debugrename bar bar renamed from foo:9e25c27b87571a1edee5ae4dddee5687746cc8e2 $ hg debugindex foo rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 7 ..... 0 690b295714ae 000000000000 000000000000 (re) 1 7 13 ..... 1 9e25c27b8757 690b295714ae 000000000000 (re) Revert the content change from rev 2: $ hg up -C 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm bar $ echo line1 > bar $ hg ci -m '4: revert content change from rev 2' created new head $ hg log --template '{rev}:{node|short} {parents}\n' 4:2263c1be0967 2:0f2ff26688b9 3:3ffa6b9e35f0 1:5cd961e4045d 2:0f2ff26688b9 2:0f2ff26688b9 0:2665aaee66e9 1:5cd961e4045d 0:2665aaee66e9 This should use bar@rev2 as the ancestor: $ hg --debug merge 3 searching for copies back to rev 1 resolving manifests branchmerge: True, force: False, partial: False ancestor: 0f2ff26688b9, local: 2263c1be0967+, remote: 3ffa6b9e35f0 preserving bar for resolve of bar bar: versions differ -> m (premerge) picked tool ':merge' for bar (binary False symlink False changedelete False) merging bar my bar@2263c1be0967+ other bar@3ffa6b9e35f0 ancestor bar@0f2ff26688b9 premerge successful 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat bar line1 line2 $ hg ci -m '5: merge' $ hg debugindex bar rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 77 ..... 2 d35118874825 000000000000 000000000000 (re) 1 77 76 ..... 3 5345f5ab8abd 000000000000 d35118874825 (re) 2 153 7 ..... 4 ff4b45017382 d35118874825 000000000000 (re) 3 160 13 ..... 5 3701b4893544 ff4b45017382 5345f5ab8abd (re) $ cd .. 
mercurial-3.7.3/tests/test-http-clone-r.t0000644000175000017500000001401112676531525017744 0ustar mpmmpm00000000000000#require serve creating 'remote $ hg init remote $ cd remote $ hg unbundle "$TESTDIR/bundles/remote.hg" adding changesets adding manifests adding file changes added 9 changesets with 7 changes to 4 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg up tip 1 files updated, 0 files merged, 0 files removed, 0 files unresolved Starting server $ hg serve -p $HGPORT -E ../error.log -d --pid-file=../hg1.pid $ cd .. $ cat hg1.pid >> $DAEMON_PIDS clone remote via stream $ for i in 0 1 2 3 4 5 6 7 8; do > hg clone -r "$i" http://localhost:$HGPORT/ test-"$i" > if cd test-"$i"; then > hg verify > cd .. > fi > done adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 1 changesets, 1 total revisions adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 2 changesets, 2 total revisions adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 3 changesets, 3 total revisions adding changesets adding manifests adding file changes added 4 changesets with 4 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking 
manifests crosschecking files in changesets and manifests checking files 1 files, 4 changesets, 4 total revisions adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 2 changesets, 2 total revisions adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 3 changesets, 3 total revisions adding changesets adding manifests adding file changes added 4 changesets with 5 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 4 changesets, 5 total revisions adding changesets adding manifests adding file changes added 5 changesets with 6 changes to 3 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking files 3 files, 5 changesets, 6 total revisions adding changesets adding manifests adding file changes added 5 changesets with 5 changes to 2 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 5 changesets, 5 total revisions $ cd test-8 $ hg pull ../test-7 pulling from ../test-7 searching for changes adding changesets adding manifests adding file changes added 4 changesets with 2 changes to 3 files (+1 heads) 
(run 'hg heads' to see heads, 'hg merge' to merge) $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 4 files, 9 changesets, 7 total revisions $ cd .. $ cd test-1 $ hg pull -r 4 http://localhost:$HGPORT/ pulling from http://localhost:$HGPORT/ searching for changes adding changesets adding manifests adding file changes added 1 changesets with 0 changes to 0 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 3 changesets, 2 total revisions $ hg pull http://localhost:$HGPORT/ pulling from http://localhost:$HGPORT/ searching for changes adding changesets adding manifests adding file changes added 6 changesets with 5 changes to 4 files (run 'hg update' to get a working copy) $ cd .. $ cd test-2 $ hg pull -r 5 http://localhost:$HGPORT/ pulling from http://localhost:$HGPORT/ searching for changes adding changesets adding manifests adding file changes added 2 changesets with 0 changes to 0 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 5 changesets, 3 total revisions $ hg pull http://localhost:$HGPORT/ pulling from http://localhost:$HGPORT/ searching for changes adding changesets adding manifests adding file changes added 4 changesets with 4 changes to 4 files (run 'hg update' to get a working copy) $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 4 files, 9 changesets, 7 total revisions $ cd .. 
no default destination if url has no path: $ hg clone http://localhost:$HGPORT/ abort: empty destination path is not valid [255] $ cat error.log mercurial-3.7.3/tests/test-identify.t0000644000175000017500000000461312676531525017252 0ustar mpmmpm00000000000000#require serve #if no-outer-repo no repo $ hg id abort: there is no Mercurial repository here (.hg not found) [255] #endif create repo $ hg init test $ cd test $ echo a > a $ hg ci -Ama adding a basic id usage $ hg id cb9a9f314b8b tip $ hg id --debug cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b tip $ hg id -q cb9a9f314b8b $ hg id -v cb9a9f314b8b tip with options $ hg id -r. cb9a9f314b8b tip $ hg id -n 0 $ hg id -t tip $ hg id -b default $ hg id -i cb9a9f314b8b $ hg id -n -t -b -i cb9a9f314b8b 0 default tip with modifications $ echo b > a $ hg id -n -t -b -i cb9a9f314b8b+ 0+ default tip other local repo $ cd .. $ hg -R test id cb9a9f314b8b+ tip #if no-outer-repo $ hg id test cb9a9f314b8b+ tip #endif with remote http repo $ cd test $ hg serve -p $HGPORT1 -d --pid-file=hg.pid $ cat hg.pid >> $DAEMON_PIDS $ hg id http://localhost:$HGPORT1/ cb9a9f314b8b remote with rev number? $ hg id -n http://localhost:$HGPORT1/ abort: can't query remote revision number, branch, or tags [255] remote with tags? $ hg id -t http://localhost:$HGPORT1/ abort: can't query remote revision number, branch, or tags [255] remote with branch? $ hg id -b http://localhost:$HGPORT1/ abort: can't query remote revision number, branch, or tags [255] test bookmark support $ hg bookmark Y $ hg bookmark Z $ hg bookmarks Y 0:cb9a9f314b8b * Z 0:cb9a9f314b8b $ hg id cb9a9f314b8b+ tip Y/Z $ hg id --bookmarks Y Z test remote identify with bookmarks $ hg id http://localhost:$HGPORT1/ cb9a9f314b8b Y/Z $ hg id --bookmarks http://localhost:$HGPORT1/ Y Z $ hg id -r . http://localhost:$HGPORT1/ cb9a9f314b8b Y/Z $ hg id --bookmarks -r . http://localhost:$HGPORT1/ Y Z test invalid lookup $ hg id -r noNoNO http://localhost:$HGPORT1/ abort: unknown revision 'noNoNO'! 
[255] Make sure we do not obscure unknown requires file entries (issue2649) $ echo fake >> .hg/requires $ hg id abort: repository requires features unknown to this Mercurial: fake! (see https://mercurial-scm.org/wiki/MissingRequirement for more information) [255] $ cd .. #if no-outer-repo $ hg id test abort: repository requires features unknown to this Mercurial: fake! (see https://mercurial-scm.org/wiki/MissingRequirement for more information) [255] #endif mercurial-3.7.3/tests/test-debugbuilddag.t0000644000175000017500000001254012676531525020217 0ustar mpmmpm00000000000000 plain $ hg init $ hg debugbuilddag '+2:f +3:p2 @temp --config extensions.progress= --config progress.assume-tty=1 \ > --config progress.delay=0 --config progress.refresh=0 \ > --config progress.format=topic,bar,number \ > --config progress.width=60 \r (no-eol) (esc) building [ ] 0/12\r (no-eol) (esc) building [ ] 0/12\r (no-eol) (esc) building [==> ] 1/12\r (no-eol) (esc) building [==> ] 1/12\r (no-eol) (esc) building [======> ] 2/12\r (no-eol) (esc) building [=========> ] 3/12\r (no-eol) (esc) building [=============> ] 4/12\r (no-eol) (esc) building [=============> ] 4/12\r (no-eol) (esc) building [=============> ] 4/12\r (no-eol) (esc) building [================> ] 5/12\r (no-eol) (esc) building [====================> ] 6/12\r (no-eol) (esc) building [=======================> ] 7/12\r (no-eol) (esc) building [===========================> ] 8/12\r (no-eol) (esc) building [===========================> ] 8/12\r (no-eol) (esc) building [==============================> ] 9/12\r (no-eol) (esc) building [==================================> ] 10/12\r (no-eol) (esc) building [=====================================> ] 11/12\r (no-eol) (esc) \r (no-eol) (esc) tags $ cat .hg/localtags 66f7d451a68b85ed82ff5fcc254daf50c74144bd f bebd167eb94d257ace0e814aeb98e6972ed2970d p2 dag $ hg debugdag -t -b +2:f +3:p2 @temp*f+3 @default*/p2+2:tip tip $ hg id 000000000000 glog $ hg log -G --template '{rev}: {desc} 
[{branches}] @ {date}\n' o 11: r11 [] @ 11.00 | o 10: r10 [] @ 10.00 | o 9: r9 [] @ 9.00 |\ | o 8: r8 [temp] @ 8.00 | | | o 7: r7 [temp] @ 7.00 | | | o 6: r6 [temp] @ 6.00 | | | o 5: r5 [temp] @ 5.00 | | o | 4: r4 [] @ 4.00 | | o | 3: r3 [] @ 3.00 | | o | 2: r2 [] @ 2.00 |/ o 1: r1 [] @ 1.00 | o 0: r0 [] @ 0.00 overwritten files, starting on a non-default branch $ rm -r .hg $ hg init $ hg debugbuilddag '@start.@default.:f +3:p2 @temp a $ echo c > c $ echo e > e $ bzr add -q a c e $ bzr commit -q -m 'Initial add: a, c, e' $ bzr mv a b a => b $ bzr mv c d c => d $ bzr mv e f e => f $ echo a2 >> a $ mkdir e $ bzr add -q a e $ bzr commit -q -m 'rename a into b, create a, rename c into d' $ cd .. $ hg convert source source-hg scanning source... sorting... converting... 1 Initial add: a, c, e 0 rename a into b, create a, rename c into d $ glog -R source-hg o 1@source "rename a into b, create a, rename c into d" files: a b c d e f | o 0@source "Initial add: a, c, e" files: a c e manifest $ hg manifest -R source-hg -r tip a b d f test --rev option $ hg convert -r 1 source source-1-hg initializing destination source-1-hg repository scanning source... sorting... converting... 0 Initial add: a, c, e $ glog -R source-1-hg o 0@source "Initial add: a, c, e" files: a c e test with filemap $ cat > filemap < exclude a > EOF $ hg convert --filemap filemap source source-filemap-hg initializing destination source-filemap-hg repository scanning source... sorting... converting... 1 Initial add: a, c, e 0 rename a into b, create a, rename c into d $ hg -R source-filemap-hg manifest -r tip b d f convert from lightweight checkout $ bzr checkout --lightweight source source-light $ hg convert -s bzr source-light source-light-hg initializing destination source-light-hg repository warning: lightweight checkouts may cause conversion failures, try with a regular branch instead. 
$TESTTMP/test-createandrename/source-light does not look like a Bazaar repository abort: source-light: missing or unsupported repository [255] extract timestamps that look just like hg's {date|isodate}: yyyy-mm-dd HH:MM zzzz (no seconds!) compare timestamps $ cd source $ bzr log | \ > sed '/timestamp/!d;s/.\{15\}\([0-9: -]\{16\}\):.. \(.[0-9]\{4\}\)/\1 \2/' \ > > ../bzr-timestamps $ cd .. $ hg -R source-hg log --template "{date|isodate}\n" > hg-timestamps $ cmp bzr-timestamps hg-timestamps || diff -u bzr-timestamps hg-timestamps $ cd .. merge $ mkdir test-merge $ cd test-merge $ cat > helper.py < import sys > from bzrlib import workingtree > wt = workingtree.WorkingTree.open('.') > > message, stamp = sys.argv[1:] > wt.commit(message, timestamp=int(stamp)) > EOF $ bzr init -q source $ cd source $ echo content > a $ echo content2 > b $ bzr add -q a b $ bzr commit -q -m 'Initial add' $ cd .. $ bzr branch -q source source-improve $ cd source $ echo more >> a $ python ../helper.py 'Editing a' 100 $ cd ../source-improve $ echo content3 >> b $ python ../helper.py 'Editing b' 200 $ cd ../source $ bzr merge -q ../source-improve $ bzr commit -q -m 'Merged improve branch' $ cd .. $ hg convert --datesort source source-hg initializing destination source-hg repository scanning source... sorting... converting... 3 Initial add 2 Editing a 1 Editing b 0 Merged improve branch $ glog -R source-hg o 3@source "Merged improve branch" files: |\ | o 2@source-improve "Editing b" files: b | | o | 1@source "Editing a" files: a |/ o 0@source "Initial add" files: a b $ cd .. 
#if symlink execbit symlinks and executable files $ mkdir test-symlinks $ cd test-symlinks $ bzr init -q source $ cd source $ touch program $ chmod +x program $ ln -s program altname $ mkdir d $ echo a > d/a $ ln -s a syma $ bzr add -q altname program syma d/a $ bzr commit -q -m 'Initial setup' $ touch newprog $ chmod +x newprog $ rm altname $ ln -s newprog altname $ chmod -x program $ bzr add -q newprog $ bzr commit -q -m 'Symlink changed, x bits changed' $ cd .. $ hg convert source source-hg initializing destination source-hg repository scanning source... sorting... converting... 1 Initial setup 0 Symlink changed, x bits changed $ manifest source-hg 0 % manifest of 0 644 @ altname 644 d/a 755 * program 644 @ syma $ manifest source-hg tip % manifest of tip 644 @ altname 644 d/a 755 * newprog 644 program 644 @ syma test the symlinks can be recreated $ cd source-hg $ hg up 5 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg cat syma; echo a $ cd ../.. #endif Multiple branches $ bzr init-repo -q --no-trees repo $ bzr init -q repo/trunk $ bzr co repo/trunk repo-trunk $ cd repo-trunk $ echo a > a $ bzr add -q a $ bzr ci -qm adda $ bzr tag trunk-tag Created tag trunk-tag. $ bzr switch -b branch Tree is up to date at revision 1. Switched to branch: *repo/branch/ (glob) $ sleep 1 $ echo b > b $ bzr add -q b $ bzr ci -qm addb $ bzr tag branch-tag Created tag branch-tag. $ bzr switch --force ../repo/trunk Updated to revision 1. Switched to branch: */repo/trunk/ (glob) $ sleep 1 $ echo a >> a $ bzr ci -qm changea $ cd .. $ hg convert --datesort repo repo-bzr initializing destination repo-bzr repository scanning source... sorting... converting... 2 adda 1 addb 0 changea updating tags $ (cd repo-bzr; glog) o 3@default "update tags" files: .hgtags | o 2@default "changea" files: a | | o 1@branch "addb" files: b |/ o 0@default "adda" files: a Test tags (converted identifiers are not stable because bzr ones are not and get incorporated in extra fields). 
$ hg -R repo-bzr tags tip 3:* (glob) branch-tag 1:* (glob) trunk-tag 0:* (glob) Nested repositories (issue3254) $ bzr init-repo -q --no-trees repo/inner $ bzr init -q repo/inner/trunk $ bzr co repo/inner/trunk inner-trunk $ cd inner-trunk $ echo b > b $ bzr add -q b $ bzr ci -qm addb $ cd .. $ hg convert --datesort repo noinner-bzr initializing destination noinner-bzr repository scanning source... sorting... converting... 2 adda 1 addb 0 changea updating tags mercurial-3.7.3/tests/test-git-export.t0000644000175000017500000002732212676531525017543 0ustar mpmmpm00000000000000 $ hg init $ echo start > start $ hg ci -Amstart adding start New file: $ mkdir dir1 $ echo new > dir1/new $ hg ci -Amnew adding dir1/new $ hg diff --git -r 0 diff --git a/dir1/new b/dir1/new new file mode 100644 --- /dev/null +++ b/dir1/new @@ -0,0 +1,1 @@ +new Copy: $ mkdir dir2 $ hg cp dir1/new dir1/copy $ echo copy1 >> dir1/copy $ hg cp dir1/new dir2/copy $ echo copy2 >> dir2/copy $ hg ci -mcopy $ hg diff --git -r 1:tip diff --git a/dir1/new b/dir1/copy copy from dir1/new copy to dir1/copy --- a/dir1/new +++ b/dir1/copy @@ -1,1 +1,2 @@ new +copy1 diff --git a/dir1/new b/dir2/copy copy from dir1/new copy to dir2/copy --- a/dir1/new +++ b/dir2/copy @@ -1,1 +1,2 @@ new +copy2 Cross and same-directory copies with a relative root: $ hg diff --git --root .. -r 1:tip abort: .. not under root '$TESTTMP' [255] $ hg diff --git --root doesnotexist -r 1:tip $ hg diff --git --root . 
-r 1:tip diff --git a/dir1/new b/dir1/copy copy from dir1/new copy to dir1/copy --- a/dir1/new +++ b/dir1/copy @@ -1,1 +1,2 @@ new +copy1 diff --git a/dir1/new b/dir2/copy copy from dir1/new copy to dir2/copy --- a/dir1/new +++ b/dir2/copy @@ -1,1 +1,2 @@ new +copy2 $ hg diff --git --root dir1 -r 1:tip diff --git a/new b/copy copy from new copy to copy --- a/new +++ b/copy @@ -1,1 +1,2 @@ new +copy1 $ hg diff --git --root dir2/ -r 1:tip diff --git a/copy b/copy new file mode 100644 --- /dev/null +++ b/copy @@ -0,0 +1,2 @@ +new +copy2 $ hg diff --git --root dir1 -r 1:tip -I '**/copy' diff --git a/new b/copy copy from new copy to copy --- a/new +++ b/copy @@ -1,1 +1,2 @@ new +copy1 $ hg diff --git --root dir1 -r 1:tip dir2 warning: dir2 not inside relative root dir1 $ hg diff --git --root dir1 -r 1:tip 'dir2/{copy}' warning: dir2/{copy} not inside relative root dir1 (glob) $ cd dir1 $ hg diff --git --root .. -r 1:tip diff --git a/dir1/new b/dir1/copy copy from dir1/new copy to dir1/copy --- a/dir1/new +++ b/dir1/copy @@ -1,1 +1,2 @@ new +copy1 diff --git a/dir1/new b/dir2/copy copy from dir1/new copy to dir2/copy --- a/dir1/new +++ b/dir2/copy @@ -1,1 +1,2 @@ new +copy2 $ hg diff --git --root ../.. -r 1:tip abort: ../.. not under root '$TESTTMP' [255] $ hg diff --git --root ../doesnotexist -r 1:tip $ hg diff --git --root .. -r 1:tip diff --git a/dir1/new b/dir1/copy copy from dir1/new copy to dir1/copy --- a/dir1/new +++ b/dir1/copy @@ -1,1 +1,2 @@ new +copy1 diff --git a/dir1/new b/dir2/copy copy from dir1/new copy to dir2/copy --- a/dir1/new +++ b/dir2/copy @@ -1,1 +1,2 @@ new +copy2 $ hg diff --git --root . -r 1:tip diff --git a/new b/copy copy from new copy to copy --- a/new +++ b/copy @@ -1,1 +1,2 @@ new +copy1 $ hg diff --git --root . -r 1:tip copy diff --git a/new b/copy copy from new copy to copy --- a/new +++ b/copy @@ -1,1 +1,2 @@ new +copy1 $ hg diff --git --root . -r 1:tip ../dir2 warning: ../dir2 not inside relative root . (glob) $ hg diff --git --root . 
-r 1:tip '../dir2/*' warning: ../dir2/* not inside relative root . (glob) $ cd .. Rename: $ hg mv dir1/copy dir1/rename1 $ echo rename1 >> dir1/rename1 $ hg mv dir2/copy dir1/rename2 $ echo rename2 >> dir1/rename2 $ hg ci -mrename $ hg diff --git -r 2:tip diff --git a/dir1/copy b/dir1/rename1 rename from dir1/copy rename to dir1/rename1 --- a/dir1/copy +++ b/dir1/rename1 @@ -1,2 +1,3 @@ new copy1 +rename1 diff --git a/dir2/copy b/dir1/rename2 rename from dir2/copy rename to dir1/rename2 --- a/dir2/copy +++ b/dir1/rename2 @@ -1,2 +1,3 @@ new copy2 +rename2 Cross and same-directory renames with a relative root: $ hg diff --root dir1 --git -r 2:tip diff --git a/copy b/rename1 rename from copy rename to rename1 --- a/copy +++ b/rename1 @@ -1,2 +1,3 @@ new copy1 +rename1 diff --git a/rename2 b/rename2 new file mode 100644 --- /dev/null +++ b/rename2 @@ -0,0 +1,3 @@ +new +copy2 +rename2 $ hg diff --root dir2 --git -r 2:tip diff --git a/copy b/copy deleted file mode 100644 --- a/copy +++ /dev/null @@ -1,2 +0,0 @@ -new -copy2 $ hg diff --root dir1 --git -r 2:tip -I '**/copy' diff --git a/copy b/copy deleted file mode 100644 --- a/copy +++ /dev/null @@ -1,2 +0,0 @@ -new -copy1 $ hg diff --root dir1 --git -r 2:tip -I '**/rename*' diff --git a/copy b/rename1 copy from copy copy to rename1 --- a/copy +++ b/rename1 @@ -1,2 +1,3 @@ new copy1 +rename1 diff --git a/rename2 b/rename2 new file mode 100644 --- /dev/null +++ b/rename2 @@ -0,0 +1,3 @@ +new +copy2 +rename2 Delete: $ hg rm dir1/* $ hg ci -mdelete $ hg diff --git -r 3:tip diff --git a/dir1/new b/dir1/new deleted file mode 100644 --- a/dir1/new +++ /dev/null @@ -1,1 +0,0 @@ -new diff --git a/dir1/rename1 b/dir1/rename1 deleted file mode 100644 --- a/dir1/rename1 +++ /dev/null @@ -1,3 +0,0 @@ -new -copy1 -rename1 diff --git a/dir1/rename2 b/dir1/rename2 deleted file mode 100644 --- a/dir1/rename2 +++ /dev/null @@ -1,3 +0,0 @@ -new -copy2 -rename2 $ cat > src < 1 > 2 > 3 > 4 > 5 > EOF $ hg ci -Amsrc adding src #if execbit 
chmod 644: $ chmod +x src $ hg ci -munexec $ hg diff --git -r 5:tip diff --git a/src b/src old mode 100644 new mode 100755 Rename+mod+chmod: $ hg mv src dst $ chmod -x dst $ echo a >> dst $ hg ci -mrenamemod $ hg diff --git -r 6:tip diff --git a/src b/dst old mode 100755 new mode 100644 rename from src rename to dst --- a/src +++ b/dst @@ -3,3 +3,4 @@ 3 4 5 +a Nonexistent in tip+chmod: $ hg diff --git -r 5:6 diff --git a/src b/src old mode 100644 new mode 100755 #else Dummy changes when no exec bit, mocking the execbit commit structure $ echo change >> src $ hg ci -munexec $ hg mv src dst $ hg ci -mrenamemod #endif Binary diff: $ cp "$TESTDIR/binfile.bin" . $ hg add binfile.bin $ hg diff --git > b.diff $ cat b.diff diff --git a/binfile.bin b/binfile.bin new file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..37ba3d1c6f17137d9c5f5776fa040caf5fe73ff9 GIT binary patch literal 593 zc$@)I0W$NUkd zX$nnYLt$-$V!?uy+1V%`z&Eh=ah|duER<4|QWhju3gb^nF*8iYobxWG-qqXl=2~5M z*IoDB)sG^CfNuoBmqLTVU^<;@nwHP!1wrWd`{(mHo6VNXWtyh{alzqmsH*yYzpvLT zLdYlXB*ODN003Z&P17_@)3Pi=i0wb04> start $ hg ci -m 'change start' $ hg revert -r -2 start $ hg mv dst2 dst3 $ hg ci -m 'mv dst2 dst3; revert start' $ hg diff --git -r 9:11 diff --git a/dst2 b/dst3 rename from dst2 rename to dst3 Reversed: $ hg diff --git -r 11:9 diff --git a/dst3 b/dst2 rename from dst3 rename to dst2 $ echo a >> foo $ hg add foo $ hg ci -m 'add foo' $ echo b >> foo $ hg ci -m 'change foo' $ hg mv foo bar $ hg ci -m 'mv foo bar' $ echo c >> bar $ hg ci -m 'change bar' File created before r1 and renamed before r2: $ hg diff --git -r -3:-1 diff --git a/foo b/bar rename from foo rename to bar --- a/foo +++ b/bar @@ -1,2 +1,3 @@ a b +c Reversed: $ hg diff --git -r -1:-3 diff --git a/bar b/foo rename from bar rename to foo --- a/bar +++ b/foo @@ -1,3 +1,2 @@ a b -c File created in r1 and renamed before r2: $ hg diff --git -r -4:-1 diff --git a/foo b/bar rename from foo rename to bar --- a/foo +++ b/bar @@ -1,1 +1,3 @@ a +b 
+c Reversed: $ hg diff --git -r -1:-4 diff --git a/bar b/foo rename from bar rename to foo --- a/bar +++ b/foo @@ -1,3 +1,1 @@ a -b -c File created after r1 and renamed before r2: $ hg diff --git -r -5:-1 diff --git a/bar b/bar new file mode 100644 --- /dev/null +++ b/bar @@ -0,0 +1,3 @@ +a +b +c Reversed: $ hg diff --git -r -1:-5 diff --git a/bar b/bar deleted file mode 100644 --- a/bar +++ /dev/null @@ -1,3 +0,0 @@ -a -b -c Comparing with the working dir: $ echo >> start $ hg ci -m 'change start again' $ echo > created $ hg add created $ hg ci -m 'add created' $ hg mv created created2 $ hg ci -m 'mv created created2' $ hg mv created2 created3 There's a copy in the working dir: $ hg diff --git diff --git a/created2 b/created3 rename from created2 rename to created3 There's another copy between the original rev and the wd: $ hg diff --git -r -2 diff --git a/created b/created3 rename from created rename to created3 The source of the copy was created after the original rev: $ hg diff --git -r -3 diff --git a/created3 b/created3 new file mode 100644 --- /dev/null +++ b/created3 @@ -0,0 +1,1 @@ + $ hg ci -m 'mv created2 created3' $ echo > brand-new $ hg add brand-new $ hg ci -m 'add brand-new' $ hg mv brand-new brand-new2 Created in parent of wd; renamed in the wd: $ hg diff --git diff --git a/brand-new b/brand-new2 rename from brand-new rename to brand-new2 Created between r1 and parent of wd; renamed in the wd: $ hg diff --git -r -2 diff --git a/brand-new2 b/brand-new2 new file mode 100644 --- /dev/null +++ b/brand-new2 @@ -0,0 +1,1 @@ + $ hg ci -m 'mv brand-new brand-new2' One file is copied to many destinations and removed: $ hg cp brand-new2 brand-new3 $ hg mv brand-new2 brand-new3-2 $ hg ci -m 'multiple renames/copies' $ hg diff --git -r -2 -r -1 diff --git a/brand-new2 b/brand-new3 rename from brand-new2 rename to brand-new3 diff --git a/brand-new2 b/brand-new3-2 copy from brand-new2 copy to brand-new3-2 Reversed: $ hg diff --git -r -1 -r -2 diff --git 
a/brand-new3-2 b/brand-new2 rename from brand-new3-2 rename to brand-new2 diff --git a/brand-new3 b/brand-new3 deleted file mode 100644 --- a/brand-new3 +++ /dev/null @@ -1,1 +0,0 @@ - There should be a trailing TAB if there are spaces in the file name: $ echo foo > 'with spaces' $ hg add 'with spaces' $ hg diff --git diff --git a/with spaces b/with spaces new file mode 100644 --- /dev/null +++ b/with spaces @@ -0,0 +1,1 @@ +foo $ hg ci -m 'add filename with spaces' Additions should be properly marked even in the middle of a merge $ hg up -r -2 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo "New File" >> inmerge $ hg add inmerge $ hg ci -m "file in merge" created new head $ hg up 23 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg diff -g diff --git a/inmerge b/inmerge new file mode 100644 --- /dev/null +++ b/inmerge @@ -0,0 +1,1 @@ +New File mercurial-3.7.3/tests/test-bundle-r.t0000644000175000017500000002112612676531525017145 0ustar mpmmpm00000000000000 $ hg init test $ cd test $ hg unbundle "$TESTDIR/bundles/remote.hg" adding changesets adding manifests adding file changes added 9 changesets with 7 changes to 4 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg up tip 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd .. $ for i in 0 1 2 3 4 5 6 7 8; do > mkdir test-"$i" > hg --cwd test-"$i" init > hg -R test bundle -r "$i" test-"$i".hg test-"$i" > cd test-"$i" > hg unbundle ../test-"$i".hg > hg verify > hg tip -q > cd .. 
> done searching for changes 1 changesets found adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (run 'hg update' to get a working copy) checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 1 changesets, 1 total revisions 0:bfaf4b5cbf01 searching for changes 2 changesets found adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files (run 'hg update' to get a working copy) checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 2 changesets, 2 total revisions 1:21f32785131f searching for changes 3 changesets found adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 1 files (run 'hg update' to get a working copy) checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 3 changesets, 3 total revisions 2:4ce51a113780 searching for changes 4 changesets found adding changesets adding manifests adding file changes added 4 changesets with 4 changes to 1 files (run 'hg update' to get a working copy) checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 4 changesets, 4 total revisions 3:93ee6ab32777 searching for changes 2 changesets found adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files (run 'hg update' to get a working copy) checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 2 changesets, 2 total revisions 1:c70afb1ee985 searching for changes 3 changesets found adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 1 files (run 'hg update' to get a working copy) checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 3 
changesets, 3 total revisions 2:f03ae5a9b979 searching for changes 4 changesets found adding changesets adding manifests adding file changes added 4 changesets with 5 changes to 2 files (run 'hg update' to get a working copy) checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 4 changesets, 5 total revisions 3:095cb14b1b4d searching for changes 5 changesets found adding changesets adding manifests adding file changes added 5 changesets with 6 changes to 3 files (run 'hg update' to get a working copy) checking changesets checking manifests crosschecking files in changesets and manifests checking files 3 files, 5 changesets, 6 total revisions 4:faa2e4234c7a searching for changes 5 changesets found adding changesets adding manifests adding file changes added 5 changesets with 5 changes to 2 files (run 'hg update' to get a working copy) checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 5 changesets, 5 total revisions 4:916f1afdef90 $ cd test-8 $ hg pull ../test-7 pulling from ../test-7 searching for changes adding changesets adding manifests adding file changes added 4 changesets with 2 changes to 3 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 4 files, 9 changesets, 7 total revisions $ hg rollback repository tip rolled back to revision 4 (undo pull) $ cd .. should fail $ hg -R test bundle --base 2 -r tip test-bundle-branch1.hg test-3 abort: --base is incompatible with specifying a destination [255] $ hg -R test bundle -a -r tip test-bundle-branch1.hg test-3 abort: --all is incompatible with specifying a destination [255] $ hg -R test bundle -r tip test-bundle-branch1.hg abort: repository default-push not found! 
[255] $ hg -R test bundle --base 2 -r tip test-bundle-branch1.hg 2 changesets found $ hg -R test bundle --base 2 -r 7 test-bundle-branch2.hg 4 changesets found $ hg -R test bundle --base 2 test-bundle-all.hg 6 changesets found $ hg -R test bundle --base 2 --all test-bundle-all-2.hg ignoring --base because --all was specified 9 changesets found $ hg -R test bundle --base 3 -r tip test-bundle-should-fail.hg 1 changesets found empty bundle $ hg -R test bundle --base 7 --base 8 test-bundle-empty.hg no changes found [1] issue76 msg2163 $ hg -R test bundle --base 3 -r 3 -r 3 test-bundle-cset-3.hg no changes found [1] Issue1910: 'hg bundle --base $head' does not exclude $head from result $ hg -R test bundle --base 7 test-bundle-cset-7.hg 4 changesets found $ hg clone test-2 test-9 updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd test-9 revision 2 $ hg tip -q 2:4ce51a113780 $ hg unbundle ../test-bundle-should-fail.hg adding changesets transaction abort! rollback completed abort: 00changelog.i@93ee6ab32777: unknown parent! 
[255] revision 2 $ hg tip -q 2:4ce51a113780 $ hg unbundle ../test-bundle-all.hg adding changesets adding manifests adding file changes added 6 changesets with 4 changes to 4 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) revision 8 $ hg tip -q 8:916f1afdef90 $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 4 files, 9 changesets, 7 total revisions $ hg rollback repository tip rolled back to revision 2 (undo unbundle) revision 2 $ hg tip -q 2:4ce51a113780 $ hg unbundle ../test-bundle-branch1.hg adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files (run 'hg update' to get a working copy) revision 4 $ hg tip -q 4:916f1afdef90 $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 5 changesets, 5 total revisions $ hg rollback repository tip rolled back to revision 2 (undo unbundle) $ hg unbundle ../test-bundle-branch2.hg adding changesets adding manifests adding file changes added 4 changesets with 3 changes to 3 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) revision 6 $ hg tip -q 6:faa2e4234c7a $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 3 files, 7 changesets, 6 total revisions $ hg rollback repository tip rolled back to revision 2 (undo unbundle) $ hg unbundle ../test-bundle-cset-7.hg adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files (run 'hg update' to get a working copy) revision 4 $ hg tip -q 4:916f1afdef90 $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 5 changesets, 5 total revisions $ cd ../test $ hg merge 7 note: possible conflict - afile was renamed multiple times to: anotherfile adifferentfile 2 files updated, 0 files merged, 0 files removed, 0 files 
unresolved (branch merge, don't forget to commit) $ hg ci -m merge $ cd .. $ hg -R test bundle --base 2 test-bundle-head.hg 7 changesets found $ hg clone test-2 test-10 updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd test-10 $ hg unbundle ../test-bundle-head.hg adding changesets adding manifests adding file changes added 7 changesets with 4 changes to 4 files (run 'hg update' to get a working copy) revision 9 $ hg tip -q 9:03fc0b0e347c $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 4 files, 10 changesets, 7 total revisions $ cd .. mercurial-3.7.3/tests/histedit-helpers.sh0000644000175000017500000000034512676531525020104 0ustar mpmmpm00000000000000fixbundle() { grep -v 'saving bundle' | grep -v 'saved backup' | \ grep -v added | grep -v adding | \ grep -v "unable to find 'e' for patching" | \ grep -v "e: No such file or directory" | \ cat } mercurial-3.7.3/tests/test-archive-symlinks.t0000644000175000017500000000120312676531525020717 0ustar mpmmpm00000000000000#require symlink $ origdir=`pwd` $ hg init repo $ cd repo $ ln -s nothing dangling avoid tar warnings about old timestamp $ hg ci -d '2000-01-01 00:00:00 +0000' -qAm 'add symlink' $ hg archive -t files ../archive $ hg archive -t tar -p tar ../archive.tar $ hg archive -t zip -p zip ../archive.zip files $ cd "$origdir" $ cd archive $ readlink.py dangling dangling -> nothing tar $ cd "$origdir" $ tar xf archive.tar $ cd tar $ readlink.py dangling dangling -> nothing zip $ cd "$origdir" $ unzip archive.zip > /dev/null 2>&1 $ cd zip $ readlink.py dangling dangling -> nothing $ cd .. mercurial-3.7.3/tests/test-mq-git.t0000644000175000017500000000720212676531525016632 0ustar mpmmpm00000000000000# Test the plumbing of mq.git option # Automatic upgrade itself is tested elsewhere. 
$ cat <> $HGRCPATH > [extensions] > mq = > [diff] > nodates = 1 > EOF $ hg init repo-auto $ cd repo-auto git=auto: regular patch creation: $ echo a > a $ hg add a $ hg qnew -d '0 0' -f adda $ cat .hg/patches/adda # HG changeset patch # Date 0 0 # Parent 0000000000000000000000000000000000000000 diff -r 000000000000 -r ef8dafc9fa4c a --- /dev/null +++ b/a @@ -0,0 +1,1 @@ +a git=auto: git patch creation with copy: $ hg cp a b $ hg qnew -d '0 0' -f copy $ cat .hg/patches/copy # HG changeset patch # Date 0 0 # Parent ef8dafc9fa4caff80f6e243eb0171bcd60c455b4 diff --git a/a b/b copy from a copy to b git=auto: git patch when using --git: $ echo regular > regular $ hg add regular $ hg qnew -d '0 0' --git -f git $ cat .hg/patches/git # HG changeset patch # Date 0 0 # Parent 99586d5f048c399e20f81cee41fbb3809c0e735d diff --git a/regular b/regular new file mode 100644 --- /dev/null +++ b/regular @@ -0,0 +1,1 @@ +regular git=auto: regular patch after qrefresh without --git: $ hg qrefresh -d '0 0' $ cat .hg/patches/git # HG changeset patch # Date 0 0 # Parent 99586d5f048c399e20f81cee41fbb3809c0e735d diff -r 99586d5f048c regular --- /dev/null +++ b/regular @@ -0,0 +1,1 @@ +regular $ cd .. $ hg init repo-keep $ cd repo-keep $ echo '[mq]' > .hg/hgrc $ echo 'git = KEEP' >> .hg/hgrc git=keep: git patch with --git: $ echo a > a $ hg add a $ hg qnew -d '0 0' -f --git git $ cat .hg/patches/git # HG changeset patch # Date 0 0 # Parent 0000000000000000000000000000000000000000 diff --git a/a b/a new file mode 100644 --- /dev/null +++ b/a @@ -0,0 +1,1 @@ +a git=keep: git patch after qrefresh without --git: $ echo a >> a $ hg qrefresh -d '0 0' $ cat .hg/patches/git # HG changeset patch # Date 0 0 # Parent 0000000000000000000000000000000000000000 diff --git a/a b/a new file mode 100644 --- /dev/null +++ b/a @@ -0,0 +1,2 @@ +a +a $ cd .. 
$ hg init repo-yes $ cd repo-yes $ echo '[mq]' > .hg/hgrc $ echo 'git = yes' >> .hg/hgrc git=yes: git patch: $ echo a > a $ hg add a $ hg qnew -d '0 0' -f git $ cat .hg/patches/git # HG changeset patch # Date 0 0 # Parent 0000000000000000000000000000000000000000 diff --git a/a b/a new file mode 100644 --- /dev/null +++ b/a @@ -0,0 +1,1 @@ +a git=yes: git patch after qrefresh: $ echo a >> a $ hg qrefresh -d '0 0' $ cat .hg/patches/git # HG changeset patch # Date 0 0 # Parent 0000000000000000000000000000000000000000 diff --git a/a b/a new file mode 100644 --- /dev/null +++ b/a @@ -0,0 +1,2 @@ +a +a $ cd .. $ hg init repo-no $ cd repo-no $ echo '[diff]' > .hg/hgrc $ echo 'git = True' >> .hg/hgrc $ echo '[mq]' > .hg/hgrc $ echo 'git = False' >> .hg/hgrc git=no: regular patch with copy: $ echo a > a $ hg add a $ hg qnew -d '0 0' -f adda $ hg cp a b $ hg qnew -d '0 0' -f regular $ cat .hg/patches/regular # HG changeset patch # Date 0 0 # Parent ef8dafc9fa4caff80f6e243eb0171bcd60c455b4 diff -r ef8dafc9fa4c -r a70404f79ba3 b --- /dev/null +++ b/b @@ -0,0 +1,1 @@ +a git=no: regular patch after qrefresh with copy: $ hg cp a c $ hg qrefresh -d '0 0' $ cat .hg/patches/regular # HG changeset patch # Date 0 0 # Parent ef8dafc9fa4caff80f6e243eb0171bcd60c455b4 diff -r ef8dafc9fa4c b --- /dev/null +++ b/b @@ -0,0 +1,1 @@ +a diff -r ef8dafc9fa4c c --- /dev/null +++ b/c @@ -0,0 +1,1 @@ +a $ cd .. 
mercurial-3.7.3/tests/test-subrepo-paths.t0000644000175000017500000000232312676531525020227 0ustar mpmmpm00000000000000 $ hg init outer $ cd outer $ echo '[paths]' >> .hg/hgrc $ echo 'default = http://example.net/' >> .hg/hgrc hg debugsub with no remapping $ echo 'sub = libfoo' > .hgsub $ hg add .hgsub $ hg debugsub path sub source libfoo revision hg debugsub with remapping $ echo '[subpaths]' >> .hg/hgrc $ printf 'http://example.net/lib(.*) = C:\\libs\\\\1-lib\\\n' >> .hg/hgrc $ hg debugsub path sub source C:\libs\foo-lib\ revision test cumulative remapping, the $HGRCPATH file is loaded first $ echo '[subpaths]' >> $HGRCPATH $ echo 'libfoo = libbar' >> $HGRCPATH $ hg debugsub path sub source C:\libs\bar-lib\ revision test absolute source path -- testing with a URL is important since standard os.path.join wont treat that as an absolute path $ echo 'abs = http://example.net/abs' > .hgsub $ hg debugsub path abs source http://example.net/abs revision $ echo 'abs = /abs' > .hgsub $ hg debugsub path abs source /abs revision test bad subpaths pattern $ cat > .hg/hgrc < [subpaths] > .* = \1 > EOF $ hg debugsub abort: bad subrepository pattern in $TESTTMP/outer/.hg/hgrc:2: invalid group reference (glob) [255] $ cd .. mercurial-3.7.3/tests/test-convert-mtn.t0000644000175000017500000002473012676531525017715 0ustar mpmmpm00000000000000#require mtn Monotone directory is called .monotone on *nix and monotone on Windows. 
#if windows $ mtndir=monotone #else $ mtndir=.monotone #endif $ echo "[extensions]" >> $HGRCPATH $ echo "convert=" >> $HGRCPATH Windows version of monotone home $ APPDATA=$HOME; export APPDATA tedious monotone keys configuration The /dev/null redirection is necessary under Windows, or it complains about home directory permissions $ mtn --quiet genkey test@selenic.com 1>/dev/null 2>&1 < passphrase > passphrase > EOF $ cat >> $HOME/$mtndir/monotonerc < function get_passphrase(keypair_id) > return "passphrase" > end > EOF create monotone repository $ mtn db init --db=repo.mtn $ mtn --db=repo.mtn --branch=com.selenic.test setup workingdir $ cd workingdir $ echo a > a $ mkdir dir $ echo b > dir/b $ echo d > dir/d $ $PYTHON -c 'file("bin", "wb").write("a\\x00b")' $ echo c > c $ mtn add a dir/b dir/d c bin mtn: adding 'a' to workspace manifest mtn: adding 'bin' to workspace manifest mtn: adding 'c' to workspace manifest mtn: adding 'dir' to workspace manifest mtn: adding 'dir/b' to workspace manifest mtn: adding 'dir/d' to workspace manifest $ mtn ci -m initialize mtn: beginning commit on branch 'com.selenic.test' mtn: committed revision 0f6e5e4f2e7d2a8ef312408f57618abf026afd90 update monotone working directory $ mtn mv a dir/a mtn: skipping 'dir', already accounted for in workspace mtn: renaming 'a' to 'dir/a' in workspace manifest $ echo a >> dir/a $ echo b >> dir/b $ mtn drop c mtn: dropping 'c' from workspace manifest $ $PYTHON -c 'file("bin", "wb").write("b\\x00c")' $ mtn ci -m update1 mtn: beginning commit on branch 'com.selenic.test' mtn: committed revision 51d0a982464573a2a2cf5ee2c9219c652aaebeff $ cd .. convert once $ hg convert -s mtn repo.mtn assuming destination repo.mtn-hg initializing destination repo.mtn-hg repository scanning source... sorting... converting... 
1 initialize 0 update1 $ cd workingdir $ echo e > e $ mtn add e mtn: adding 'e' to workspace manifest $ mtn drop dir/b mtn: dropping 'dir/b' from workspace manifest $ mtn mv bin bin2 mtn: renaming 'bin' to 'bin2' in workspace manifest $ mtn ci -m 'update2 "with" quotes' mtn: beginning commit on branch 'com.selenic.test' mtn: committed revision ebe58335d85d8cb176b6d0a12be04f5314b998da test directory move $ mkdir -p dir1/subdir1 $ mkdir -p dir1/subdir2_other $ echo file1 > dir1/subdir1/file1 $ echo file2 > dir1/subdir2_other/file1 $ mtn add dir1/subdir1/file1 dir1/subdir2_other/file1 mtn: adding 'dir1' to workspace manifest mtn: adding 'dir1/subdir1' to workspace manifest mtn: adding 'dir1/subdir1/file1' to workspace manifest mtn: adding 'dir1/subdir2_other' to workspace manifest mtn: adding 'dir1/subdir2_other/file1' to workspace manifest $ mtn ci -m createdir1 mtn: beginning commit on branch 'com.selenic.test' mtn: committed revision a8d62bc04fee4d2936d28e98bbcc81686dd74306 $ mtn rename dir1/subdir1 dir1/subdir2 mtn: skipping 'dir1', already accounted for in workspace mtn: renaming 'dir1/subdir1' to 'dir1/subdir2' in workspace manifest $ mtn ci -m movedir1 mtn: beginning commit on branch 'com.selenic.test' mtn: committed revision 2c3d241bbbfe538b1b51d910f5676407e3f4d3a6 test subdirectory move $ mtn mv dir dir2 mtn: renaming 'dir' to 'dir2' in workspace manifest $ echo newfile > dir2/newfile $ mtn drop dir2/d mtn: dropping 'dir2/d' from workspace manifest $ mtn add dir2/newfile mtn: adding 'dir2/newfile' to workspace manifest $ mtn ci -m movedir mtn: beginning commit on branch 'com.selenic.test' mtn: committed revision fdb5a02dae8bfce3a79b3393680af471016e1b4c Test directory removal with empty directory $ mkdir dir2/dir $ mkdir dir2/dir/subdir $ echo f > dir2/dir/subdir/f $ mkdir dir2/dir/emptydir $ mtn add --quiet -R dir2/dir $ mtn ci -m emptydir mtn: beginning commit on branch 'com.selenic.test' mtn: committed revision 8bbf76d717001d24964e4604739fdcd0f539fc88 $ mtn 
drop -R dir2/dir mtn: dropping 'dir2/dir/subdir/f' from workspace manifest mtn: dropping 'dir2/dir/subdir' from workspace manifest mtn: dropping 'dir2/dir/emptydir' from workspace manifest mtn: dropping 'dir2/dir' from workspace manifest $ mtn ci -m dropdirectory mtn: beginning commit on branch 'com.selenic.test' mtn: committed revision 2323d4bc324e6c82628dc04d47a9fd32ad24e322 test directory and file move $ mkdir -p dir3/d1 $ echo a > dir3/a $ mtn add dir3/a dir3/d1 mtn: adding 'dir3' to workspace manifest mtn: adding 'dir3/a' to workspace manifest mtn: adding 'dir3/d1' to workspace manifest $ mtn ci -m dirfilemove mtn: beginning commit on branch 'com.selenic.test' mtn: committed revision 47b192f720faa622f48c68d1eb075b26d405aa8b $ mtn mv dir3/a dir3/d1/a mtn: skipping 'dir3/d1', already accounted for in workspace mtn: renaming 'dir3/a' to 'dir3/d1/a' in workspace manifest $ mtn mv dir3/d1 dir3/d2 mtn: skipping 'dir3', already accounted for in workspace mtn: renaming 'dir3/d1' to 'dir3/d2' in workspace manifest $ mtn ci -m dirfilemove2 mtn: beginning commit on branch 'com.selenic.test' mtn: committed revision 8b543a400d3ee7f6d4bb1835b9b9e3747c8cb632 test directory move into another directory move $ mkdir dir4 $ mkdir dir5 $ echo a > dir4/a $ mtn add dir4/a dir5 mtn: adding 'dir4' to workspace manifest mtn: adding 'dir4/a' to workspace manifest mtn: adding 'dir5' to workspace manifest $ mtn ci -m dirdirmove mtn: beginning commit on branch 'com.selenic.test' mtn: committed revision 466e0b2afc7a55aa2b4ab2f57cb240bb6cd66fc7 $ mtn mv dir5 dir6 mtn: renaming 'dir5' to 'dir6' in workspace manifest $ mtn mv dir4 dir6/dir4 mtn: skipping 'dir6', already accounted for in workspace mtn: renaming 'dir4' to 'dir6/dir4' in workspace manifest $ mtn ci -m dirdirmove2 mtn: beginning commit on branch 'com.selenic.test' mtn: committed revision 3d1f77ebad0c23a5d14911be3b670f990991b749 test diverging directory moves $ mkdir -p dir7/dir9/dir8 $ echo a > dir7/dir9/dir8/a $ echo b > 
dir7/dir9/b $ echo c > dir7/c $ mtn add -R dir7 mtn: adding 'dir7' to workspace manifest mtn: adding 'dir7/c' to workspace manifest mtn: adding 'dir7/dir9' to workspace manifest mtn: adding 'dir7/dir9/b' to workspace manifest mtn: adding 'dir7/dir9/dir8' to workspace manifest mtn: adding 'dir7/dir9/dir8/a' to workspace manifest $ mtn ci -m divergentdirmove mtn: beginning commit on branch 'com.selenic.test' mtn: committed revision 08a08511f18b428d840199b062de90d0396bc2ed $ mtn mv dir7 dir7-2 mtn: renaming 'dir7' to 'dir7-2' in workspace manifest $ mtn mv dir7-2/dir9 dir9-2 mtn: renaming 'dir7-2/dir9' to 'dir9-2' in workspace manifest $ mtn mv dir9-2/dir8 dir8-2 mtn: renaming 'dir9-2/dir8' to 'dir8-2' in workspace manifest $ mtn ci -m divergentdirmove2 mtn: beginning commit on branch 'com.selenic.test' mtn: committed revision 4a736634505795f17786fffdf2c9cbf5b11df6f6 test large file support (> 32kB) >>> fp = file('large-file', 'wb') >>> for x in xrange(10000): fp.write('%d\n' % x) >>> fp.close() $ md5sum.py large-file 5d6de8a95c3b6bf9e0ffb808ba5299c1 large-file $ mtn add large-file mtn: adding 'large-file' to workspace manifest $ mtn ci -m largefile mtn: beginning commit on branch 'com.selenic.test' mtn: committed revision f0a20fecd10dc4392d18fe69a03f1f4919d3387b test suspending (closing a branch) $ mtn suspend f0a20fecd10dc4392d18fe69a03f1f4919d3387b 2> /dev/null $ cd .. convert incrementally $ hg convert -s mtn repo.mtn assuming destination repo.mtn-hg scanning source... sorting... converting... 
12 update2 "with" quotes 11 createdir1 10 movedir1 9 movedir 8 emptydir 7 dropdirectory 6 dirfilemove 5 dirfilemove2 4 dirdirmove 3 dirdirmove2 2 divergentdirmove 1 divergentdirmove2 0 largefile $ glog() > { > hg log -G --template '{rev} "{desc|firstline}" files: {files}\n' "$@" > } $ cd repo.mtn-hg $ hg up -C 12 files updated, 0 files merged, 0 files removed, 0 files unresolved $ glog @ 14 "largefile" files: large-file | o 13 "divergentdirmove2" files: dir7-2/c dir7/c dir7/dir9/b dir7/dir9/dir8/a dir8-2/a dir9-2/b | o 12 "divergentdirmove" files: dir7/c dir7/dir9/b dir7/dir9/dir8/a | o 11 "dirdirmove2" files: dir4/a dir6/dir4/a | o 10 "dirdirmove" files: dir4/a | o 9 "dirfilemove2" files: dir3/a dir3/d2/a | o 8 "dirfilemove" files: dir3/a | o 7 "dropdirectory" files: dir2/dir/subdir/f | o 6 "emptydir" files: dir2/dir/subdir/f | o 5 "movedir" files: dir/a dir/d dir2/a dir2/newfile | o 4 "movedir1" files: dir1/subdir1/file1 dir1/subdir2/file1 | o 3 "createdir1" files: dir1/subdir1/file1 dir1/subdir2_other/file1 | o 2 "update2 "with" quotes" files: bin bin2 dir/b e | o 1 "update1" files: a bin c dir/a dir/b | o 0 "initialize" files: a bin c dir/b dir/d manifest $ hg manifest bin2 dir1/subdir2/file1 dir1/subdir2_other/file1 dir2/a dir2/newfile dir3/d2/a dir6/dir4/a dir7-2/c dir8-2/a dir9-2/b e large-file contents $ cat dir2/a a a $ test -d dir2/dir && echo 'removed dir2/dir is still there!' [1] file move $ hg log -v -C -r 1 | grep copies copies: dir/a (a) check directory move $ hg manifest -r 4 bin2 dir/a dir/d dir1/subdir2/file1 dir1/subdir2_other/file1 e $ test -d dir1/subdir2 || echo 'new dir1/subdir2 does not exist!' $ test -d dir1/subdir1 && echo 'renamed dir1/subdir1 is still there!' 
[1] $ hg log -v -C -r 4 | grep copies copies: dir1/subdir2/file1 (dir1/subdir1/file1) check file remove with directory move $ hg manifest -r 5 bin2 dir1/subdir2/file1 dir1/subdir2_other/file1 dir2/a dir2/newfile e check file move with directory move $ hg manifest -r 9 bin2 dir1/subdir2/file1 dir1/subdir2_other/file1 dir2/a dir2/newfile dir3/d2/a e check file directory directory move $ hg manifest -r 11 bin2 dir1/subdir2/file1 dir1/subdir2_other/file1 dir2/a dir2/newfile dir3/d2/a dir6/dir4/a e check divergent directory moves $ hg manifest -r 13 bin2 dir1/subdir2/file1 dir1/subdir2_other/file1 dir2/a dir2/newfile dir3/d2/a dir6/dir4/a dir7-2/c dir8-2/a dir9-2/b e test large file support (> 32kB) $ md5sum.py large-file 5d6de8a95c3b6bf9e0ffb808ba5299c1 large-file check branch closing $ hg branches -a $ hg branches -c com.selenic.test 14:* (closed) (glob) mercurial-3.7.3/tests/test-batching.py.out0000644000175000017500000000075312676531525020212 0ustar mpmmpm00000000000000 == Local Ready. Un and Deux Eins und Zwei One and Two Nope Eins und Zwei Hello, John Smith Ready. Uno und Due == Remote Ready. REQ: foo?one=Vo&two=Efvy -> Vo!boe!Efvy Un and Deux REQ: bar?b=Fjot&a=[xfj -> Fjot!voe![xfj Eins und Zwei REQ: batch?cmds=foo:one=Pof,two=Uxp;bar:b=Fjot,a=[xfj -> Pof!boe!Uxp;Fjot!voe![xfj REQ: greet?name=Kpio!Tnjui -> Ifmmp-!Kpio!Tnjui REQ: batch?cmds=bar:b=Vop,a=Evf -> Vop!voe!Evf One and Two Nope Eins und Zwei Hello, John Smith Ready. 
Uno und Due mercurial-3.7.3/tests/killdaemons.py0000755000175000017500000000604312676531525017153 0ustar mpmmpm00000000000000#!/usr/bin/env python import os, sys, time, errno, signal if os.name =='nt': import ctypes def _check(ret, expectederr=None): if ret == 0: winerrno = ctypes.GetLastError() if winerrno == expectederr: return True raise ctypes.WinError(winerrno) def kill(pid, logfn, tryhard=True): logfn('# Killing daemon process %d' % pid) PROCESS_TERMINATE = 1 PROCESS_QUERY_INFORMATION = 0x400 SYNCHRONIZE = 0x00100000 WAIT_OBJECT_0 = 0 WAIT_TIMEOUT = 258 handle = ctypes.windll.kernel32.OpenProcess( PROCESS_TERMINATE|SYNCHRONIZE|PROCESS_QUERY_INFORMATION, False, pid) if handle == 0: _check(0, 87) # err 87 when process not found return # process not found, already finished try: r = ctypes.windll.kernel32.WaitForSingleObject(handle, 100) if r == WAIT_OBJECT_0: pass # terminated, but process handle still available elif r == WAIT_TIMEOUT: _check(ctypes.windll.kernel32.TerminateProcess(handle, -1)) else: _check(r) # TODO?: forcefully kill when timeout # and ?shorter waiting time? 
when tryhard==True r = ctypes.windll.kernel32.WaitForSingleObject(handle, 100) # timeout = 100 ms if r == WAIT_OBJECT_0: pass # process is terminated elif r == WAIT_TIMEOUT: logfn('# Daemon process %d is stuck') else: _check(r) # any error except: #re-raises ctypes.windll.kernel32.CloseHandle(handle) # no _check, keep error raise _check(ctypes.windll.kernel32.CloseHandle(handle)) else: def kill(pid, logfn, tryhard=True): try: os.kill(pid, 0) logfn('# Killing daemon process %d' % pid) os.kill(pid, signal.SIGTERM) if tryhard: for i in range(10): time.sleep(0.05) os.kill(pid, 0) else: time.sleep(0.1) os.kill(pid, 0) logfn('# Daemon process %d is stuck - really killing it' % pid) os.kill(pid, signal.SIGKILL) except OSError as err: if err.errno != errno.ESRCH: raise def killdaemons(pidfile, tryhard=True, remove=False, logfn=None): if not logfn: logfn = lambda s: s # Kill off any leftover daemon processes try: fp = open(pidfile) for line in fp: try: pid = int(line) except ValueError: continue kill(pid, logfn, tryhard) fp.close() if remove: os.unlink(pidfile) except IOError: pass if __name__ == '__main__': if len(sys.argv) > 1: path, = sys.argv[1:] else: path = os.environ["DAEMON_PIDS"] killdaemons(path) mercurial-3.7.3/tests/test-mv-cp-st-diff.t0000644000175000017500000005215512676531525020017 0ustar mpmmpm00000000000000 $ add() > { > echo $2 >> $1 > } $ hg init t $ cd t set up a boring main branch $ add a a $ hg add a $ mkdir x $ add x/x x $ hg add x/x $ hg ci -m0 $ add a m1 $ hg ci -m1 $ add a m2 $ add x/y y1 $ hg add x/y $ hg ci -m2 $ cd .. 
$ show() > { > echo "# $2:" > echo > echo "% hg st -C $1" > hg st -C $1 > echo > echo "% hg diff --git $1" > hg diff --git $1 > echo > } $ count=0 make a new branch and get diff/status output $1 - first commit $2 - second commit $3 - working dir action $ tb() > { > hg clone -q t t2 ; cd t2 > hg co -q -C 0 > > echo % add a $count > add a $count > count=`expr $count + 1` > echo % hg ci -m "t0" > hg ci -m "t0" > echo % $1 > $1 > echo % hg ci -m "t1" > hg ci -m "t1" > echo % $2 > $2 > echo % hg ci -m "t2" > hg ci -m "t2" > echo % $3 > $3 > echo > show "" "working to parent" > show "--rev 0" "working to root" > show "--rev 2" "working to branch" > show "--rev 0 --rev ." "root to parent" > show "--rev . --rev 0" "parent to root" > show "--rev 2 --rev ." "branch to parent" > show "--rev . --rev 2" "parent to branch" > echo > cd .. > rm -rf t2 > } rename in working dir $ tb "add a a1" "add a a2" "hg mv a b" % add a 0 % hg ci -m t0 created new head % add a a1 % hg ci -m t1 % add a a2 % hg ci -m t2 % hg mv a b # working to parent: % hg st -C A b a R a % hg diff --git diff --git a/a b/b rename from a rename to b # working to root: % hg st -C --rev 0 A b a R a % hg diff --git --rev 0 diff --git a/a b/b rename from a rename to b --- a/a +++ b/b @@ -1,1 +1,4 @@ a +0 +a1 +a2 # working to branch: % hg st -C --rev 2 A b a R a R x/y % hg diff --git --rev 2 diff --git a/a b/b rename from a rename to b --- a/a +++ b/b @@ -1,3 +1,4 @@ a -m1 -m2 +0 +a1 +a2 diff --git a/x/y b/x/y deleted file mode 100644 --- a/x/y +++ /dev/null @@ -1,1 +0,0 @@ -y1 # root to parent: % hg st -C --rev 0 --rev . M a % hg diff --git --rev 0 --rev . diff --git a/a b/a --- a/a +++ b/a @@ -1,1 +1,4 @@ a +0 +a1 +a2 # parent to root: % hg st -C --rev . --rev 0 M a % hg diff --git --rev . --rev 0 diff --git a/a b/a --- a/a +++ b/a @@ -1,4 +1,1 @@ a -0 -a1 -a2 # branch to parent: % hg st -C --rev 2 --rev . M a R x/y % hg diff --git --rev 2 --rev . 
diff --git a/a b/a --- a/a +++ b/a @@ -1,3 +1,4 @@ a -m1 -m2 +0 +a1 +a2 diff --git a/x/y b/x/y deleted file mode 100644 --- a/x/y +++ /dev/null @@ -1,1 +0,0 @@ -y1 # parent to branch: % hg st -C --rev . --rev 2 M a A x/y % hg diff --git --rev . --rev 2 diff --git a/a b/a --- a/a +++ b/a @@ -1,4 +1,3 @@ a -0 -a1 -a2 +m1 +m2 diff --git a/x/y b/x/y new file mode 100644 --- /dev/null +++ b/x/y @@ -0,0 +1,1 @@ +y1 copy in working dir $ tb "add a a1" "add a a2" "hg cp a b" % add a 1 % hg ci -m t0 created new head % add a a1 % hg ci -m t1 % add a a2 % hg ci -m t2 % hg cp a b # working to parent: % hg st -C A b a % hg diff --git diff --git a/a b/b copy from a copy to b # working to root: % hg st -C --rev 0 M a A b a % hg diff --git --rev 0 diff --git a/a b/a --- a/a +++ b/a @@ -1,1 +1,4 @@ a +1 +a1 +a2 diff --git a/a b/b copy from a copy to b --- a/a +++ b/b @@ -1,1 +1,4 @@ a +1 +a1 +a2 # working to branch: % hg st -C --rev 2 M a A b a R x/y % hg diff --git --rev 2 diff --git a/a b/a --- a/a +++ b/a @@ -1,3 +1,4 @@ a -m1 -m2 +1 +a1 +a2 diff --git a/a b/b copy from a copy to b --- a/a +++ b/b @@ -1,3 +1,4 @@ a -m1 -m2 +1 +a1 +a2 diff --git a/x/y b/x/y deleted file mode 100644 --- a/x/y +++ /dev/null @@ -1,1 +0,0 @@ -y1 # root to parent: % hg st -C --rev 0 --rev . M a % hg diff --git --rev 0 --rev . diff --git a/a b/a --- a/a +++ b/a @@ -1,1 +1,4 @@ a +1 +a1 +a2 # parent to root: % hg st -C --rev . --rev 0 M a % hg diff --git --rev . --rev 0 diff --git a/a b/a --- a/a +++ b/a @@ -1,4 +1,1 @@ a -1 -a1 -a2 # branch to parent: % hg st -C --rev 2 --rev . M a R x/y % hg diff --git --rev 2 --rev . diff --git a/a b/a --- a/a +++ b/a @@ -1,3 +1,4 @@ a -m1 -m2 +1 +a1 +a2 diff --git a/x/y b/x/y deleted file mode 100644 --- a/x/y +++ /dev/null @@ -1,1 +0,0 @@ -y1 # parent to branch: % hg st -C --rev . --rev 2 M a A x/y % hg diff --git --rev . 
--rev 2 diff --git a/a b/a --- a/a +++ b/a @@ -1,4 +1,3 @@ a -1 -a1 -a2 +m1 +m2 diff --git a/x/y b/x/y new file mode 100644 --- /dev/null +++ b/x/y @@ -0,0 +1,1 @@ +y1 single rename $ tb "hg mv a b" "add b b1" "add b w" % add a 2 % hg ci -m t0 created new head % hg mv a b % hg ci -m t1 % add b b1 % hg ci -m t2 % add b w # working to parent: % hg st -C M b % hg diff --git diff --git a/b b/b --- a/b +++ b/b @@ -1,3 +1,4 @@ a 2 b1 +w # working to root: % hg st -C --rev 0 A b a R a % hg diff --git --rev 0 diff --git a/a b/b rename from a rename to b --- a/a +++ b/b @@ -1,1 +1,4 @@ a +2 +b1 +w # working to branch: % hg st -C --rev 2 A b a R a R x/y % hg diff --git --rev 2 diff --git a/a b/b rename from a rename to b --- a/a +++ b/b @@ -1,3 +1,4 @@ a -m1 -m2 +2 +b1 +w diff --git a/x/y b/x/y deleted file mode 100644 --- a/x/y +++ /dev/null @@ -1,1 +0,0 @@ -y1 # root to parent: % hg st -C --rev 0 --rev . A b a R a % hg diff --git --rev 0 --rev . diff --git a/a b/b rename from a rename to b --- a/a +++ b/b @@ -1,1 +1,3 @@ a +2 +b1 # parent to root: % hg st -C --rev . --rev 0 A a b R b % hg diff --git --rev . --rev 0 diff --git a/b b/a rename from b rename to a --- a/b +++ b/a @@ -1,3 +1,1 @@ a -2 -b1 # branch to parent: % hg st -C --rev 2 --rev . A b a R a R x/y % hg diff --git --rev 2 --rev . diff --git a/a b/b rename from a rename to b --- a/a +++ b/b @@ -1,3 +1,3 @@ a -m1 -m2 +2 +b1 diff --git a/x/y b/x/y deleted file mode 100644 --- a/x/y +++ /dev/null @@ -1,1 +0,0 @@ -y1 # parent to branch: % hg st -C --rev . --rev 2 A a b A x/y R b % hg diff --git --rev . 
--rev 2 diff --git a/b b/a rename from b rename to a --- a/b +++ b/a @@ -1,3 +1,3 @@ a -2 -b1 +m1 +m2 diff --git a/x/y b/x/y new file mode 100644 --- /dev/null +++ b/x/y @@ -0,0 +1,1 @@ +y1 single copy $ tb "hg cp a b" "add b b1" "add a w" % add a 3 % hg ci -m t0 created new head % hg cp a b % hg ci -m t1 % add b b1 % hg ci -m t2 % add a w # working to parent: % hg st -C M a % hg diff --git diff --git a/a b/a --- a/a +++ b/a @@ -1,2 +1,3 @@ a 3 +w # working to root: % hg st -C --rev 0 M a A b a % hg diff --git --rev 0 diff --git a/a b/a --- a/a +++ b/a @@ -1,1 +1,3 @@ a +3 +w diff --git a/a b/b copy from a copy to b --- a/a +++ b/b @@ -1,1 +1,3 @@ a +3 +b1 # working to branch: % hg st -C --rev 2 M a A b a R x/y % hg diff --git --rev 2 diff --git a/a b/a --- a/a +++ b/a @@ -1,3 +1,3 @@ a -m1 -m2 +3 +w diff --git a/a b/b copy from a copy to b --- a/a +++ b/b @@ -1,3 +1,3 @@ a -m1 -m2 +3 +b1 diff --git a/x/y b/x/y deleted file mode 100644 --- a/x/y +++ /dev/null @@ -1,1 +0,0 @@ -y1 # root to parent: % hg st -C --rev 0 --rev . M a A b a % hg diff --git --rev 0 --rev . diff --git a/a b/a --- a/a +++ b/a @@ -1,1 +1,2 @@ a +3 diff --git a/a b/b copy from a copy to b --- a/a +++ b/b @@ -1,1 +1,3 @@ a +3 +b1 # parent to root: % hg st -C --rev . --rev 0 M a R b % hg diff --git --rev . --rev 0 diff --git a/a b/a --- a/a +++ b/a @@ -1,2 +1,1 @@ a -3 diff --git a/b b/b deleted file mode 100644 --- a/b +++ /dev/null @@ -1,3 +0,0 @@ -a -3 -b1 # branch to parent: % hg st -C --rev 2 --rev . M a A b a R x/y % hg diff --git --rev 2 --rev . diff --git a/a b/a --- a/a +++ b/a @@ -1,3 +1,2 @@ a -m1 -m2 +3 diff --git a/a b/b copy from a copy to b --- a/a +++ b/b @@ -1,3 +1,3 @@ a -m1 -m2 +3 +b1 diff --git a/x/y b/x/y deleted file mode 100644 --- a/x/y +++ /dev/null @@ -1,1 +0,0 @@ -y1 # parent to branch: % hg st -C --rev . --rev 2 M a A x/y R b % hg diff --git --rev . 
--rev 2 diff --git a/a b/a --- a/a +++ b/a @@ -1,2 +1,3 @@ a -3 +m1 +m2 diff --git a/b b/b deleted file mode 100644 --- a/b +++ /dev/null @@ -1,3 +0,0 @@ -a -3 -b1 diff --git a/x/y b/x/y new file mode 100644 --- /dev/null +++ b/x/y @@ -0,0 +1,1 @@ +y1 rename chain $ tb "hg mv a b" "hg mv b c" "hg mv c d" % add a 4 % hg ci -m t0 created new head % hg mv a b % hg ci -m t1 % hg mv b c % hg ci -m t2 % hg mv c d # working to parent: % hg st -C A d c R c % hg diff --git diff --git a/c b/d rename from c rename to d # working to root: % hg st -C --rev 0 A d a R a % hg diff --git --rev 0 diff --git a/a b/d rename from a rename to d --- a/a +++ b/d @@ -1,1 +1,2 @@ a +4 # working to branch: % hg st -C --rev 2 A d a R a R x/y % hg diff --git --rev 2 diff --git a/a b/d rename from a rename to d --- a/a +++ b/d @@ -1,3 +1,2 @@ a -m1 -m2 +4 diff --git a/x/y b/x/y deleted file mode 100644 --- a/x/y +++ /dev/null @@ -1,1 +0,0 @@ -y1 # root to parent: % hg st -C --rev 0 --rev . A c a R a % hg diff --git --rev 0 --rev . diff --git a/a b/c rename from a rename to c --- a/a +++ b/c @@ -1,1 +1,2 @@ a +4 # parent to root: % hg st -C --rev . --rev 0 A a c R c % hg diff --git --rev . --rev 0 diff --git a/c b/a rename from c rename to a --- a/c +++ b/a @@ -1,2 +1,1 @@ a -4 # branch to parent: % hg st -C --rev 2 --rev . A c a R a R x/y % hg diff --git --rev 2 --rev . diff --git a/a b/c rename from a rename to c --- a/a +++ b/c @@ -1,3 +1,2 @@ a -m1 -m2 +4 diff --git a/x/y b/x/y deleted file mode 100644 --- a/x/y +++ /dev/null @@ -1,1 +0,0 @@ -y1 # parent to branch: % hg st -C --rev . --rev 2 A a c A x/y R c % hg diff --git --rev . 
--rev 2 diff --git a/c b/a rename from c rename to a --- a/c +++ b/a @@ -1,2 +1,3 @@ a -4 +m1 +m2 diff --git a/x/y b/x/y new file mode 100644 --- /dev/null +++ b/x/y @@ -0,0 +1,1 @@ +y1 copy chain $ tb "hg cp a b" "hg cp b c" "hg cp c d" % add a 5 % hg ci -m t0 created new head % hg cp a b % hg ci -m t1 % hg cp b c % hg ci -m t2 % hg cp c d # working to parent: % hg st -C A d c % hg diff --git diff --git a/c b/d copy from c copy to d # working to root: % hg st -C --rev 0 M a A b a A c a A d a % hg diff --git --rev 0 diff --git a/a b/a --- a/a +++ b/a @@ -1,1 +1,2 @@ a +5 diff --git a/a b/b copy from a copy to b --- a/a +++ b/b @@ -1,1 +1,2 @@ a +5 diff --git a/a b/c copy from a copy to c --- a/a +++ b/c @@ -1,1 +1,2 @@ a +5 diff --git a/a b/d copy from a copy to d --- a/a +++ b/d @@ -1,1 +1,2 @@ a +5 # working to branch: % hg st -C --rev 2 M a A b a A c a A d a R x/y % hg diff --git --rev 2 diff --git a/a b/a --- a/a +++ b/a @@ -1,3 +1,2 @@ a -m1 -m2 +5 diff --git a/a b/b copy from a copy to b --- a/a +++ b/b @@ -1,3 +1,2 @@ a -m1 -m2 +5 diff --git a/a b/c copy from a copy to c --- a/a +++ b/c @@ -1,3 +1,2 @@ a -m1 -m2 +5 diff --git a/a b/d copy from a copy to d --- a/a +++ b/d @@ -1,3 +1,2 @@ a -m1 -m2 +5 diff --git a/x/y b/x/y deleted file mode 100644 --- a/x/y +++ /dev/null @@ -1,1 +0,0 @@ -y1 # root to parent: % hg st -C --rev 0 --rev . M a A b a A c a % hg diff --git --rev 0 --rev . diff --git a/a b/a --- a/a +++ b/a @@ -1,1 +1,2 @@ a +5 diff --git a/a b/b copy from a copy to b --- a/a +++ b/b @@ -1,1 +1,2 @@ a +5 diff --git a/a b/c copy from a copy to c --- a/a +++ b/c @@ -1,1 +1,2 @@ a +5 # parent to root: % hg st -C --rev . --rev 0 M a R b R c % hg diff --git --rev . --rev 0 diff --git a/a b/a --- a/a +++ b/a @@ -1,2 +1,1 @@ a -5 diff --git a/b b/b deleted file mode 100644 --- a/b +++ /dev/null @@ -1,2 +0,0 @@ -a -5 diff --git a/c b/c deleted file mode 100644 --- a/c +++ /dev/null @@ -1,2 +0,0 @@ -a -5 # branch to parent: % hg st -C --rev 2 --rev . 
M a A b a A c a R x/y % hg diff --git --rev 2 --rev . diff --git a/a b/a --- a/a +++ b/a @@ -1,3 +1,2 @@ a -m1 -m2 +5 diff --git a/a b/b copy from a copy to b --- a/a +++ b/b @@ -1,3 +1,2 @@ a -m1 -m2 +5 diff --git a/a b/c copy from a copy to c --- a/a +++ b/c @@ -1,3 +1,2 @@ a -m1 -m2 +5 diff --git a/x/y b/x/y deleted file mode 100644 --- a/x/y +++ /dev/null @@ -1,1 +0,0 @@ -y1 # parent to branch: % hg st -C --rev . --rev 2 M a A x/y R b R c % hg diff --git --rev . --rev 2 diff --git a/a b/a --- a/a +++ b/a @@ -1,2 +1,3 @@ a -5 +m1 +m2 diff --git a/b b/b deleted file mode 100644 --- a/b +++ /dev/null @@ -1,2 +0,0 @@ -a -5 diff --git a/c b/c deleted file mode 100644 --- a/c +++ /dev/null @@ -1,2 +0,0 @@ -a -5 diff --git a/x/y b/x/y new file mode 100644 --- /dev/null +++ b/x/y @@ -0,0 +1,1 @@ +y1 circular rename $ tb "add a a1" "hg mv a b" "hg mv b a" % add a 6 % hg ci -m t0 created new head % add a a1 % hg ci -m t1 % hg mv a b % hg ci -m t2 % hg mv b a # working to parent: % hg st -C A a b R b % hg diff --git diff --git a/b b/a rename from b rename to a # working to root: % hg st -C --rev 0 M a % hg diff --git --rev 0 diff --git a/a b/a --- a/a +++ b/a @@ -1,1 +1,3 @@ a +6 +a1 # working to branch: % hg st -C --rev 2 M a R x/y % hg diff --git --rev 2 diff --git a/a b/a --- a/a +++ b/a @@ -1,3 +1,3 @@ a -m1 -m2 +6 +a1 diff --git a/x/y b/x/y deleted file mode 100644 --- a/x/y +++ /dev/null @@ -1,1 +0,0 @@ -y1 # root to parent: % hg st -C --rev 0 --rev . A b a R a % hg diff --git --rev 0 --rev . diff --git a/a b/b rename from a rename to b --- a/a +++ b/b @@ -1,1 +1,3 @@ a +6 +a1 # parent to root: % hg st -C --rev . --rev 0 A a b R b % hg diff --git --rev . --rev 0 diff --git a/b b/a rename from b rename to a --- a/b +++ b/a @@ -1,3 +1,1 @@ a -6 -a1 # branch to parent: % hg st -C --rev 2 --rev . A b a R a R x/y % hg diff --git --rev 2 --rev . 
diff --git a/a b/b rename from a rename to b --- a/a +++ b/b @@ -1,3 +1,3 @@ a -m1 -m2 +6 +a1 diff --git a/x/y b/x/y deleted file mode 100644 --- a/x/y +++ /dev/null @@ -1,1 +0,0 @@ -y1 # parent to branch: % hg st -C --rev . --rev 2 A a b A x/y R b % hg diff --git --rev . --rev 2 diff --git a/b b/a rename from b rename to a --- a/b +++ b/a @@ -1,3 +1,3 @@ a -6 -a1 +m1 +m2 diff --git a/x/y b/x/y new file mode 100644 --- /dev/null +++ b/x/y @@ -0,0 +1,1 @@ +y1 directory move $ tb "hg mv x y" "add y/x x1" "add y/x x2" % add a 7 % hg ci -m t0 created new head % hg mv x y moving x/x to y/x (glob) % hg ci -m t1 % add y/x x1 % hg ci -m t2 % add y/x x2 # working to parent: % hg st -C M y/x % hg diff --git diff --git a/y/x b/y/x --- a/y/x +++ b/y/x @@ -1,2 +1,3 @@ x x1 +x2 # working to root: % hg st -C --rev 0 M a A y/x x/x R x/x % hg diff --git --rev 0 diff --git a/a b/a --- a/a +++ b/a @@ -1,1 +1,2 @@ a +7 diff --git a/x/x b/y/x rename from x/x rename to y/x --- a/x/x +++ b/y/x @@ -1,1 +1,3 @@ x +x1 +x2 # working to branch: % hg st -C --rev 2 M a A y/x x/x R x/x R x/y % hg diff --git --rev 2 diff --git a/a b/a --- a/a +++ b/a @@ -1,3 +1,2 @@ a -m1 -m2 +7 diff --git a/x/y b/x/y deleted file mode 100644 --- a/x/y +++ /dev/null @@ -1,1 +0,0 @@ -y1 diff --git a/x/x b/y/x rename from x/x rename to y/x --- a/x/x +++ b/y/x @@ -1,1 +1,3 @@ x +x1 +x2 # root to parent: % hg st -C --rev 0 --rev . M a A y/x x/x R x/x % hg diff --git --rev 0 --rev . diff --git a/a b/a --- a/a +++ b/a @@ -1,1 +1,2 @@ a +7 diff --git a/x/x b/y/x rename from x/x rename to y/x --- a/x/x +++ b/y/x @@ -1,1 +1,2 @@ x +x1 # parent to root: % hg st -C --rev . --rev 0 M a A x/x y/x R y/x % hg diff --git --rev . --rev 0 diff --git a/a b/a --- a/a +++ b/a @@ -1,2 +1,1 @@ a -7 diff --git a/y/x b/x/x rename from y/x rename to x/x --- a/y/x +++ b/x/x @@ -1,2 +1,1 @@ x -x1 # branch to parent: % hg st -C --rev 2 --rev . M a A y/x x/x R x/x R x/y % hg diff --git --rev 2 --rev . 
diff --git a/a b/a --- a/a +++ b/a @@ -1,3 +1,2 @@ a -m1 -m2 +7 diff --git a/x/y b/x/y deleted file mode 100644 --- a/x/y +++ /dev/null @@ -1,1 +0,0 @@ -y1 diff --git a/x/x b/y/x rename from x/x rename to y/x --- a/x/x +++ b/y/x @@ -1,1 +1,2 @@ x +x1 # parent to branch: % hg st -C --rev . --rev 2 M a A x/x y/x A x/y R y/x % hg diff --git --rev . --rev 2 diff --git a/a b/a --- a/a +++ b/a @@ -1,2 +1,3 @@ a -7 +m1 +m2 diff --git a/y/x b/x/x rename from y/x rename to x/x --- a/y/x +++ b/x/x @@ -1,2 +1,1 @@ x -x1 diff --git a/x/y b/x/y new file mode 100644 --- /dev/null +++ b/x/y @@ -0,0 +1,1 @@ +y1 Cannot implement unrelated branch with tb testing copies with unrelated branch $ hg init unrelated $ cd unrelated $ echo a >> a $ hg ci -Am adda adding a $ hg mv a b $ hg ci -m movea $ hg up -C null 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo a >> a $ hg ci -Am addunrelateda adding a created new head unrelated branch diff $ hg diff --git -r 2 -r 1 diff --git a/a b/a deleted file mode 100644 --- a/a +++ /dev/null @@ -1,1 +0,0 @@ -a diff --git a/b b/b new file mode 100644 --- /dev/null +++ b/b @@ -0,0 +1,1 @@ +a $ cd .. test for case where we didn't look sufficiently far back to find rename ancestor $ hg init diffstop $ cd diffstop $ echo > f $ hg ci -qAmf $ hg mv f g $ hg ci -m'f->g' $ hg up -qr0 $ touch x $ hg ci -qAmx $ echo f > f $ hg ci -qmf=f $ hg merge -q $ hg ci -mmerge $ hg log -G --template '{rev} {desc}' @ 4 merge |\ | o 3 f=f | | | o 2 x | | o | 1 f->g |/ o 0 f $ hg diff --git -r 2 diff --git a/f b/g rename from f rename to g --- a/f +++ b/g @@ -1,1 +1,1 @@ - +f $ cd .. Additional tricky linkrev case ------------------------------ If the first file revision after the diff base has a linkrev pointing to a changeset on another branch with a revision lower that the diff base, we can jump past the copy detection limit and fail to detect the rename. 
$ hg init diffstoplinkrev $ cd diffstoplinkrev $ touch f $ hg ci -Aqm 'empty f' Make a simple change $ echo change > f $ hg ci -m 'change f' Make a second branch, we use a named branch to create a simple commit that does not touch f. $ hg up -qr 'desc(empty)' $ hg branch -q dev $ hg ci -Aqm dev Graft the initial change, as f was untouched, we reuse the same entry and the linkrev point to the older branch. $ hg graft -q 'desc(change)' Make a rename because we want to track renames. It is also important that the faulty linkrev is not the "start" commit to ensure the linkrev will be used. $ hg mv f renamed $ hg ci -m renamed $ hg log -G -T '{rev} {desc}' @ 4 renamed | o 3 change f | o 2 dev | | o 1 change f |/ o 0 empty f The copy tracking should still reach rev 2 (branch creation). accessing the parent of 4 (renamed) should not jump use to revision 1. $ hg diff --git -r 'desc(dev)' -r . diff --git a/f b/renamed rename from f rename to renamed --- a/f +++ b/renamed @@ -0,0 +1,1 @@ +change $ cd .. mercurial-3.7.3/tests/test-status.t0000644000175000017500000002204112676531525016755 0ustar mpmmpm00000000000000 $ hg init repo1 $ cd repo1 $ mkdir a b a/1 b/1 b/2 $ touch in_root a/in_a b/in_b a/1/in_a_1 b/1/in_b_1 b/2/in_b_2 hg status in repo root: $ hg status ? a/1/in_a_1 ? a/in_a ? b/1/in_b_1 ? b/2/in_b_2 ? b/in_b ? in_root hg status . in repo root: $ hg status . ? a/1/in_a_1 ? a/in_a ? b/1/in_b_1 ? b/2/in_b_2 ? b/in_b ? in_root $ hg status --cwd a ? a/1/in_a_1 ? a/in_a ? b/1/in_b_1 ? b/2/in_b_2 ? b/in_b ? in_root $ hg status --cwd a . ? 1/in_a_1 ? in_a $ hg status --cwd a .. ? 1/in_a_1 ? in_a ? ../b/1/in_b_1 ? ../b/2/in_b_2 ? ../b/in_b ? ../in_root $ hg status --cwd b ? a/1/in_a_1 ? a/in_a ? b/1/in_b_1 ? b/2/in_b_2 ? b/in_b ? in_root $ hg status --cwd b . ? 1/in_b_1 ? 2/in_b_2 ? in_b $ hg status --cwd b .. ? ../a/1/in_a_1 ? ../a/in_a ? 1/in_b_1 ? 2/in_b_2 ? in_b ? ../in_root $ hg status --cwd a/1 ? a/1/in_a_1 ? a/in_a ? b/1/in_b_1 ? b/2/in_b_2 ? b/in_b ? 
in_root $ hg status --cwd a/1 . ? in_a_1 $ hg status --cwd a/1 .. ? in_a_1 ? ../in_a $ hg status --cwd b/1 ? a/1/in_a_1 ? a/in_a ? b/1/in_b_1 ? b/2/in_b_2 ? b/in_b ? in_root $ hg status --cwd b/1 . ? in_b_1 $ hg status --cwd b/1 .. ? in_b_1 ? ../2/in_b_2 ? ../in_b $ hg status --cwd b/2 ? a/1/in_a_1 ? a/in_a ? b/1/in_b_1 ? b/2/in_b_2 ? b/in_b ? in_root $ hg status --cwd b/2 . ? in_b_2 $ hg status --cwd b/2 .. ? ../1/in_b_1 ? in_b_2 ? ../in_b combining patterns with root and patterns without a root works $ hg st a/in_a re:.*b$ ? a/in_a ? b/in_b $ cd .. $ hg init repo2 $ cd repo2 $ touch modified removed deleted ignored $ echo "^ignored$" > .hgignore $ hg ci -A -m 'initial checkin' adding .hgignore adding deleted adding modified adding removed $ touch modified added unknown ignored $ hg add added $ hg remove removed $ rm deleted hg status: $ hg status A added R removed ! deleted ? unknown hg status modified added removed deleted unknown never-existed ignored: $ hg status modified added removed deleted unknown never-existed ignored never-existed: * (glob) A added R removed ! deleted ? unknown $ hg copy modified copied hg status -C: $ hg status -C A added A copied modified R removed ! deleted ? unknown hg status -A: $ hg status -A A added A copied modified R removed ! deleted ? unknown I ignored C .hgignore C modified $ hg status -A -Tjson [ { "path": "added", "status": "A" }, { "copy": "modified", "path": "copied", "status": "A" }, { "path": "removed", "status": "R" }, { "path": "deleted", "status": "!" }, { "path": "unknown", "status": "?" 
}, { "path": "ignored", "status": "I" }, { "path": ".hgignore", "status": "C" }, { "path": "modified", "status": "C" } ] $ hg status -A -Tpickle > pickle >>> import pickle >>> print sorted((x['status'], x['path']) for x in pickle.load(open("pickle"))) [('!', 'deleted'), ('?', 'pickle'), ('?', 'unknown'), ('A', 'added'), ('A', 'copied'), ('C', '.hgignore'), ('C', 'modified'), ('I', 'ignored'), ('R', 'removed')] $ rm pickle $ echo "^ignoreddir$" > .hgignore $ mkdir ignoreddir $ touch ignoreddir/file Test templater support: $ hg status -AT "[{status}]\t{if(copy, '{copy} -> ')}{path}\n" [M] .hgignore [A] added [A] modified -> copied [R] removed [!] deleted [?] ignored [?] unknown [I] ignoreddir/file [C] modified $ hg status -AT default M .hgignore A added A copied modified R removed ! deleted ? ignored ? unknown I ignoreddir/file C modified $ hg status -T compact abort: "status" not in template map [255] hg status ignoreddir/file: $ hg status ignoreddir/file hg status -i ignoreddir/file: $ hg status -i ignoreddir/file I ignoreddir/file $ cd .. Check 'status -q' and some combinations $ hg init repo3 $ cd repo3 $ touch modified removed deleted ignored $ echo "^ignored$" > .hgignore $ hg commit -A -m 'initial checkin' adding .hgignore adding deleted adding modified adding removed $ touch added unknown ignored $ hg add added $ echo "test" >> modified $ hg remove removed $ rm deleted $ hg copy modified copied Specify working directory revision explicitly, that should be the same as "hg status" $ hg status --change "wdir()" M modified A added A copied R removed ! deleted ? unknown Run status with 2 different flags. Check if result is the same or different. If result is not as expected, raise error $ assert() { > hg status $1 > ../a > hg status $2 > ../b > if diff ../a ../b > /dev/null; then > out=0 > else > out=1 > fi > if [ $3 -eq 0 ]; then > df="same" > else > df="different" > fi > if [ $out -ne $3 ]; then > echo "Error on $1 and $2, should be $df." 
> fi > } Assert flag1 flag2 [0-same | 1-different] $ assert "-q" "-mard" 0 $ assert "-A" "-marduicC" 0 $ assert "-qA" "-mardcC" 0 $ assert "-qAui" "-A" 0 $ assert "-qAu" "-marducC" 0 $ assert "-qAi" "-mardicC" 0 $ assert "-qu" "-u" 0 $ assert "-q" "-u" 1 $ assert "-m" "-a" 1 $ assert "-r" "-d" 1 $ cd .. $ hg init repo4 $ cd repo4 $ touch modified removed deleted $ hg ci -q -A -m 'initial checkin' $ touch added unknown $ hg add added $ hg remove removed $ rm deleted $ echo x > modified $ hg copy modified copied $ hg ci -m 'test checkin' -d "1000001 0" $ rm * $ touch unrelated $ hg ci -q -A -m 'unrelated checkin' -d "1000002 0" hg status --change 1: $ hg status --change 1 M modified A added A copied R removed hg status --change 1 unrelated: $ hg status --change 1 unrelated hg status -C --change 1 added modified copied removed deleted: $ hg status -C --change 1 added modified copied removed deleted M modified A added A copied modified R removed hg status -A --change 1 and revset: $ hg status -A --change '1|1' M modified A added A copied modified R removed C deleted $ cd .. hg status with --rev and reverted changes: $ hg init reverted-changes-repo $ cd reverted-changes-repo $ echo a > file $ hg add file $ hg ci -m a $ echo b > file $ hg ci -m b reverted file should appear clean $ hg revert -r 0 . reverting file $ hg status -A --rev 0 C file #if execbit reverted file with changed flag should appear modified $ chmod +x file $ hg status -A --rev 0 M file $ hg revert -r 0 . reverting file reverted and committed file with changed flag should appear modified $ hg co -C . 
1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ chmod +x file $ hg ci -m 'change flag' $ hg status -A --rev 1 --rev 2 M file $ hg diff -r 1 -r 2 #endif hg status of binary file starting with '\1\n', a separator for metadata: $ hg init repo5 $ cd repo5 >>> open("010a", "wb").write("\1\nfoo") $ hg ci -q -A -m 'initial checkin' $ hg status -A C 010a >>> open("010a", "wb").write("\1\nbar") $ hg status -A M 010a $ hg ci -q -m 'modify 010a' $ hg status -A --rev 0:1 M 010a $ touch empty $ hg ci -q -A -m 'add another file' $ hg status -A --rev 1:2 010a C 010a $ cd .. test "hg status" with "directory pattern" which matches against files only known on target revision. $ hg init repo6 $ cd repo6 $ echo a > a.txt $ hg add a.txt $ hg commit -m '#0' $ mkdir -p 1/2/3/4/5 $ echo b > 1/2/3/4/5/b.txt $ hg add 1/2/3/4/5/b.txt $ hg commit -m '#1' $ hg update -C 0 > /dev/null $ hg status -A C a.txt the directory matching against specified pattern should be removed, because directory existence prevents 'dirstate.walk()' from showing warning message about such pattern. $ test ! -d 1 $ hg status -A --rev 1 1/2/3/4/5/b.txt R 1/2/3/4/5/b.txt $ hg status -A --rev 1 1/2/3/4/5 R 1/2/3/4/5/b.txt $ hg status -A --rev 1 1/2/3 R 1/2/3/4/5/b.txt $ hg status -A --rev 1 1 R 1/2/3/4/5/b.txt $ hg status --config ui.formatdebug=True --rev 1 1 status = [ {*'path': '1/2/3/4/5/b.txt'*}, (glob) ] #if windows $ hg --config ui.slash=false status -A --rev 1 1 R 1\2\3\4\5\b.txt #endif $ cd .. 
Status after move overwriting a file (issue4458) ================================================= $ hg init issue4458 $ cd issue4458 $ echo a > a $ echo b > b $ hg commit -Am base adding a adding b with --force $ hg mv b --force a $ hg st --copies M a b R b $ hg revert --all reverting a undeleting b $ rm *.orig without force $ hg rm a $ hg st --copies R a $ hg mv b a $ hg st --copies M a b R b using ui.statuscopies setting $ hg st --config ui.statuscopies=true M a b R b $ hg st --config ui.statuscopies=false M a R b Other "bug" highlight, the revision status does not report the copy information. This is buggy behavior. $ hg commit -m 'blah' $ hg st --copies --change . M a R b $ cd .. mercurial-3.7.3/tests/test-mactext.t0000644000175000017500000000177312676531525017110 0ustar mpmmpm00000000000000 $ cat > unix2mac.py < import sys > > for path in sys.argv[1:]: > data = file(path, 'rb').read() > data = data.replace('\n', '\r') > file(path, 'wb').write(data) > EOF $ cat > print.py < import sys > print(sys.stdin.read().replace('\n', '').replace('\r', '').replace('\0', '')) > EOF $ hg init $ echo '[hooks]' >> .hg/hgrc $ echo 'pretxncommit.cr = python:hgext.win32text.forbidcr' >> .hg/hgrc $ echo 'pretxnchangegroup.cr = python:hgext.win32text.forbidcr' >> .hg/hgrc $ cat .hg/hgrc [hooks] pretxncommit.cr = python:hgext.win32text.forbidcr pretxnchangegroup.cr = python:hgext.win32text.forbidcr $ echo hello > f $ hg add f $ hg ci -m 1 $ python unix2mac.py f $ hg ci -m 2 attempt to commit or push text file(s) using CR line endings in dea860dc51ec: f transaction abort! 
rollback completed abort: pretxncommit.cr hook failed [255] $ hg cat f | python print.py hello $ cat f | python print.py hello mercurial-3.7.3/tests/test-backout.t0000644000175000017500000004051112676531525017064 0ustar mpmmpm00000000000000 $ hg init basic $ cd basic should complain $ hg backout abort: please specify a revision to backout [255] $ hg backout -r 0 0 abort: please specify just one revision [255] basic operation (this also tests that editor is invoked if the commit message is not specified explicitly) $ echo a > a $ hg commit -d '0 0' -A -m a adding a $ echo b >> a $ hg commit -d '1 0' -m b $ hg status --rev tip --rev "tip^1" M a $ HGEDITOR=cat hg backout -d '2 0' tip --tool=true reverting a Backed out changeset a820f4f40a57 HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. HG: -- HG: user: test HG: branch 'default' HG: changed a changeset 2:2929462c3dff backs out changeset 1:a820f4f40a57 $ cat a a $ hg summary parent: 2:2929462c3dff tip Backed out changeset a820f4f40a57 branch: default commit: (clean) update: (current) phases: 3 draft commit option $ cd .. 
$ hg init commit $ cd commit $ echo tomatoes > a $ hg add a $ hg commit -d '0 0' -m tomatoes $ echo chair > b $ hg add b $ hg commit -d '1 0' -m chair $ echo grapes >> a $ hg commit -d '2 0' -m grapes $ hg backout -d '4 0' 1 --tool=:fail 0 files updated, 0 files merged, 1 files removed, 0 files unresolved changeset 3:1c2161e97c0a backs out changeset 1:22cb4f70d813 $ hg summary parent: 3:1c2161e97c0a tip Backed out changeset 22cb4f70d813 branch: default commit: (clean) update: (current) phases: 4 draft $ echo ypples > a $ hg commit -d '5 0' -m ypples $ hg backout -d '6 0' 2 --tool=:fail 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges [1] $ hg summary parent: 4:ed99997b793d tip ypples branch: default commit: 1 unresolved (clean) update: (current) phases: 5 draft file that was removed is recreated (this also tests that editor is not invoked if the commit message is specified explicitly) $ cd .. $ hg init remove $ cd remove $ echo content > a $ hg commit -d '0 0' -A -m a adding a $ hg rm a $ hg commit -d '1 0' -m b $ HGEDITOR=cat hg backout -d '2 0' tip --tool=true -m "Backed out changeset 76862dcce372" adding a changeset 2:de31bdc76c0d backs out changeset 1:76862dcce372 $ cat a content $ hg summary parent: 2:de31bdc76c0d tip Backed out changeset 76862dcce372 branch: default commit: (clean) update: (current) phases: 3 draft backout of backout is as if nothing happened $ hg backout -d '3 0' --merge tip --tool=true removing a changeset 3:7f6d0f120113 backs out changeset 2:de31bdc76c0d $ test -f a [1] $ hg summary parent: 3:7f6d0f120113 tip Backed out changeset de31bdc76c0d branch: default commit: (clean) update: (current) phases: 4 draft Test that 'hg rollback' restores dirstate just before opening transaction: in-memory dirstate changes should be written into '.hg/journal.dirstate' as expected. 
$ echo 'removed soon' > b $ hg commit -A -d '4 0' -m 'prepare for subsequent removing' adding b $ echo 'newly added' > c $ hg add c $ hg remove b $ hg commit -d '5 0' -m 'prepare for subsequent backout' $ touch -t 200001010000 c $ hg status -A C c $ hg debugstate --nodates n 644 12 set c $ hg backout -d '6 0' -m 'to be rollback-ed soon' -r . adding b removing c changeset 6:4bfec048029d backs out changeset 5:fac0b729a654 $ hg rollback -q $ hg status -A A b R c $ hg debugstate --nodates a 0 -1 unset b r 0 0 set c across branch $ cd .. $ hg init branch $ cd branch $ echo a > a $ hg ci -Am0 adding a $ echo b > b $ hg ci -Am1 adding b $ hg co -C 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg summary parent: 0:f7b1eb17ad24 0 branch: default commit: (clean) update: 1 new changesets (update) phases: 2 draft should fail $ hg backout 1 abort: cannot backout change that is not an ancestor [255] $ echo c > c $ hg ci -Am2 adding c created new head $ hg summary parent: 2:db815d6d32e6 tip 2 branch: default commit: (clean) update: 1 new changesets, 2 branch heads (merge) phases: 3 draft should fail $ hg backout 1 abort: cannot backout change that is not an ancestor [255] $ hg summary parent: 2:db815d6d32e6 tip 2 branch: default commit: (clean) update: 1 new changesets, 2 branch heads (merge) phases: 3 draft backout with merge $ cd .. 
$ hg init merge $ cd merge $ echo line 1 > a $ echo line 2 >> a $ hg commit -d '0 0' -A -m a adding a $ hg summary parent: 0:59395513a13a tip a branch: default commit: (clean) update: (current) phases: 1 draft remove line 1 $ echo line 2 > a $ hg commit -d '1 0' -m b $ echo line 3 >> a $ hg commit -d '2 0' -m c $ hg backout --merge -d '3 0' 1 --tool=true reverting a created new head changeset 3:26b8ccb9ad91 backs out changeset 1:5a50a024c182 merging with changeset 3:26b8ccb9ad91 merging a 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg commit -d '4 0' -m d $ hg summary parent: 4:c7df5e0b9c09 tip d branch: default commit: (clean) update: (current) phases: 5 draft check line 1 is back $ cat a line 1 line 2 line 3 Test visibility of in-memory dirstate changes outside transaction to external hook process $ cat > $TESTTMP/checkvisibility.sh < echo "==== \$1:" > hg parents --template "{rev}:{node|short}\n" > echo "====" > EOF "hg backout --merge REV1" at REV2 below implies steps below: (1) update to REV1 (REV2 => REV1) (2) revert by REV1^1 (3) commit backnig out revision (REV3) (4) update to REV2 (REV3 => REV2) (5) merge with REV3 (REV2 => REV2, REV3) == test visibility to external preupdate hook $ hg update -q -C 2 $ hg --config extensions.strip= strip 3 saved backup bundle to * (glob) $ cat >> .hg/hgrc < [hooks] > preupdate.visibility = sh $TESTTMP/checkvisibility.sh preupdate > EOF ("-m" is needed to avoid writing dirstte changes out at other than invocation of the hook to be examined) $ hg backout --merge -d '3 0' 1 --tool=true -m 'fixed comment' ==== preupdate: 2:6ea3f2a197a2 ==== reverting a created new head changeset 3:d92a3f57f067 backs out changeset 1:5a50a024c182 ==== preupdate: 3:d92a3f57f067 ==== merging with changeset 3:d92a3f57f067 ==== preupdate: 2:6ea3f2a197a2 ==== merging a 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat >> 
.hg/hgrc < [hooks] > preupdate.visibility = > EOF == test visibility to external update hook $ hg update -q -C 2 $ hg --config extensions.strip= strip 3 saved backup bundle to * (glob) $ cat >> .hg/hgrc < [hooks] > update.visibility = sh $TESTTMP/checkvisibility.sh update > EOF $ hg backout --merge -d '3 0' 1 --tool=true -m 'fixed comment' ==== update: 1:5a50a024c182 ==== reverting a created new head changeset 3:d92a3f57f067 backs out changeset 1:5a50a024c182 ==== update: 2:6ea3f2a197a2 ==== merging with changeset 3:d92a3f57f067 merging a ==== update: 2:6ea3f2a197a2 3:d92a3f57f067 ==== 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat >> .hg/hgrc < [hooks] > update.visibility = > EOF $ cd .. backout should not back out subsequent changesets $ hg init onecs $ cd onecs $ echo 1 > a $ hg commit -d '0 0' -A -m a adding a $ echo 2 >> a $ hg commit -d '1 0' -m b $ echo 1 > b $ hg commit -d '2 0' -A -m c adding b $ hg summary parent: 2:882396649954 tip c branch: default commit: (clean) update: (current) phases: 3 draft without --merge $ hg backout --no-commit -d '3 0' 1 --tool=true 1 files updated, 0 files merged, 0 files removed, 0 files unresolved changeset 22bca4c721e5 backed out, don't forget to commit. $ hg locate b b $ hg update -C tip 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg locate b b $ hg summary parent: 2:882396649954 tip c branch: default commit: (clean) update: (current) phases: 3 draft with --merge $ hg backout --merge -d '3 0' 1 --tool=true reverting a created new head changeset 3:3202beb76721 backs out changeset 1:22bca4c721e5 merging with changeset 3:3202beb76721 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg locate b b $ hg update -C tip 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg locate b [1] $ cd .. 
$ hg init m $ cd m $ echo a > a $ hg commit -d '0 0' -A -m a adding a $ echo b > b $ hg commit -d '1 0' -A -m b adding b $ echo c > c $ hg commit -d '2 0' -A -m b adding c $ hg update 1 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo d > d $ hg commit -d '3 0' -A -m c adding d created new head $ hg merge 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg commit -d '4 0' -A -m d $ hg summary parent: 4:b2f3bb92043e tip d branch: default commit: (clean) update: (current) phases: 5 draft backout of merge should fail $ hg backout 4 abort: cannot backout a merge changeset [255] backout of merge with bad parent should fail $ hg backout --parent 0 4 abort: cb9a9f314b8b is not a parent of b2f3bb92043e [255] backout of non-merge with parent should fail $ hg backout --parent 0 3 abort: cannot use --parent on non-merge changeset [255] backout with valid parent should be ok $ hg backout -d '5 0' --parent 2 4 --tool=true removing d changeset 5:10e5328c8435 backs out changeset 4:b2f3bb92043e $ hg summary parent: 5:10e5328c8435 tip Backed out changeset b2f3bb92043e branch: default commit: (clean) update: (current) phases: 6 draft $ hg rollback repository tip rolled back to revision 4 (undo commit) working directory now based on revision 4 $ hg update -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg summary parent: 4:b2f3bb92043e tip d branch: default commit: (clean) update: (current) phases: 5 draft $ hg backout -d '6 0' --parent 3 4 --tool=true removing c changeset 5:033590168430 backs out changeset 4:b2f3bb92043e $ hg summary parent: 5:033590168430 tip Backed out changeset b2f3bb92043e branch: default commit: (clean) update: (current) phases: 6 draft $ cd .. 
named branches $ hg init named_branches $ cd named_branches $ echo default > default $ hg ci -d '0 0' -Am default adding default $ hg branch branch1 marked working directory as branch branch1 (branches are permanent and global, did you want a bookmark?) $ echo branch1 > file1 $ hg ci -d '1 0' -Am file1 adding file1 $ hg branch branch2 marked working directory as branch branch2 $ echo branch2 > file2 $ hg ci -d '2 0' -Am file2 adding file2 without --merge $ hg backout --no-commit -r 1 --tool=true 0 files updated, 0 files merged, 1 files removed, 0 files unresolved changeset bf1602f437f3 backed out, don't forget to commit. $ hg branch branch2 $ hg status -A R file1 C default C file2 $ hg summary parent: 2:45bbcd363bf0 tip file2 branch: branch2 commit: 1 removed update: (current) phases: 3 draft with --merge (this also tests that editor is invoked if '--edit' is specified explicitly regardless of '--message') $ hg update -qC $ HGEDITOR=cat hg backout --merge -d '3 0' -r 1 -m 'backout on branch1' --tool=true --edit removing file1 backout on branch1 HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. 
HG: -- HG: user: test HG: branch 'branch2' HG: removed file1 created new head changeset 3:d4e8f6db59fb backs out changeset 1:bf1602f437f3 merging with changeset 3:d4e8f6db59fb 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg summary parent: 2:45bbcd363bf0 file2 parent: 3:d4e8f6db59fb tip backout on branch1 branch: branch2 commit: 1 removed (merge) update: (current) phases: 4 draft $ hg update -q -C 2 on branch2 with branch1 not merged, so file1 should still exist: $ hg id 45bbcd363bf0 (branch2) $ hg st -A C default C file1 C file2 $ hg summary parent: 2:45bbcd363bf0 file2 branch: branch2 commit: (clean) update: 1 new changesets, 2 branch heads (merge) phases: 4 draft on branch2 with branch1 merged, so file1 should be gone: $ hg merge 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -d '4 0' -m 'merge backout of branch1' $ hg id 22149cdde76d (branch2) tip $ hg st -A C default C file2 $ hg summary parent: 4:22149cdde76d tip merge backout of branch1 branch: branch2 commit: (clean) update: (current) phases: 5 draft on branch1, so no file1 and file2: $ hg co -C branch1 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg id bf1602f437f3 (branch1) $ hg st -A C default C file1 $ hg summary parent: 1:bf1602f437f3 file1 branch: branch1 commit: (clean) update: (current) phases: 5 draft $ cd .. backout of empty changeset (issue4190) $ hg init emptycommit $ cd emptycommit $ touch file1 $ hg ci -Aqm file1 $ hg branch -q branch1 $ hg ci -qm branch1 $ hg backout -v 1 resolving manifests nothing changed [1] $ cd .. 
Test usage of `hg resolve` in case of conflict (issue4163) $ hg init issue4163 $ cd issue4163 $ touch foo $ hg add foo $ cat > foo << EOF > one > two > three > four > five > six > seven > height > nine > ten > EOF $ hg ci -m 'initial' $ cat > foo << EOF > one > two > THREE > four > five > six > seven > height > nine > ten > EOF $ hg ci -m 'capital three' $ cat > foo << EOF > one > two > THREE > four > five > six > seven > height > nine > TEN > EOF $ hg ci -m 'capital ten' $ hg backout -r 'desc("capital three")' --tool internal:fail 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges [1] $ hg status $ hg debugmergestate * version 2 records local: b71750c4b0fdf719734971e3ef90dbeab5919a2d other: a30dd8addae3ce71b8667868478542bc417439e6 file: foo (record type "F", state "u", hash 0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33) local path: foo (flags "") ancestor path: foo (node f89532f44c247a0e993d63e3a734dd781ab04708) other path: foo (node f50039b486d6fa1a90ae51778388cad161f425ee) $ mv .hg/merge/state2 .hg/merge/state2-moved $ hg debugmergestate * version 1 records local: b71750c4b0fdf719734971e3ef90dbeab5919a2d file: foo (record type "F", state "u", hash 0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33) local path: foo (flags "") ancestor path: foo (node f89532f44c247a0e993d63e3a734dd781ab04708) other path: foo (node not stored in v1 format) $ mv .hg/merge/state2-moved .hg/merge/state2 $ hg resolve -l # still unresolved U foo $ hg summary parent: 2:b71750c4b0fd tip capital ten branch: default commit: 1 unresolved (clean) update: (current) phases: 3 draft $ hg resolve --all --debug picked tool ':merge' for foo (binary False symlink False changedelete False) merging foo my foo@b71750c4b0fd+ other foo@a30dd8addae3 ancestor foo@913609522437 premerge successful (no more unresolved files) continue: hg commit $ hg status M foo ? 
foo.orig $ hg resolve -l R foo $ hg summary parent: 2:b71750c4b0fd tip capital ten branch: default commit: 1 modified, 1 unknown update: (current) phases: 3 draft $ cat foo one two three four five six seven height nine TEN --no-commit shouldn't commit $ hg init a $ cd a $ for i in 1 2 3; do > touch $i > hg ci -Am $i > done adding 1 adding 2 adding 3 $ hg backout --no-commit . removing 3 changeset cccc23d9d68f backed out, don't forget to commit. $ hg revert -aq --no-commit can't be used with --merge $ hg backout --merge --no-commit 2 abort: cannot use --merge with --no-commit [255] mercurial-3.7.3/tests/test-qrecord.t0000644000175000017500000002132712676531525017077 0ustar mpmmpm00000000000000Create configuration $ echo "[ui]" >> $HGRCPATH $ echo "interactive=true" >> $HGRCPATH help record (no record) $ hg help record record extension - commands to interactively select changes for commit/qrefresh (use "hg help extensions" for information on enabling extensions) help qrecord (no record) $ hg help qrecord 'qrecord' is provided by the following extension: record commands to interactively select changes for commit/qrefresh (use "hg help extensions" for information on enabling extensions) $ echo "[extensions]" >> $HGRCPATH $ echo "record=" >> $HGRCPATH help record (record) $ hg help record hg record [OPTION]... [FILE]... interactively select changes to commit If a list of files is omitted, all changes reported by 'hg status' will be candidates for recording. See 'hg help dates' for a list of formats valid for -d/--date. You will be prompted for whether to record changes to each modified file, and for files with multiple changes, for each change to use. 
For each query, the following responses are possible: y - record this change n - skip this change e - edit this change manually s - skip remaining changes to this file f - record remaining changes to this file d - done, skip remaining changes and files a - record all changes to all remaining files q - quit, recording no changes ? - display help This command is not available when committing a merge. options ([+] can be repeated): -A --addremove mark new/missing files as added/removed before committing --close-branch mark a branch head as closed --amend amend the parent of the working directory -s --secret use the secret phase for committing -e --edit invoke editor on commit messages -I --include PATTERN [+] include names matching the given patterns -X --exclude PATTERN [+] exclude names matching the given patterns -m --message TEXT use text as commit message -l --logfile FILE read commit message from file -d --date DATE record the specified date as commit date -u --user USER record the specified user as committer -S --subrepos recurse into subrepositories -w --ignore-all-space ignore white space when comparing lines -b --ignore-space-change ignore changes in the amount of white space -B --ignore-blank-lines ignore changes whose lines are all blank (some details hidden, use --verbose to show complete help) help (no mq, so no qrecord) $ hg help qrecord hg qrecord [OPTION]... PATCH [FILE]... interactively record a new patch See 'hg help qnew' & 'hg help record' for more information and usage. (some details hidden, use --verbose to show complete help) $ hg init a qrecord (mq not present) $ hg -R a qrecord hg qrecord: invalid arguments hg qrecord [OPTION]... PATCH [FILE]... 
interactively record a new patch (use "hg qrecord -h" to show more help) [255] qrecord patch (mq not present) $ hg -R a qrecord patch abort: 'mq' extension not loaded [255] help (bad mq) $ echo "mq=nonexistent" >> $HGRCPATH $ hg help qrecord *** failed to import extension mq from nonexistent: [Errno *] * (glob) hg qrecord [OPTION]... PATCH [FILE]... interactively record a new patch See 'hg help qnew' & 'hg help record' for more information and usage. (some details hidden, use --verbose to show complete help) help (mq present) $ sed 's/mq=nonexistent/mq=/' $HGRCPATH > hgrc.tmp $ mv hgrc.tmp $HGRCPATH $ hg help qrecord hg qrecord [OPTION]... PATCH [FILE]... interactively record a new patch See 'hg help qnew' & 'hg help record' for more information and usage. options ([+] can be repeated): -e --edit invoke editor on commit messages -g --git use git extended diff format -U --currentuser add "From: " to patch -u --user USER add "From: " to patch -D --currentdate add "Date: " to patch -d --date DATE add "Date: " to patch -I --include PATTERN [+] include names matching the given patterns -X --exclude PATTERN [+] exclude names matching the given patterns -m --message TEXT use text as commit message -l --logfile FILE read commit message from file -w --ignore-all-space ignore white space when comparing lines -b --ignore-space-change ignore changes in the amount of white space -B --ignore-blank-lines ignore changes whose lines are all blank --mq operate on patch repository (some details hidden, use --verbose to show complete help) $ cd a Base commit $ cat > 1.txt < 1 > 2 > 3 > 4 > 5 > EOF $ cat > 2.txt < a > b > c > d > e > f > EOF $ mkdir dir $ cat > dir/a.txt < hello world > > someone > up > there > loves > me > EOF $ hg add 1.txt 2.txt dir/a.txt $ hg commit -m 'initial checkin' Changing files $ sed -e 's/2/2 2/;s/4/4 4/' 1.txt > 1.txt.new $ sed -e 's/b/b b/' 2.txt > 2.txt.new $ sed -e 's/hello world/hello world!/' dir/a.txt > dir/a.txt.new $ mv -f 1.txt.new 1.txt $ mv -f 
2.txt.new 2.txt $ mv -f dir/a.txt.new dir/a.txt Whole diff $ hg diff --nodates diff -r 1057167b20ef 1.txt --- a/1.txt +++ b/1.txt @@ -1,5 +1,5 @@ 1 -2 +2 2 3 -4 +4 4 5 diff -r 1057167b20ef 2.txt --- a/2.txt +++ b/2.txt @@ -1,5 +1,5 @@ a -b +b b c d e diff -r 1057167b20ef dir/a.txt --- a/dir/a.txt +++ b/dir/a.txt @@ -1,4 +1,4 @@ -hello world +hello world! someone up qrecord with bad patch name, should abort before prompting $ hg qrecord .hg abort: patch name cannot begin with ".hg" [255] qrecord a.patch $ hg qrecord -d '0 0' -m aaa a.patch < y > y > n > y > y > n > EOF diff --git a/1.txt b/1.txt 2 hunks, 2 lines changed examine changes to '1.txt'? [Ynesfdaq?] y @@ -1,3 +1,3 @@ 1 -2 +2 2 3 record change 1/4 to '1.txt'? [Ynesfdaq?] y @@ -3,3 +3,3 @@ 3 -4 +4 4 5 record change 2/4 to '1.txt'? [Ynesfdaq?] n diff --git a/2.txt b/2.txt 1 hunks, 1 lines changed examine changes to '2.txt'? [Ynesfdaq?] y @@ -1,5 +1,5 @@ a -b +b b c d e record change 3/4 to '2.txt'? [Ynesfdaq?] y diff --git a/dir/a.txt b/dir/a.txt 1 hunks, 1 lines changed examine changes to 'dir/a.txt'? [Ynesfdaq?] n After qrecord a.patch 'tip'" $ hg tip -p changeset: 1:5d1ca63427ee tag: a.patch tag: qbase tag: qtip tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: aaa diff -r 1057167b20ef -r 5d1ca63427ee 1.txt --- a/1.txt Thu Jan 01 00:00:00 1970 +0000 +++ b/1.txt Thu Jan 01 00:00:00 1970 +0000 @@ -1,5 +1,5 @@ 1 -2 +2 2 3 4 5 diff -r 1057167b20ef -r 5d1ca63427ee 2.txt --- a/2.txt Thu Jan 01 00:00:00 1970 +0000 +++ b/2.txt Thu Jan 01 00:00:00 1970 +0000 @@ -1,5 +1,5 @@ a -b +b b c d e After qrecord a.patch 'diff'" $ hg diff --nodates diff -r 5d1ca63427ee 1.txt --- a/1.txt +++ b/1.txt @@ -1,5 +1,5 @@ 1 2 2 3 -4 +4 4 5 diff -r 5d1ca63427ee dir/a.txt --- a/dir/a.txt +++ b/dir/a.txt @@ -1,4 +1,4 @@ -hello world +hello world! someone up qrecord b.patch $ hg qrecord -d '0 0' -m bbb b.patch < y > y > y > y > EOF diff --git a/1.txt b/1.txt 1 hunks, 1 lines changed examine changes to '1.txt'? 
[Ynesfdaq?] y @@ -1,5 +1,5 @@ 1 2 2 3 -4 +4 4 5 record change 1/2 to '1.txt'? [Ynesfdaq?] y diff --git a/dir/a.txt b/dir/a.txt 1 hunks, 1 lines changed examine changes to 'dir/a.txt'? [Ynesfdaq?] y @@ -1,4 +1,4 @@ -hello world +hello world! someone up record change 2/2 to 'dir/a.txt'? [Ynesfdaq?] y After qrecord b.patch 'tip' $ hg tip -p changeset: 2:b056198bf878 tag: b.patch tag: qtip tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: bbb diff -r 5d1ca63427ee -r b056198bf878 1.txt --- a/1.txt Thu Jan 01 00:00:00 1970 +0000 +++ b/1.txt Thu Jan 01 00:00:00 1970 +0000 @@ -1,5 +1,5 @@ 1 2 2 3 -4 +4 4 5 diff -r 5d1ca63427ee -r b056198bf878 dir/a.txt --- a/dir/a.txt Thu Jan 01 00:00:00 1970 +0000 +++ b/dir/a.txt Thu Jan 01 00:00:00 1970 +0000 @@ -1,4 +1,4 @@ -hello world +hello world! someone up After qrecord b.patch 'diff' $ hg diff --nodates $ cd .. mercurial-3.7.3/tests/test-hgcia.t0000644000175000017500000000404712676531525016513 0ustar mpmmpm00000000000000Test the CIA extension $ cat >> $HGRCPATH < [extensions] > hgcia= > > [hooks] > changegroup.cia = python:hgext.hgcia.hook > > [web] > baseurl = http://hgserver/ > > [cia] > user = testuser > project = testproject > test = True > EOF $ hg init src $ hg init cia $ cd src $ echo foo > foo $ hg ci -Amfoo adding foo $ hg push ../cia pushing to ../cia searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files Mercurial (hgcia) 0.1 http://hg.kublai.com/mercurial/hgcia testuser testproject default test 0:e63c23eaa88a foo http://hgserver/rev/e63c23eaa88a foo 0 $ cat >> $HGRCPATH < strip = 0 > EOF $ echo bar > bar $ hg ci -Ambar adding bar $ hg push ../cia pushing to ../cia searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files Mercurial (hgcia) 0.1 http://hg.kublai.com/mercurial/hgcia testuser testproject default test 1:c0c7cf58edc5 bar 
http://hgserver/$TESTTMP/cia/rev/c0c7cf58edc5 bar 0 $ cd .. mercurial-3.7.3/tests/test-merge1.t0000644000175000017500000002532412676531525016621 0ustar mpmmpm00000000000000 $ cat < merge > import sys, os > > try: > import msvcrt > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY) > msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY) > except ImportError: > pass > > print "merging for", os.path.basename(sys.argv[1]) > EOF $ HGMERGE="python ../merge"; export HGMERGE $ hg init t $ cd t $ echo This is file a1 > a $ hg add a $ hg commit -m "commit #0" $ echo This is file b1 > b $ hg add b $ hg commit -m "commit #1" $ hg update 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved Test interrupted updates by exploiting our non-handling of directory collisions $ mkdir b $ hg up abort: *: '$TESTTMP/t/b' (glob) [255] $ hg ci abort: last update was interrupted (use 'hg update' to get a consistent checkout) [255] $ hg sum parent: 0:538afb845929 commit #0 branch: default commit: (interrupted update) update: 1 new changesets (update) phases: 2 draft $ rmdir b $ hg up 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg sum parent: 1:b8bb4a988f25 tip commit #1 branch: default commit: (clean) update: (current) phases: 2 draft Prepare a basic merge $ hg up 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo This is file c1 > c $ hg add c $ hg commit -m "commit #2" created new head $ echo This is file b1 > b no merges expected $ hg merge -P 1 changeset: 1:b8bb4a988f25 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit #1 $ hg merge 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg diff --nodates diff -r 49035e18a8e6 b --- /dev/null +++ b/b @@ -0,0 +1,1 @@ +This is file b1 $ hg status M b $ cd ..; rm -r t $ hg init t $ cd t $ echo This is file a1 > a $ hg add a $ hg commit -m "commit #0" $ echo This is file b1 > b $ hg add b $ hg commit -m "commit 
#1" $ hg update 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo This is file c1 > c $ hg add c $ hg commit -m "commit #2" created new head $ echo This is file b2 > b merge should fail $ hg merge 1 b: untracked file differs abort: untracked files in working directory differ from files in requested revision [255] #if symlink symlinks to directories should be treated as regular files (issue5027) $ rm b $ ln -s 'This is file b2' b $ hg merge 1 b: untracked file differs abort: untracked files in working directory differ from files in requested revision [255] symlinks shouldn't be followed $ rm b $ echo This is file b1 > .hg/b $ ln -s .hg/b b $ hg merge 1 b: untracked file differs abort: untracked files in working directory differ from files in requested revision [255] $ rm b $ echo This is file b2 > b #endif bad config $ hg merge 1 --config merge.checkunknown=x abort: merge.checkunknown not valid ('x' is none of 'abort', 'ignore', 'warn') [255] this merge should fail $ hg merge 1 --config merge.checkunknown=abort b: untracked file differs abort: untracked files in working directory differ from files in requested revision [255] this merge should warn $ hg merge 1 --config merge.checkunknown=warn b: replacing untracked file 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat b.orig This is file b2 $ hg up --clean 2 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ mv b.orig b this merge should silently ignore $ cat b This is file b2 $ hg merge 1 --config merge.checkunknown=ignore 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) merge.checkignored $ hg up --clean 1 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ cat >> .hgignore << EOF > remoteignored > EOF $ echo This is file localignored3 > localignored $ echo This is file remoteignored3 > remoteignored $ hg add .hgignore 
localignored remoteignored $ hg commit -m "commit #3" $ hg up 2 1 files updated, 0 files merged, 4 files removed, 0 files unresolved $ cat >> .hgignore << EOF > localignored > EOF $ hg add .hgignore $ hg commit -m "commit #4" remote .hgignore shouldn't be used for determining whether a file is ignored $ echo This is file remoteignored4 > remoteignored $ hg merge 3 --config merge.checkignored=ignore --config merge.checkunknown=abort remoteignored: untracked file differs abort: untracked files in working directory differ from files in requested revision [255] $ hg merge 3 --config merge.checkignored=abort --config merge.checkunknown=ignore merging .hgignore merging for .hgignore 3 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat remoteignored This is file remoteignored3 $ cat remoteignored.orig This is file remoteignored4 $ rm remoteignored.orig local .hgignore should be used for that $ hg up --clean 4 1 files updated, 0 files merged, 3 files removed, 0 files unresolved $ echo This is file localignored4 > localignored also test other conflicting files to see we output the full set of warnings $ echo This is file b2 > b $ hg merge 3 --config merge.checkignored=abort --config merge.checkunknown=abort b: untracked file differs localignored: untracked file differs abort: untracked files in working directory differ from files in requested revision [255] $ hg merge 3 --config merge.checkignored=abort --config merge.checkunknown=ignore localignored: untracked file differs abort: untracked files in working directory differ from files in requested revision [255] $ hg merge 3 --config merge.checkignored=warn --config merge.checkunknown=abort b: untracked file differs abort: untracked files in working directory differ from files in requested revision [255] $ hg merge 3 --config merge.checkignored=warn --config merge.checkunknown=warn b: replacing untracked file localignored: replacing untracked file merging 
.hgignore merging for .hgignore 3 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat localignored This is file localignored3 $ cat localignored.orig This is file localignored4 $ rm localignored.orig $ cat b.orig This is file b2 $ hg up --clean 2 0 files updated, 0 files merged, 4 files removed, 0 files unresolved $ mv b.orig b this merge of b should work $ cat b This is file b2 $ hg merge -f 1 merging b merging for b 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg diff --nodates diff -r 49035e18a8e6 b --- /dev/null +++ b/b @@ -0,0 +1,1 @@ +This is file b2 $ hg status M b $ cd ..; rm -r t $ hg init t $ cd t $ echo This is file a1 > a $ hg add a $ hg commit -m "commit #0" $ echo This is file b1 > b $ hg add b $ hg commit -m "commit #1" $ echo This is file b22 > b $ hg commit -m "commit #2" $ hg update 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo This is file c1 > c $ hg add c $ hg commit -m "commit #3" created new head Contents of b should be "this is file b1" $ cat b This is file b1 $ echo This is file b22 > b merge fails $ hg merge 2 abort: uncommitted changes (use 'hg status' to list changes) [255] merge expected! 
$ hg merge -f 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg diff --nodates diff -r 85de557015a8 b --- a/b +++ b/b @@ -1,1 +1,1 @@ -This is file b1 +This is file b22 $ hg status M b $ cd ..; rm -r t $ hg init t $ cd t $ echo This is file a1 > a $ hg add a $ hg commit -m "commit #0" $ echo This is file b1 > b $ hg add b $ hg commit -m "commit #1" $ echo This is file b22 > b $ hg commit -m "commit #2" $ hg update 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo This is file c1 > c $ hg add c $ hg commit -m "commit #3" created new head $ echo This is file b33 > b merge of b should fail $ hg merge 2 abort: uncommitted changes (use 'hg status' to list changes) [255] merge of b expected $ hg merge -f 2 merging b merging for b 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg diff --nodates diff -r 85de557015a8 b --- a/b +++ b/b @@ -1,1 +1,1 @@ -This is file b1 +This is file b33 $ hg status M b Test for issue2364 $ hg up -qC . $ hg rm b $ hg ci -md $ hg revert -r -2 b $ hg up -q -- -2 Test that updated files are treated as "modified", when 'merge.update()' is aborted before 'merge.recordupdates()' (= parents aren't changed), even if none of mode, size and timestamp of them isn't changed on the filesystem (see also issue4583). $ cat > $TESTTMP/abort.py < # emulate aborting before "recordupdates()". 
in this case, files > # are changed without updating dirstate > from mercurial import extensions, merge, error > def applyupdates(orig, *args, **kwargs): > orig(*args, **kwargs) > raise error.Abort('intentional aborting') > def extsetup(ui): > extensions.wrapfunction(merge, "applyupdates", applyupdates) > EOF $ cat >> .hg/hgrc < [fakedirstatewritetime] > # emulate invoking dirstate.write() via repo.status() > # at 2000-01-01 00:00 > fakenow = 200001010000 > EOF (file gotten from other revision) $ hg update -q -C 2 $ echo 'THIS IS FILE B5' > b $ hg commit -m 'commit #5' $ hg update -q -C 3 $ cat b This is file b1 $ touch -t 200001010000 b $ hg debugrebuildstate $ cat >> .hg/hgrc < [extensions] > fakedirstatewritetime = $TESTDIR/fakedirstatewritetime.py > abort = $TESTTMP/abort.py > EOF $ hg merge 5 abort: intentional aborting [255] $ cat >> .hg/hgrc < [extensions] > fakedirstatewritetime = ! > abort = ! > EOF $ cat b THIS IS FILE B5 $ touch -t 200001010000 b $ hg status -A b M b (file merged from other revision) $ hg update -q -C 3 $ echo 'this is file b6' > b $ hg commit -m 'commit #6' created new head $ cat b this is file b6 $ touch -t 200001010000 b $ hg debugrebuildstate $ cat >> .hg/hgrc < [extensions] > fakedirstatewritetime = $TESTDIR/fakedirstatewritetime.py > abort = $TESTTMP/abort.py > EOF $ hg merge --tool internal:other 5 abort: intentional aborting [255] $ cat >> .hg/hgrc < [extensions] > fakedirstatewritetime = ! > abort = ! > EOF $ cat b THIS IS FILE B5 $ touch -t 200001010000 b $ hg status -A b M b $ cd .. 
mercurial-3.7.3/tests/test-rebase-mq-skip.t0000644000175000017500000000736212676531525020263 0ustar mpmmpm00000000000000This emulates the effects of an hg pull --rebase in which the remote repo already has one local mq patch $ cat >> $HGRCPATH < [format] > usegeneraldelta=yes > [extensions] > rebase= > mq= > > [phases] > publish=False > > [alias] > tglog = log -G --template "{rev}: '{desc}' tags: {tags}\n" > EOF $ hg init a $ cd a $ hg qinit -c $ echo c1 > c1 $ hg add c1 $ hg ci -m C1 $ echo r1 > r1 $ hg add r1 $ hg ci -m R1 $ hg up -q 0 $ hg qnew p0.patch -d '1 0' $ echo p0 > p0 $ hg add p0 $ hg qref -m P0 $ hg qnew p1.patch -d '2 0' $ echo p1 > p1 $ hg add p1 $ hg qref -m P1 $ hg export qtip > p1.patch $ hg up -q -C 1 $ hg import p1.patch applying p1.patch $ rm p1.patch $ hg up -q -C qtip $ hg rebase -v rebasing 2:13a46ce44f60 "P0" (p0.patch qbase) resolving manifests removing p0 getting r1 resolving manifests getting p0 committing files: p0 committing manifest committing changelog rebasing 3:148775c71080 "P1" (p1.patch qtip) resolving manifests note: rebase of 3:148775c71080 created no changes to commit rebase merging completed updating mq patch p0.patch to 5:9ecc820b1737 $TESTTMP/a/.hg/patches/p0.patch (glob) 2 changesets found uncompressed size of bundle content: 384 (changelog) 324 (manifests) 129 p0 129 p1 saved backup bundle to $TESTTMP/a/.hg/strip-backup/13a46ce44f60-5da6ecfb-backup.hg (glob) 2 changesets found uncompressed size of bundle content: 439 (changelog) 324 (manifests) 129 p0 129 p1 adding branch adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files rebase completed 1 revisions have been skipped $ hg tglog @ 3: 'P0' tags: p0.patch qbase qtip tip | o 2: 'P1' tags: qparent | o 1: 'R1' tags: | o 0: 'C1' tags: $ cd .. 
$ hg init b $ cd b $ hg qinit -c $ for i in r0 r1 r2 r3 r4 r5 r6; > do > echo $i > $i > hg ci -Am $i > done adding r0 adding r1 adding r2 adding r3 adding r4 adding r5 adding r6 $ hg qimport -r 1:tip $ hg up -q 0 $ for i in r1 r3 r7 r8; > do > echo $i > $i > hg ci -Am branch2-$i > done adding r1 created new head adding r3 adding r7 adding r8 $ echo somethingelse > r4 $ hg ci -Am branch2-r4 adding r4 $ echo r6 > r6 $ hg ci -Am branch2-r6 adding r6 $ hg up -q qtip $ HGMERGE=internal:fail hg rebase rebasing 1:b4bffa6e4776 "r1" (qbase r1) note: rebase of 1:b4bffa6e4776 created no changes to commit rebasing 2:c0fd129beb01 "r2" (r2) rebasing 3:6ff5b8feed8e "r3" (r3) note: rebase of 3:6ff5b8feed8e created no changes to commit rebasing 4:094320fec554 "r4" (r4) unresolved conflicts (see hg resolve, then hg rebase --continue) [1] $ HGMERGE=internal:local hg resolve --all (no more unresolved files) continue: hg rebase --continue $ hg rebase --continue already rebased 1:b4bffa6e4776 "r1" (qbase r1) as 057f55ff8f44 already rebased 2:c0fd129beb01 "r2" (r2) as 1660ab13ce9a already rebased 3:6ff5b8feed8e "r3" (r3) as 1660ab13ce9a rebasing 4:094320fec554 "r4" (r4) note: rebase of 4:094320fec554 created no changes to commit rebasing 5:681a378595ba "r5" (r5) rebasing 6:512a1f24768b "r6" (qtip r6) note: rebase of 6:512a1f24768b created no changes to commit saved backup bundle to $TESTTMP/b/.hg/strip-backup/b4bffa6e4776-b9bfb84d-backup.hg (glob) $ hg tglog @ 8: 'r5' tags: qtip r5 tip | o 7: 'r2' tags: qbase r2 | o 6: 'branch2-r6' tags: qparent | o 5: 'branch2-r4' tags: | o 4: 'branch2-r8' tags: | o 3: 'branch2-r7' tags: | o 2: 'branch2-r3' tags: | o 1: 'branch2-r1' tags: | o 0: 'r0' tags: $ cd .. mercurial-3.7.3/tests/test-module-imports.t0000644000175000017500000001157212676531525020421 0ustar mpmmpm00000000000000#require test-repo $ import_checker="$TESTDIR"/../contrib/import-checker.py Run the doctests from the import checker, and make sure it's working correctly. 
$ TERM=dumb $ export TERM $ python -m doctest $import_checker Run additional tests for the import checker $ mkdir testpackage $ cat > testpackage/multiple.py << EOF > from __future__ import absolute_import > import os, sys > EOF $ cat > testpackage/unsorted.py << EOF > from __future__ import absolute_import > import sys > import os > EOF $ cat > testpackage/stdafterlocal.py << EOF > from __future__ import absolute_import > from . import unsorted > import os > EOF $ cat > testpackage/requirerelative.py << EOF > from __future__ import absolute_import > import testpackage.unsorted > EOF $ cat > testpackage/importalias.py << EOF > from __future__ import absolute_import > import ui > EOF $ cat > testpackage/relativestdlib.py << EOF > from __future__ import absolute_import > from .. import os > EOF $ cat > testpackage/symbolimport.py << EOF > from __future__ import absolute_import > from .unsorted import foo > EOF $ cat > testpackage/latesymbolimport.py << EOF > from __future__ import absolute_import > from . import unsorted > from mercurial.node import hex > EOF $ cat > testpackage/multiplegroups.py << EOF > from __future__ import absolute_import > from . import unsorted > from . import more > EOF $ mkdir testpackage/subpackage $ cat > testpackage/subpackage/levelpriority.py << EOF > from __future__ import absolute_import > from . import foo > from .. import parent > EOF $ touch testpackage/subpackage/foo.py $ cat > testpackage/subpackage/__init__.py << EOF > from __future__ import absolute_import > from . import levelpriority # should not cause cycle > EOF $ cat > testpackage/subpackage/localimport.py << EOF > from __future__ import absolute_import > from . import foo > def bar(): > # should not cause "higher-level import should come first" > from .. import unsorted > # but other errors should be detected > from .. 
import more > import testpackage.subpackage.levelpriority > EOF $ cat > testpackage/importmodulefromsub.py << EOF > from __future__ import absolute_import > from .subpackage import foo # not a "direct symbol import" > EOF $ cat > testpackage/importsymbolfromsub.py << EOF > from __future__ import absolute_import > from .subpackage import foo, nonmodule > EOF $ cat > testpackage/sortedentries.py << EOF > from __future__ import absolute_import > from . import ( > foo, > bar, > ) > EOF $ cat > testpackage/importfromalias.py << EOF > from __future__ import absolute_import > from . import ui > EOF $ cat > testpackage/importfromrelative.py << EOF > from __future__ import absolute_import > from testpackage.unsorted import foo > EOF $ python "$import_checker" testpackage/*.py testpackage/subpackage/*.py testpackage/importalias.py:2: ui module must be "as" aliased to uimod testpackage/importfromalias.py:2: ui from testpackage must be "as" aliased to uimod testpackage/importfromrelative.py:2: import should be relative: testpackage.unsorted testpackage/importfromrelative.py:2: direct symbol import foo from testpackage.unsorted testpackage/importsymbolfromsub.py:2: direct symbol import nonmodule from testpackage.subpackage testpackage/latesymbolimport.py:3: symbol import follows non-symbol import: mercurial.node testpackage/multiple.py:2: multiple imported names: os, sys testpackage/multiplegroups.py:3: multiple "from . import" statements testpackage/relativestdlib.py:2: relative import of stdlib module testpackage/requirerelative.py:2: import should be relative: testpackage.unsorted testpackage/sortedentries.py:2: imports from testpackage not lexically sorted: bar < foo testpackage/stdafterlocal.py:3: stdlib import follows local import: os testpackage/subpackage/levelpriority.py:3: higher-level import should come first: testpackage testpackage/subpackage/localimport.py:7: multiple "from .. 
import" statements testpackage/subpackage/localimport.py:8: import should be relative: testpackage.subpackage.levelpriority testpackage/symbolimport.py:2: direct symbol import foo from testpackage.unsorted testpackage/unsorted.py:3: imports not lexically sorted: os < sys [1] $ cd "$TESTDIR"/.. There are a handful of cases here that require renaming a module so it doesn't overlap with a stdlib module name. There are also some cycles here that we should still endeavor to fix, and some cycles will be hidden by deduplication algorithm in the cycle detector, so fixing these may expose other cycles. $ hg locate 'mercurial/**.py' 'hgext/**.py' | sed 's-\\-/-g' | python "$import_checker" - Import cycle: hgext.largefiles.basestore -> hgext.largefiles.localstore -> hgext.largefiles.basestore [1] mercurial-3.7.3/tests/test-bookmarks-merge.t0000644000175000017500000001013312676531525020516 0ustar mpmmpm00000000000000# init $ hg init $ echo a > a $ hg add a $ hg commit -m'a' $ echo b > b $ hg add b $ hg commit -m'b' $ hg up -C 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo c > c $ hg add c $ hg commit -m'c' created new head # test merging of diverged bookmarks $ hg bookmark -r 1 "c@diverge" $ hg bookmark -r 1 b $ hg bookmark c $ hg bookmarks b 1:d2ae7f538514 * c 2:d36c0562f908 c@diverge 1:d2ae7f538514 $ hg merge "c@diverge" 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg commit -m'merge' $ hg bookmarks b 1:d2ae7f538514 * c 3:b8f96cf4688b $ hg up -C 3 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (leaving bookmark c) $ echo d > d $ hg add d $ hg commit -m'd' $ hg up -C 3 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo e > e $ hg add e $ hg commit -m'e' created new head $ hg up -C 5 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bookmark e $ hg bookmarks b 1:d2ae7f538514 c 3:b8f96cf4688b * e 5:26bee9c5bcf3 # 
the picked side is bookmarked $ hg up -C 4 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (leaving bookmark e) $ hg merge abort: heads are bookmarked - please merge with an explicit rev (run 'hg heads' to see all heads) [255] # our revision is bookmarked $ hg up -C e 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (activating bookmark e) $ hg merge abort: no matching bookmark to merge - please merge with an explicit rev or bookmark (run 'hg heads' to see all heads) [255] # merge bookmark heads $ hg up -C 4 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (leaving bookmark e) $ echo f > f $ hg commit -Am "f" adding f $ hg bookmarks -r 4 "e@diverged" $ hg up -q -C "e@diverged" $ hg merge 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg parents changeset: 4:a0546fcfe0fb bookmark: e@diverged user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: d changeset: 5:26bee9c5bcf3 bookmark: e parent: 3:b8f96cf4688b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: e $ hg up -C e 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (activating bookmark e) $ hg bookmarks b 1:d2ae7f538514 c 3:b8f96cf4688b * e 5:26bee9c5bcf3 e@diverged 4:a0546fcfe0fb $ hg merge 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg commit -m'merge' $ hg bookmarks b 1:d2ae7f538514 c 3:b8f96cf4688b * e 7:ca784329f0ba # test warning when all heads are inactive bookmarks $ hg up -C 6 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (leaving bookmark e) $ echo g > g $ hg commit -Am 'g' adding g $ hg bookmark -i g $ hg bookmarks b 1:d2ae7f538514 c 3:b8f96cf4688b e 7:ca784329f0ba g 8:04dd21731d95 $ hg heads changeset: 8:04dd21731d95 bookmark: g tag: tip parent: 6:be381d1126a0 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: g changeset: 7:ca784329f0ba bookmark: e parent: 
5:26bee9c5bcf3 parent: 4:a0546fcfe0fb user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: merge $ hg merge abort: heads are bookmarked - please merge with an explicit rev (run 'hg heads' to see all heads) [255] mercurial-3.7.3/tests/test-dirstate.t0000644000175000017500000000353312676531525017256 0ustar mpmmpm00000000000000------ Test dirstate._dirs refcounting $ hg init t $ cd t $ mkdir -p a/b/c/d $ touch a/b/c/d/x $ touch a/b/c/d/y $ touch a/b/c/d/z $ hg ci -Am m adding a/b/c/d/x adding a/b/c/d/y adding a/b/c/d/z $ hg mv a z moving a/b/c/d/x to z/b/c/d/x (glob) moving a/b/c/d/y to z/b/c/d/y (glob) moving a/b/c/d/z to z/b/c/d/z (glob) Test name collisions $ rm z/b/c/d/x $ mkdir z/b/c/d/x $ touch z/b/c/d/x/y $ hg add z/b/c/d/x/y abort: file 'z/b/c/d/x' in dirstate clashes with 'z/b/c/d/x/y' [255] $ rm -rf z/b/c/d $ touch z/b/c/d $ hg add z/b/c/d abort: directory 'z/b/c/d' already in dirstate [255] $ cd .. Issue1790: dirstate entry locked into unset if file mtime is set into the future Prepare test repo: $ hg init u $ cd u $ echo a > a $ hg add adding a $ hg ci -m1 Set mtime of a into the future: $ touch -t 202101011200 a Status must not set a's entry to unset (issue1790): $ hg status $ hg debugstate n 644 2 2021-01-01 12:00:00 a Test modulo storage/comparison of absurd dates: #if no-aix $ touch -t 195001011200 a $ hg st $ hg debugstate n 644 2 2018-01-19 15:14:08 a #endif Verify that exceptions during a dirstate change leave the dirstate coherent (issue4353) $ cat > ../dirstateexception.py < from mercurial import merge, extensions, error > > def wraprecordupdates(orig, repo, actions, branchmerge): > raise error.Abort("simulated error while recording dirstateupdates") > > def reposetup(ui, repo): > extensions.wrapfunction(merge, 'recordupdates', wraprecordupdates) > EOF $ hg rm a $ hg commit -m 'rm a' $ echo "[extensions]" >> .hg/hgrc $ echo "dirstateex=../dirstateexception.py" >> .hg/hgrc $ hg up 0 abort: simulated error while recording dirstateupdates [255] $ 
hg log -r . -T '{rev}\n' 1 $ hg status ? a mercurial-3.7.3/tests/test-tags.t0000644000175000017500000005216312676531525016400 0ustar mpmmpm00000000000000setup $ cat >> $HGRCPATH << EOF > [extensions] > blackbox= > mock=$TESTDIR/mockblackbox.py > EOF Helper functions: $ cacheexists() { > [ -f .hg/cache/tags2-visible ] && echo "tag cache exists" || echo "no tag cache" > } $ fnodescacheexists() { > [ -f .hg/cache/hgtagsfnodes1 ] && echo "fnodes cache exists" || echo "no fnodes cache" > } $ dumptags() { > rev=$1 > echo "rev $rev: .hgtags:" > hg cat -r$rev .hgtags > } # XXX need to test that the tag cache works when we strip an old head # and add a new one rooted off non-tip: i.e. node and rev of tip are the # same, but stuff has changed behind tip. Setup: $ hg init t $ cd t $ cacheexists no tag cache $ fnodescacheexists no fnodes cache $ hg id 000000000000 tip $ cacheexists no tag cache $ fnodescacheexists no fnodes cache $ echo a > a $ hg add a $ hg commit -m "test" $ hg co 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg identify acb14030fe0a tip $ hg identify -r 'wdir()' acb14030fe0a tip $ cacheexists tag cache exists No fnodes cache because .hgtags file doesn't exist (this is an implementation detail) $ fnodescacheexists no fnodes cache Try corrupting the cache $ printf 'a b' > .hg/cache/tags2-visible $ hg identify acb14030fe0a tip $ cacheexists tag cache exists $ fnodescacheexists no fnodes cache $ hg identify acb14030fe0a tip Create local tag with long name: $ T=`hg identify --debug --id` $ hg tag -l "This is a local tag with a really long name!" $ hg tags tip 0:acb14030fe0a This is a local tag with a really long name! 
0:acb14030fe0a $ rm .hg/localtags Create a tag behind hg's back: $ echo "$T first" > .hgtags $ cat .hgtags acb14030fe0a21b60322c440ad2d20cf7685a376 first $ hg add .hgtags $ hg commit -m "add tags" $ hg tags tip 1:b9154636be93 first 0:acb14030fe0a $ hg identify b9154636be93 tip We should have a fnodes cache now that we have a real tag The cache should have an empty entry for rev 0 and a valid entry for rev 1. $ fnodescacheexists fnodes cache exists $ f --size --hexdump .hg/cache/hgtagsfnodes1 .hg/cache/hgtagsfnodes1: size=48 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................| 0010: ff ff ff ff ff ff ff ff b9 15 46 36 26 b7 b4 a7 |..........F6&...| 0020: 73 e0 9e e3 c5 2f 51 0e 19 e0 5e 1f f9 66 d8 59 |s..../Q...^..f.Y| Repeat with cold tag cache: $ rm -f .hg/cache/tags2-visible .hg/cache/hgtagsfnodes1 $ hg identify b9154636be93 tip $ fnodescacheexists fnodes cache exists $ f --size --hexdump .hg/cache/hgtagsfnodes1 .hg/cache/hgtagsfnodes1: size=48 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................| 0010: ff ff ff ff ff ff ff ff b9 15 46 36 26 b7 b4 a7 |..........F6&...| 0020: 73 e0 9e e3 c5 2f 51 0e 19 e0 5e 1f f9 66 d8 59 |s..../Q...^..f.Y| And again, but now unable to write tag cache or lock file: #if unix-permissions $ rm -f .hg/cache/tags2-visible .hg/cache/hgtagsfnodes1 $ chmod 555 .hg/cache $ hg identify b9154636be93 tip $ chmod 755 .hg/cache $ chmod 555 .hg $ hg identify b9154636be93 tip $ chmod 755 .hg #endif Tag cache debug info written to blackbox log $ rm -f .hg/cache/tags2-visible .hg/cache/hgtagsfnodes1 $ hg identify b9154636be93 tip $ hg blackbox -l 5 1970/01/01 00:00:00 bob (*)> identify (glob) 1970/01/01 00:00:00 bob (*)> writing 48 bytes to cache/hgtagsfnodes1 (glob) 1970/01/01 00:00:00 bob (*)> 0/1 cache hits/lookups in * seconds (glob) 1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 1 tags (glob) 1970/01/01 00:00:00 bob (*)> identify exited 0 after ?.?? 
seconds (glob) Failure to acquire lock results in no write $ rm -f .hg/cache/tags2-visible .hg/cache/hgtagsfnodes1 $ echo 'foo:1' > .hg/wlock $ hg identify b9154636be93 tip $ hg blackbox -l 5 1970/01/01 00:00:00 bob (*)> identify (glob) 1970/01/01 00:00:00 bob (*)> not writing .hg/cache/hgtagsfnodes1 because lock cannot be acquired (glob) 1970/01/01 00:00:00 bob (*)> 0/1 cache hits/lookups in * seconds (glob) 1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 1 tags (glob) 1970/01/01 00:00:00 bob (*)> identify exited 0 after * seconds (glob) $ fnodescacheexists no fnodes cache $ rm .hg/wlock $ rm -f .hg/cache/tags2-visible .hg/cache/hgtagsfnodes1 $ hg identify b9154636be93 tip Create a branch: $ echo bb > a $ hg status M a $ hg identify b9154636be93+ tip $ hg co first 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg id acb14030fe0a+ first $ hg id -r 'wdir()' acb14030fe0a+ first $ hg -v id acb14030fe0a+ first $ hg status M a $ echo 1 > b $ hg add b $ hg commit -m "branch" created new head Creating a new commit shouldn't append the .hgtags fnodes cache until tags info is accessed $ f --size --hexdump .hg/cache/hgtagsfnodes1 .hg/cache/hgtagsfnodes1: size=48 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................| 0010: ff ff ff ff ff ff ff ff b9 15 46 36 26 b7 b4 a7 |..........F6&...| 0020: 73 e0 9e e3 c5 2f 51 0e 19 e0 5e 1f f9 66 d8 59 |s..../Q...^..f.Y| $ hg id c8edf04160c7 tip First 4 bytes of record 3 are changeset fragment $ f --size --hexdump .hg/cache/hgtagsfnodes1 .hg/cache/hgtagsfnodes1: size=72 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................| 0010: ff ff ff ff ff ff ff ff b9 15 46 36 26 b7 b4 a7 |..........F6&...| 0020: 73 e0 9e e3 c5 2f 51 0e 19 e0 5e 1f f9 66 d8 59 |s..../Q...^..f.Y| 0030: c8 ed f0 41 00 00 00 00 00 00 00 00 00 00 00 00 |...A............| 0040: 00 00 00 00 00 00 00 00 |........| Merge the two heads: $ hg merge 1 1 files updated, 0 files merged, 0 files 
removed, 0 files unresolved (branch merge, don't forget to commit) $ hg id c8edf04160c7+b9154636be93+ tip $ hg status M .hgtags $ hg commit -m "merge" Create a fake head, make sure tag not visible afterwards: $ cp .hgtags tags $ hg tag last $ hg rm .hgtags $ hg commit -m "remove" $ mv tags .hgtags $ hg add .hgtags $ hg commit -m "readd" $ $ hg tags tip 6:35ff301afafe first 0:acb14030fe0a Add invalid tags: $ echo "spam" >> .hgtags $ echo >> .hgtags $ echo "foo bar" >> .hgtags $ echo "a5a5 invalid" >> .hg/localtags $ cat .hgtags acb14030fe0a21b60322c440ad2d20cf7685a376 first spam foo bar $ hg commit -m "tags" Report tag parse error on other head: $ hg up 3 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo 'x y' >> .hgtags $ hg commit -m "head" created new head $ hg tags .hgtags@75d9f02dfe28, line 2: cannot parse entry .hgtags@75d9f02dfe28, line 4: node 'foo' is not well formed .hgtags@c4be69a18c11, line 2: node 'x' is not well formed tip 8:c4be69a18c11 first 0:acb14030fe0a $ hg tip changeset: 8:c4be69a18c11 tag: tip parent: 3:ac5e980c4dc0 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: head Test tag precedence rules: $ cd .. 
$ hg init t2 $ cd t2 $ echo foo > foo $ hg add foo $ hg ci -m 'add foo' # rev 0 $ hg tag bar # rev 1 $ echo >> foo $ hg ci -m 'change foo 1' # rev 2 $ hg up -C 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg tag -r 1 -f bar # rev 3 $ hg up -C 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo >> foo $ hg ci -m 'change foo 2' # rev 4 created new head $ hg tags tip 4:0c192d7d5e6b bar 1:78391a272241 Repeat in case of cache effects: $ hg tags tip 4:0c192d7d5e6b bar 1:78391a272241 Detailed dump of tag info: $ hg heads -q # expect 4, 3, 2 4:0c192d7d5e6b 3:6fa450212aeb 2:7a94127795a3 $ dumptags 2 rev 2: .hgtags: bbd179dfa0a71671c253b3ae0aa1513b60d199fa bar $ dumptags 3 rev 3: .hgtags: bbd179dfa0a71671c253b3ae0aa1513b60d199fa bar bbd179dfa0a71671c253b3ae0aa1513b60d199fa bar 78391a272241d70354aa14c874552cad6b51bb42 bar $ dumptags 4 rev 4: .hgtags: bbd179dfa0a71671c253b3ae0aa1513b60d199fa bar Dump cache: $ cat .hg/cache/tags2-visible 4 0c192d7d5e6b78a714de54a2e9627952a877e25a bbd179dfa0a71671c253b3ae0aa1513b60d199fa bar bbd179dfa0a71671c253b3ae0aa1513b60d199fa bar 78391a272241d70354aa14c874552cad6b51bb42 bar $ f --size --hexdump .hg/cache/hgtagsfnodes1 .hg/cache/hgtagsfnodes1: size=120 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................| 0010: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................| 0020: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................| 0030: 7a 94 12 77 0c 04 f2 a8 af 31 de 17 fa b7 42 28 |z..w.....1....B(| 0040: 78 ee 5a 2d ad bc 94 3d 6f a4 50 21 7d 3b 71 8c |x.Z-...=o.P!};q.| 0050: 96 4e f3 7b 89 e5 50 eb da fd 57 89 e7 6c e1 b0 |.N.{..P...W..l..| 0060: 0c 19 2d 7d 0c 04 f2 a8 af 31 de 17 fa b7 42 28 |..-}.....1....B(| 0070: 78 ee 5a 2d ad bc 94 3d |x.Z-...=| Corrupt the .hgtags fnodes cache Extra junk data at the end should get overwritten on next cache update $ echo extra >> .hg/cache/hgtagsfnodes1 $ echo dummy1 > foo $ hg commit -m throwaway1 $ hg 
tags tip 5:8dbfe60eff30 bar 1:78391a272241 $ hg blackbox -l 5 1970/01/01 00:00:00 bob (*)> tags (glob) 1970/01/01 00:00:00 bob (*)> writing 24 bytes to cache/hgtagsfnodes1 (glob) 1970/01/01 00:00:00 bob (*)> 2/3 cache hits/lookups in * seconds (glob) 1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 1 tags (glob) 1970/01/01 00:00:00 bob (*)> tags exited 0 after * seconds (glob) #if unix-permissions no-root Errors writing to .hgtags fnodes cache are silently ignored $ echo dummy2 > foo $ hg commit -m throwaway2 $ chmod a-w .hg/cache/hgtagsfnodes1 $ rm -f .hg/cache/tags2-visible $ hg tags tip 6:b968051b5cf3 bar 1:78391a272241 $ hg blackbox -l 5 1970/01/01 00:00:00 bob (*)> tags (glob) 1970/01/01 00:00:00 bob (*)> couldn't write cache/hgtagsfnodes1: [Errno 13] Permission denied: '$TESTTMP/t2/.hg/cache/hgtagsfnodes1' (glob) 1970/01/01 00:00:00 bob (*)> 2/3 cache hits/lookups in * seconds (glob) 1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 1 tags (glob) 1970/01/01 00:00:00 bob (*)> tags exited 0 after * seconds (glob) $ chmod a+w .hg/cache/hgtagsfnodes1 $ rm -f .hg/cache/tags2-visible $ hg tags tip 6:b968051b5cf3 bar 1:78391a272241 $ hg blackbox -l 5 1970/01/01 00:00:00 bob (*)> tags (glob) 1970/01/01 00:00:00 bob (*)> writing 24 bytes to cache/hgtagsfnodes1 (glob) 1970/01/01 00:00:00 bob (*)> 2/3 cache hits/lookups in * seconds (glob) 1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 1 tags (glob) 1970/01/01 00:00:00 bob (*)> tags exited 0 after * seconds (glob) $ f --size .hg/cache/hgtagsfnodes1 .hg/cache/hgtagsfnodes1: size=168 $ hg -q --config extensions.strip= strip -r 6 --no-backup #endif Stripping doesn't truncate the tags cache until new data is available $ rm -f .hg/cache/hgtagsfnodes1 .hg/cache/tags2-visible $ hg tags tip 5:8dbfe60eff30 bar 1:78391a272241 $ f --size .hg/cache/hgtagsfnodes1 .hg/cache/hgtagsfnodes1: size=144 $ hg -q --config extensions.strip= strip -r 5 --no-backup $ hg tags tip 4:0c192d7d5e6b 
bar 1:78391a272241 $ hg blackbox -l 4 1970/01/01 00:00:00 bob (*)> writing 24 bytes to cache/hgtagsfnodes1 (glob) 1970/01/01 00:00:00 bob (*)> 2/3 cache hits/lookups in * seconds (glob) 1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 1 tags (glob) 1970/01/01 00:00:00 bob (*)> tags exited 0 after * seconds (glob) $ f --size .hg/cache/hgtagsfnodes1 .hg/cache/hgtagsfnodes1: size=120 $ echo dummy > foo $ hg commit -m throwaway3 $ hg tags tip 5:035f65efb448 bar 1:78391a272241 $ hg blackbox -l 5 1970/01/01 00:00:00 bob (*)> tags (glob) 1970/01/01 00:00:00 bob (*)> writing 24 bytes to cache/hgtagsfnodes1 (glob) 1970/01/01 00:00:00 bob (*)> 2/3 cache hits/lookups in * seconds (glob) 1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 1 tags (glob) 1970/01/01 00:00:00 bob (*)> tags exited 0 after * seconds (glob) $ f --size .hg/cache/hgtagsfnodes1 .hg/cache/hgtagsfnodes1: size=144 $ hg -q --config extensions.strip= strip -r 5 --no-backup Test tag removal: $ hg tag --remove bar # rev 5 $ hg tip -vp changeset: 5:5f6e8655b1c7 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 files: .hgtags description: Removed tag bar diff -r 0c192d7d5e6b -r 5f6e8655b1c7 .hgtags --- a/.hgtags Thu Jan 01 00:00:00 1970 +0000 +++ b/.hgtags Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,3 @@ bbd179dfa0a71671c253b3ae0aa1513b60d199fa bar +78391a272241d70354aa14c874552cad6b51bb42 bar +0000000000000000000000000000000000000000 bar $ hg tags tip 5:5f6e8655b1c7 $ hg tags # again, try to expose cache bugs tip 5:5f6e8655b1c7 Remove nonexistent tag: $ hg tag --remove foobar abort: tag 'foobar' does not exist [255] $ hg tip changeset: 5:5f6e8655b1c7 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Removed tag bar Undo a tag with rollback: $ hg rollback # destroy rev 5 (restore bar) repository tip rolled back to revision 4 (undo commit) working directory now based on revision 4 $ hg tags tip 4:0c192d7d5e6b bar 1:78391a272241 $ hg tags tip 4:0c192d7d5e6b bar 
1:78391a272241 Test tag rank: $ cd .. $ hg init t3 $ cd t3 $ echo foo > foo $ hg add foo $ hg ci -m 'add foo' # rev 0 $ hg tag -f bar # rev 1 bar -> 0 $ hg tag -f bar # rev 2 bar -> 1 $ hg tag -fr 0 bar # rev 3 bar -> 0 $ hg tag -fr 1 bar # rev 4 bar -> 1 $ hg tag -fr 0 bar # rev 5 bar -> 0 $ hg tags tip 5:85f05169d91d bar 0:bbd179dfa0a7 $ hg co 3 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo barbar > foo $ hg ci -m 'change foo' # rev 6 created new head $ hg tags tip 6:735c3ca72986 bar 0:bbd179dfa0a7 Don't allow moving tag without -f: $ hg tag -r 3 bar abort: tag 'bar' already exists (use -f to force) [255] $ hg tags tip 6:735c3ca72986 bar 0:bbd179dfa0a7 Strip 1: expose an old head: $ hg --config extensions.mq= strip 5 saved backup bundle to $TESTTMP/t3/.hg/strip-backup/*-backup.hg (glob) $ hg tags # partly stale cache tip 5:735c3ca72986 bar 1:78391a272241 $ hg tags # up-to-date cache tip 5:735c3ca72986 bar 1:78391a272241 Strip 2: destroy whole branch, no old head exposed $ hg --config extensions.mq= strip 4 saved backup bundle to $TESTTMP/t3/.hg/strip-backup/*-backup.hg (glob) $ hg tags # partly stale tip 4:735c3ca72986 bar 0:bbd179dfa0a7 $ rm -f .hg/cache/tags2-visible $ hg tags # cold cache tip 4:735c3ca72986 bar 0:bbd179dfa0a7 Test tag rank with 3 heads: $ cd .. $ hg init t4 $ cd t4 $ echo foo > foo $ hg add adding foo $ hg ci -m 'add foo' # rev 0 $ hg tag bar # rev 1 bar -> 0 $ hg tag -f bar # rev 2 bar -> 1 $ hg up -qC 0 $ hg tag -fr 2 bar # rev 3 bar -> 2 $ hg tags tip 3:197c21bbbf2c bar 2:6fa450212aeb $ hg up -qC 0 $ hg tag -m 'retag rev 0' -fr 0 bar # rev 4 bar -> 0, but bar stays at 2 Bar should still point to rev 2: $ hg tags tip 4:3b4b14ed0202 bar 2:6fa450212aeb Test that removing global/local tags does not get confused when trying to remove a tag of type X which actually only exists as a type Y: $ cd .. 
$ hg init t5 $ cd t5 $ echo foo > foo $ hg add adding foo $ hg ci -m 'add foo' # rev 0 $ hg tag -r 0 -l localtag $ hg tag --remove localtag abort: tag 'localtag' is not a global tag [255] $ $ hg tag -r 0 globaltag $ hg tag --remove -l globaltag abort: tag 'globaltag' is not a local tag [255] $ hg tags -v tip 1:a0b6fe111088 localtag 0:bbd179dfa0a7 local globaltag 0:bbd179dfa0a7 Test for issue3911 $ hg tag -r 0 -l localtag2 $ hg tag -l --remove localtag2 $ hg tags -v tip 1:a0b6fe111088 localtag 0:bbd179dfa0a7 local globaltag 0:bbd179dfa0a7 $ hg tag -r 1 -f localtag $ hg tags -v tip 2:5c70a037bb37 localtag 1:a0b6fe111088 globaltag 0:bbd179dfa0a7 $ hg tags -v tip 2:5c70a037bb37 localtag 1:a0b6fe111088 globaltag 0:bbd179dfa0a7 $ hg tag -r 1 localtag2 $ hg tags -v tip 3:bbfb8cd42be2 localtag2 1:a0b6fe111088 localtag 1:a0b6fe111088 globaltag 0:bbd179dfa0a7 $ hg tags -v tip 3:bbfb8cd42be2 localtag2 1:a0b6fe111088 localtag 1:a0b6fe111088 globaltag 0:bbd179dfa0a7 $ cd .. Create a repository with tags data to test .hgtags fnodes transfer $ hg init tagsserver $ cd tagsserver $ cat > .hg/hgrc << EOF > [experimental] > bundle2-exp=True > EOF $ touch foo $ hg -q commit -A -m initial $ hg tag -m 'tag 0.1' 0.1 $ echo second > foo $ hg commit -m second $ hg tag -m 'tag 0.2' 0.2 $ hg tags tip 3:40f0358cb314 0.2 2:f63cc8fe54e4 0.1 0:96ee1d7354c4 $ cd .. Cloning should pull down hgtags fnodes mappings and write the cache file $ hg --config experimental.bundle2-exp=True clone --pull tagsserver tagsclient requesting all changes adding changesets adding manifests adding file changes added 4 changesets with 4 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved Missing tags2* files means the cache wasn't written through the normal mechanism. 
$ ls tagsclient/.hg/cache branch2-served hgtagsfnodes1 rbc-names-v1 rbc-revs-v1 Cache should contain the head only, even though other nodes have tags data $ f --size --hexdump tagsclient/.hg/cache/hgtagsfnodes1 tagsclient/.hg/cache/hgtagsfnodes1: size=96 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................| 0010: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................| 0020: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................| 0030: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................| 0040: ff ff ff ff ff ff ff ff 40 f0 35 8c 19 e0 a7 d3 |........@.5.....| 0050: 8a 5c 6a 82 4d cf fb a5 87 d0 2f a3 1e 4f 2f 8a |.\j.M...../..O/.| Running hg tags should produce tags2* file and not change cache $ hg -R tagsclient tags tip 3:40f0358cb314 0.2 2:f63cc8fe54e4 0.1 0:96ee1d7354c4 $ ls tagsclient/.hg/cache branch2-served hgtagsfnodes1 rbc-names-v1 rbc-revs-v1 tags2-visible $ f --size --hexdump tagsclient/.hg/cache/hgtagsfnodes1 tagsclient/.hg/cache/hgtagsfnodes1: size=96 0000: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................| 0010: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................| 0020: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................| 0030: ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff ff |................| 0040: ff ff ff ff ff ff ff ff 40 f0 35 8c 19 e0 a7 d3 |........@.5.....| 0050: 8a 5c 6a 82 4d cf fb a5 87 d0 2f a3 1e 4f 2f 8a |.\j.M...../..O/.| mercurial-3.7.3/tests/tinyproxy.py0000755000175000017500000001223512676531525016736 0ustar mpmmpm00000000000000#!/usr/bin/env python from __future__ import absolute_import __doc__ = """Tiny HTTP Proxy. This module implements GET, HEAD, POST, PUT and DELETE methods on BaseHTTPServer, and behaves as an HTTP proxy. The CONNECT method is also implemented experimentally, but has not been tested yet. Any help will be greatly appreciated. 
SUZUKI Hisao """ __version__ = "0.2.1" import BaseHTTPServer import os import select import socket import SocketServer import urlparse class ProxyHandler (BaseHTTPServer.BaseHTTPRequestHandler): __base = BaseHTTPServer.BaseHTTPRequestHandler __base_handle = __base.handle server_version = "TinyHTTPProxy/" + __version__ rbufsize = 0 # self.rfile Be unbuffered def handle(self): (ip, port) = self.client_address allowed = getattr(self, 'allowed_clients', None) if allowed is not None and ip not in allowed: self.raw_requestline = self.rfile.readline() if self.parse_request(): self.send_error(403) else: self.__base_handle() def log_request(self, code='-', size='-'): xheaders = [h for h in self.headers.items() if h[0].startswith('x-')] self.log_message('"%s" %s %s%s', self.requestline, str(code), str(size), ''.join([' %s:%s' % h for h in sorted(xheaders)])) def _connect_to(self, netloc, soc): i = netloc.find(':') if i >= 0: host_port = netloc[:i], int(netloc[i + 1:]) else: host_port = netloc, 80 print "\t" "connect to %s:%d" % host_port try: soc.connect(host_port) except socket.error as arg: try: msg = arg[1] except (IndexError, TypeError): msg = arg self.send_error(404, msg) return 0 return 1 def do_CONNECT(self): soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: if self._connect_to(self.path, soc): self.log_request(200) self.wfile.write(self.protocol_version + " 200 Connection established\r\n") self.wfile.write("Proxy-agent: %s\r\n" % self.version_string()) self.wfile.write("\r\n") self._read_write(soc, 300) finally: print "\t" "bye" soc.close() self.connection.close() def do_GET(self): (scm, netloc, path, params, query, fragment) = urlparse.urlparse( self.path, 'http') if scm != 'http' or fragment or not netloc: self.send_error(400, "bad url %s" % self.path) return soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: if self._connect_to(netloc, soc): self.log_request() soc.send("%s %s %s\r\n" % ( self.command, urlparse.urlunparse(('', '', path, params, 
query, '')), self.request_version)) self.headers['Connection'] = 'close' del self.headers['Proxy-Connection'] for key_val in self.headers.items(): soc.send("%s: %s\r\n" % key_val) soc.send("\r\n") self._read_write(soc) finally: print "\t" "bye" soc.close() self.connection.close() def _read_write(self, soc, max_idling=20): iw = [self.connection, soc] ow = [] count = 0 while True: count += 1 (ins, _, exs) = select.select(iw, ow, iw, 3) if exs: break if ins: for i in ins: if i is soc: out = self.connection else: out = soc try: data = i.recv(8192) except socket.error: break if data: out.send(data) count = 0 else: print "\t" "idle", count if count == max_idling: break do_HEAD = do_GET do_POST = do_GET do_PUT = do_GET do_DELETE = do_GET class ThreadingHTTPServer (SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer): def __init__(self, *args, **kwargs): BaseHTTPServer.HTTPServer.__init__(self, *args, **kwargs) a = open("proxy.pid", "w") a.write(str(os.getpid()) + "\n") a.close() if __name__ == '__main__': from sys import argv if argv[1:] and argv[1] in ('-h', '--help'): print argv[0], "[port [allowed_client_name ...]]" else: if argv[2:]: allowed = [] for name in argv[2:]: client = socket.gethostbyname(name) allowed.append(client) print "Accept: %s (%s)" % (client, name) ProxyHandler.allowed_clients = allowed del argv[2:] else: print "Any clients will be served..." BaseHTTPServer.test(ProxyHandler, ThreadingHTTPServer) mercurial-3.7.3/tests/test-add.t0000644000175000017500000001221612676531525016165 0ustar mpmmpm00000000000000 $ hg init a $ cd a $ echo a > a $ hg add -n adding a $ hg st ? a $ hg add adding a $ hg st A a $ hg forget a $ hg add adding a $ hg st A a $ echo b > b $ hg add -n b $ hg st A a ? b $ hg add b $ hg st A a A b should fail $ hg add b b already tracked! 
$ hg st A a A b #if no-windows $ echo foo > con.xml $ hg --config ui.portablefilenames=jump add con.xml abort: ui.portablefilenames value is invalid ('jump') [255] $ hg --config ui.portablefilenames=abort add con.xml abort: filename contains 'con', which is reserved on Windows: 'con.xml' [255] $ hg st A a A b ? con.xml $ hg add con.xml warning: filename contains 'con', which is reserved on Windows: 'con.xml' $ hg st A a A b A con.xml $ hg forget con.xml $ rm con.xml #endif #if eol-in-paths $ echo bla > 'hello:world' $ hg --config ui.portablefilenames=abort add adding hello:world abort: filename contains ':', which is reserved on Windows: 'hello:world' [255] $ hg st A a A b ? hello:world $ hg --config ui.portablefilenames=ignore add adding hello:world $ hg st A a A b A hello:world #endif $ hg ci -m 0 --traceback $ hg log -r "heads(. or wdir() & file('**'))" changeset: 0:* (glob) tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0 should fail $ hg add a a already tracked! $ echo aa > a $ hg ci -m 1 $ hg up 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo aaa > a $ hg ci -m 2 created new head $ hg merge merging a warning: conflicts while merging a! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ hg st M a ? a.orig wdir doesn't cause a crash, and can be dynamically selected if dirty $ hg log -r "heads(. or wdir() & file('**'))" changeset: 2147483647:ffffffffffff parent: 2:* (glob) parent: 1:* (glob) user: test date: * (glob) should fail $ hg add a a already tracked! $ hg st M a ? a.orig $ hg resolve -m a (no more unresolved files) $ hg ci -m merge Issue683: peculiarity with hg revert of an removed then added file $ hg forget a $ hg add a $ hg st ? a.orig $ hg rm a $ hg st R a ? a.orig $ echo a > a $ hg add a $ hg st M a ? 
a.orig Forgotten file can be added back (as either clean or modified) $ hg forget b $ hg add b $ hg st -A b C b $ hg forget b $ echo modified > b $ hg add b $ hg st -A b M b $ hg revert -qC b $ hg add c && echo "unexpected addition of missing file" c: * (glob) [1] $ echo c > c $ hg add d c && echo "unexpected addition of missing file" d: * (glob) [1] $ hg st M a A c ? a.orig $ hg up -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved forget and get should have the right order: added but missing dir should be forgotten before file with same name is added $ echo file d > d $ hg add d $ hg ci -md $ hg rm d $ mkdir d $ echo a > d/a $ hg add d/a $ rm -r d $ hg up -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat d file d Test that adding a directory doesn't require case matching (issue4578) #if icasefs $ mkdir -p CapsDir1/CapsDir $ echo abc > CapsDir1/CapsDir/AbC.txt $ mkdir CapsDir1/CapsDir/SubDir $ echo def > CapsDir1/CapsDir/SubDir/Def.txt $ hg add capsdir1/capsdir adding CapsDir1/CapsDir/AbC.txt (glob) adding CapsDir1/CapsDir/SubDir/Def.txt (glob) $ hg forget capsdir1/capsdir/abc.txt removing CapsDir1/CapsDir/AbC.txt (glob) $ hg forget capsdir1/capsdir removing CapsDir1/CapsDir/SubDir/Def.txt (glob) $ hg add capsdir1 adding CapsDir1/CapsDir/AbC.txt (glob) adding CapsDir1/CapsDir/SubDir/Def.txt (glob) $ hg ci -m "AbCDef" capsdir1/capsdir $ hg status -A capsdir1/capsdir C CapsDir1/CapsDir/AbC.txt C CapsDir1/CapsDir/SubDir/Def.txt $ hg files capsdir1/capsdir CapsDir1/CapsDir/AbC.txt (glob) CapsDir1/CapsDir/SubDir/Def.txt (glob) $ echo xyz > CapsDir1/CapsDir/SubDir/Def.txt $ hg ci -m xyz capsdir1/capsdir/subdir/def.txt $ hg revert -r '.^' capsdir1/capsdir reverting CapsDir1/CapsDir/SubDir/Def.txt (glob) The conditional tests above mean the hash on the diff line differs on Windows and OS X $ hg diff capsdir1/capsdir diff -r * CapsDir1/CapsDir/SubDir/Def.txt (glob) --- a/CapsDir1/CapsDir/SubDir/Def.txt Thu Jan 01 00:00:00 
1970 +0000 +++ b/CapsDir1/CapsDir/SubDir/Def.txt * +0000 (glob) @@ -1,1 +1,1 @@ -xyz +def $ hg mv CapsDir1/CapsDir/abc.txt CapsDir1/CapsDir/ABC.txt moving CapsDir1/CapsDir/AbC.txt to CapsDir1/CapsDir/ABC.txt (glob) $ hg ci -m "case changing rename" CapsDir1/CapsDir/AbC.txt CapsDir1/CapsDir/ABC.txt $ hg status -A capsdir1/capsdir M CapsDir1/CapsDir/SubDir/Def.txt C CapsDir1/CapsDir/ABC.txt $ hg remove -f 'glob:**.txt' -X capsdir1/capsdir $ hg remove -f 'glob:**.txt' -I capsdir1/capsdir removing CapsDir1/CapsDir/ABC.txt (glob) removing CapsDir1/CapsDir/SubDir/Def.txt (glob) #endif $ cd .. mercurial-3.7.3/tests/test-pathencode.py0000644000175000017500000001432512676531525017737 0ustar mpmmpm00000000000000# This is a randomized test that generates different pathnames every # time it is invoked, and tests the encoding of those pathnames. # # It uses a simple probabilistic model to generate valid pathnames # that have proven likely to expose bugs and divergent behavior in # different encoding implementations. from mercurial import store import binascii, itertools, math, os, random, sys, time import collections validchars = set(map(chr, range(0, 256))) alphanum = range(ord('A'), ord('Z')) for c in '\0/': validchars.remove(c) winreserved = ('aux con prn nul'.split() + ['com%d' % i for i in xrange(1, 10)] + ['lpt%d' % i for i in xrange(1, 10)]) def casecombinations(names): '''Build all case-diddled combinations of names.''' combos = set() for r in names: for i in xrange(len(r) + 1): for c in itertools.combinations(xrange(len(r)), i): d = r for j in c: d = ''.join((d[:j], d[j].upper(), d[j + 1:])) combos.add(d) return sorted(combos) def buildprobtable(fp, cmd='hg manifest tip'): '''Construct and print a table of probabilities for path name components. 
The numbers are percentages.''' counts = collections.defaultdict(lambda: 0) for line in os.popen(cmd).read().splitlines(): if line[-2:] in ('.i', '.d'): line = line[:-2] if line.startswith('data/'): line = line[5:] for c in line: counts[c] += 1 for c in '\r/\n': counts.pop(c, None) t = sum(counts.itervalues()) / 100.0 fp.write('probtable = (') for i, (k, v) in enumerate(sorted(counts.iteritems(), key=lambda x: x[1], reverse=True)): if (i % 5) == 0: fp.write('\n ') vt = v / t if vt < 0.0005: break fp.write('(%r, %.03f), ' % (k, vt)) fp.write('\n )\n') # A table of character frequencies (as percentages), gleaned by # looking at filelog names from a real-world, very large repo. probtable = ( ('t', 9.828), ('e', 9.042), ('s', 8.011), ('a', 6.801), ('i', 6.618), ('g', 5.053), ('r', 5.030), ('o', 4.887), ('p', 4.363), ('n', 4.258), ('l', 3.830), ('h', 3.693), ('_', 3.659), ('.', 3.377), ('m', 3.194), ('u', 2.364), ('d', 2.296), ('c', 2.163), ('b', 1.739), ('f', 1.625), ('6', 0.666), ('j', 0.610), ('y', 0.554), ('x', 0.487), ('w', 0.477), ('k', 0.476), ('v', 0.473), ('3', 0.336), ('1', 0.335), ('2', 0.326), ('4', 0.310), ('5', 0.305), ('9', 0.302), ('8', 0.300), ('7', 0.299), ('q', 0.298), ('0', 0.250), ('z', 0.223), ('-', 0.118), ('C', 0.095), ('T', 0.087), ('F', 0.085), ('B', 0.077), ('S', 0.076), ('P', 0.076), ('L', 0.059), ('A', 0.058), ('N', 0.051), ('D', 0.049), ('M', 0.046), ('E', 0.039), ('I', 0.035), ('R', 0.035), ('G', 0.028), ('U', 0.026), ('W', 0.025), ('O', 0.017), ('V', 0.015), ('H', 0.013), ('Q', 0.011), ('J', 0.007), ('K', 0.005), ('+', 0.004), ('X', 0.003), ('Y', 0.001), ) for c, _ in probtable: validchars.remove(c) validchars = list(validchars) def pickfrom(rng, table): c = 0 r = rng.random() * sum(i[1] for i in table) for i, p in table: c += p if c >= r: return i reservedcombos = casecombinations(winreserved) # The first component of a name following a slash. 
firsttable = ( (lambda rng: pickfrom(rng, probtable), 90), (lambda rng: rng.choice(validchars), 5), (lambda rng: rng.choice(reservedcombos), 5), ) # Components of a name following the first. resttable = firsttable[:-1] # Special suffixes. internalsuffixcombos = casecombinations('.hg .i .d'.split()) # The last component of a path, before a slash or at the end of a name. lasttable = resttable + ( (lambda rng: '', 95), (lambda rng: rng.choice(internalsuffixcombos), 5), ) def makepart(rng, k): '''Construct a part of a pathname, without slashes.''' p = pickfrom(rng, firsttable)(rng) l = len(p) ps = [p] maxl = rng.randint(1, k) while l < maxl: p = pickfrom(rng, resttable)(rng) l += len(p) ps.append(p) ps.append(pickfrom(rng, lasttable)(rng)) return ''.join(ps) def makepath(rng, j, k): '''Construct a complete pathname.''' return ('data/' + '/'.join(makepart(rng, k) for _ in xrange(j)) + rng.choice(['.d', '.i'])) def genpath(rng, count): '''Generate random pathnames with gradually increasing lengths.''' mink, maxk = 1, 4096 def steps(): for i in xrange(count): yield mink + int(round(math.sqrt((maxk - mink) * float(i) / count))) for k in steps(): x = rng.randint(1, k) y = rng.randint(1, k) yield makepath(rng, x, y) def runtests(rng, seed, count): nerrs = 0 for p in genpath(rng, count): h = store._pathencode(p) # uses C implementation, if available r = store._hybridencode(p, True) # reference implementation in Python if h != r: if nerrs == 0: print >> sys.stderr, 'seed:', hex(seed)[:-1] print >> sys.stderr, "\np: '%s'" % p.encode("string_escape") print >> sys.stderr, "h: '%s'" % h.encode("string_escape") print >> sys.stderr, "r: '%s'" % r.encode("string_escape") nerrs += 1 return nerrs def main(): import getopt # Empirically observed to take about a second to run count = 100 seed = None opts, args = getopt.getopt(sys.argv[1:], 'c:s:', ['build', 'count=', 'seed=']) for o, a in opts: if o in ('-c', '--count'): count = int(a) elif o in ('-s', '--seed'): seed = long(a, base=0) # 
accepts base 10 or 16 strings elif o == '--build': buildprobtable(sys.stdout, 'find .hg/store/data -type f && ' 'cat .hg/store/fncache 2>/dev/null') sys.exit(0) if seed is None: try: seed = long(binascii.hexlify(os.urandom(16)), 16) except AttributeError: seed = long(time.time() * 1000) rng = random.Random(seed) if runtests(rng, seed, count): sys.exit(1) if __name__ == '__main__': main() mercurial-3.7.3/tests/test-pull-http.t0000644000175000017500000000425612676531525017373 0ustar mpmmpm00000000000000#require killdaemons $ hg init test $ cd test $ echo a > a $ hg ci -Ama adding a $ cd .. $ hg clone test test2 updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd test2 $ echo a >> a $ hg ci -mb Cloning with a password in the URL should not save the password in .hg/hgrc: $ hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log $ cat hg.pid >> $DAEMON_PIDS $ hg clone http://foo:xyzzy@localhost:$HGPORT/ test3 requesting all changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat test3/.hg/hgrc # example repository config (see "hg help config" for more info) [paths] default = http://foo@localhost:$HGPORT/ # path aliases to other clones of this repo in URLs or filesystem paths # (see "hg help config.paths" for more info) # # default-push = ssh://jdoe@example.net/hg/jdoes-fork # my-fork = ssh://jdoe@example.net/hg/jdoes-fork # my-clone = /home/jdoe/jdoes-clone [ui] # name and email (local to this repository, optional), e.g. 
# username = Jane Doe $ killdaemons.py expect error, cloning not allowed $ echo '[web]' > .hg/hgrc $ echo 'allowpull = false' >> .hg/hgrc $ hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log $ cat hg.pid >> $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/ test4 --config experimental.bundle2-exp=True requesting all changes abort: authorization failed [255] $ hg clone http://localhost:$HGPORT/ test4 --config experimental.bundle2-exp=False abort: authorization failed [255] $ killdaemons.py serve errors $ cat errors.log $ req() { > hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log > cat hg.pid >> $DAEMON_PIDS > hg --cwd ../test pull http://localhost:$HGPORT/ > killdaemons.py hg.pid > echo % serve errors > cat errors.log > } expect error, pulling not allowed $ req pulling from http://localhost:$HGPORT/ searching for changes abort: authorization failed % serve errors $ cd .. mercurial-3.7.3/tests/test-diff-hashes.t0000644000175000017500000000176512676531525017625 0ustar mpmmpm00000000000000 $ hg init a $ cd a $ hg diff inexistent1 inexistent2 inexistent1: * (glob) inexistent2: * (glob) $ echo bar > foo $ hg add foo $ hg ci -m 'add foo' $ echo foobar > foo $ hg ci -m 'change foo' $ hg --quiet diff -r 0 -r 1 --- a/foo Thu Jan 01 00:00:00 1970 +0000 +++ b/foo Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,1 @@ -bar +foobar $ hg diff -r 0 -r 1 diff -r a99fb63adac3 -r 9b8568d3af2f foo --- a/foo Thu Jan 01 00:00:00 1970 +0000 +++ b/foo Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,1 @@ -bar +foobar $ hg --verbose diff -r 0 -r 1 diff -r a99fb63adac3 -r 9b8568d3af2f foo --- a/foo Thu Jan 01 00:00:00 1970 +0000 +++ b/foo Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,1 @@ -bar +foobar $ hg --debug diff -r 0 -r 1 diff -r a99fb63adac3f31816a22f665bc3b7a7655b30f4 -r 9b8568d3af2f1749445eef03aede868a6f39f210 foo --- a/foo Thu Jan 01 00:00:00 1970 +0000 +++ b/foo Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,1 @@ -bar +foobar $ cd .. 
mercurial-3.7.3/tests/test-https.t0000644000175000017500000003716412676531525016610 0ustar mpmmpm00000000000000#require serve ssl Proper https client requires the built-in ssl from Python 2.6. Certificates created with: printf '.\n.\n.\n.\n.\nlocalhost\nhg@localhost\n' | \ openssl req -newkey rsa:512 -keyout priv.pem -nodes -x509 -days 9000 -out pub.pem Can be dumped with: openssl x509 -in pub.pem -text $ cat << EOT > priv.pem > -----BEGIN PRIVATE KEY----- > MIIBVAIBADANBgkqhkiG9w0BAQEFAASCAT4wggE6AgEAAkEApjCWeYGrIa/Vo7LH > aRF8ou0tbgHKE33Use/whCnKEUm34rDaXQd4lxxX6aDWg06n9tiVStAKTgQAHJY8 > j/xgSwIDAQABAkBxHC6+Qlf0VJXGlb6NL16yEVVTQxqDS6hA9zqu6TZjrr0YMfzc > EGNIiZGt7HCBL0zO+cPDg/LeCZc6HQhf0KrhAiEAzlJq4hWWzvguWFIJWSoBeBUG > MF1ACazQO7PYE8M0qfECIQDONHHP0SKZzz/ZwBZcAveC5K61f/v9hONFwbeYulzR > +wIgc9SvbtgB/5Yzpp//4ZAEnR7oh5SClCvyB+KSx52K3nECICbhQphhoXmI10wy > aMTellaq0bpNMHFDziqH9RsqAHhjAiEAgYGxfzkftt5IUUn/iFK89aaIpyrpuaAh > HY8gUVkVRVs= > -----END PRIVATE KEY----- > EOT $ cat << EOT > pub.pem > -----BEGIN CERTIFICATE----- > MIIBqzCCAVWgAwIBAgIJANAXFFyWjGnRMA0GCSqGSIb3DQEBBQUAMDExEjAQBgNV > BAMMCWxvY2FsaG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTEw > MTAxNDIwMzAxNFoXDTM1MDYwNTIwMzAxNFowMTESMBAGA1UEAwwJbG9jYWxob3N0 > MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhvc3QwXDANBgkqhkiG9w0BAQEFAANL > ADBIAkEApjCWeYGrIa/Vo7LHaRF8ou0tbgHKE33Use/whCnKEUm34rDaXQd4lxxX > 6aDWg06n9tiVStAKTgQAHJY8j/xgSwIDAQABo1AwTjAdBgNVHQ4EFgQUE6sA+amm > r24dGX0kpjxOgO45hzQwHwYDVR0jBBgwFoAUE6sA+ammr24dGX0kpjxOgO45hzQw > DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAANBAFArvQFiAZJgQczRsbYlG1xl > t+truk37w5B3m3Ick1ntRcQrqs+hf0CO1q6Squ144geYaQ8CDirSR92fICELI1c= > -----END CERTIFICATE----- > EOT $ cat priv.pem pub.pem >> server.pem $ PRIV=`pwd`/server.pem $ cat << EOT > pub-other.pem > -----BEGIN CERTIFICATE----- > MIIBqzCCAVWgAwIBAgIJALwZS731c/ORMA0GCSqGSIb3DQEBBQUAMDExEjAQBgNV > BAMMCWxvY2FsaG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTEw > MTAxNDIwNDUxNloXDTM1MDYwNTIwNDUxNlowMTESMBAGA1UEAwwJbG9jYWxob3N0 > 
MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhvc3QwXDANBgkqhkiG9w0BAQEFAANL > ADBIAkEAsxsapLbHrqqUKuQBxdpK4G3m2LjtyrTSdpzzzFlecxd5yhNP6AyWrufo > K4VMGo2xlu9xOo88nDSUNSKPuD09MwIDAQABo1AwTjAdBgNVHQ4EFgQUoIB1iMhN > y868rpQ2qk9dHnU6ebswHwYDVR0jBBgwFoAUoIB1iMhNy868rpQ2qk9dHnU6ebsw > DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAANBAJ544f125CsE7J2t55PdFaF6 > bBlNBb91FCywBgSjhBjf+GG3TNPwrPdc3yqeq+hzJiuInqbOBv9abmMyq8Wsoig= > -----END CERTIFICATE----- > EOT pub.pem patched with other notBefore / notAfter: $ cat << EOT > pub-not-yet.pem > -----BEGIN CERTIFICATE----- > MIIBqzCCAVWgAwIBAgIJANAXFFyWjGnRMA0GCSqGSIb3DQEBBQUAMDExEjAQBgNVBAMMCWxvY2Fs > aG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTM1MDYwNTIwMzAxNFoXDTM1MDYw > NTIwMzAxNFowMTESMBAGA1UEAwwJbG9jYWxob3N0MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhv > c3QwXDANBgkqhkiG9w0BAQEFAANLADBIAkEApjCWeYGrIa/Vo7LHaRF8ou0tbgHKE33Use/whCnK > EUm34rDaXQd4lxxX6aDWg06n9tiVStAKTgQAHJY8j/xgSwIDAQABo1AwTjAdBgNVHQ4EFgQUE6sA > +ammr24dGX0kpjxOgO45hzQwHwYDVR0jBBgwFoAUE6sA+ammr24dGX0kpjxOgO45hzQwDAYDVR0T > BAUwAwEB/zANBgkqhkiG9w0BAQUFAANBAJXV41gWnkgC7jcpPpFRSUSZaxyzrXmD1CIqQf0WgVDb > /12E0vR2DuZitgzUYtBaofM81aTtc0a2/YsrmqePGm0= > -----END CERTIFICATE----- > EOT $ cat priv.pem pub-not-yet.pem > server-not-yet.pem $ cat << EOT > pub-expired.pem > -----BEGIN CERTIFICATE----- > MIIBqzCCAVWgAwIBAgIJANAXFFyWjGnRMA0GCSqGSIb3DQEBBQUAMDExEjAQBgNVBAMMCWxvY2Fs > aG9zdDEbMBkGCSqGSIb3DQEJARYMaGdAbG9jYWxob3N0MB4XDTEwMTAxNDIwMzAxNFoXDTEwMTAx > NDIwMzAxNFowMTESMBAGA1UEAwwJbG9jYWxob3N0MRswGQYJKoZIhvcNAQkBFgxoZ0Bsb2NhbGhv > c3QwXDANBgkqhkiG9w0BAQEFAANLADBIAkEApjCWeYGrIa/Vo7LHaRF8ou0tbgHKE33Use/whCnK > EUm34rDaXQd4lxxX6aDWg06n9tiVStAKTgQAHJY8j/xgSwIDAQABo1AwTjAdBgNVHQ4EFgQUE6sA > +ammr24dGX0kpjxOgO45hzQwHwYDVR0jBBgwFoAUE6sA+ammr24dGX0kpjxOgO45hzQwDAYDVR0T > BAUwAwEB/zANBgkqhkiG9w0BAQUFAANBAJfk57DTRf2nUbYaMSlVAARxMNbFGOjQhAUtY400GhKt > 2uiKCNGKXVXD3AHWe13yHc5KttzbHQStE5Nm/DlWBWQ= > -----END CERTIFICATE----- > EOT $ cat priv.pem pub-expired.pem > server-expired.pem Client certificates 
created with: openssl genrsa -aes128 -passout pass:1234 -out client-key.pem 512 openssl rsa -in client-key.pem -passin pass:1234 -out client-key-decrypted.pem printf '.\n.\n.\n.\n.\n.\nhg-client@localhost\n.\n.\n' | \ openssl req -new -key client-key.pem -passin pass:1234 -out client-csr.pem openssl x509 -req -days 9000 -in client-csr.pem -CA pub.pem -CAkey priv.pem \ -set_serial 01 -out client-cert.pem $ cat << EOT > client-key.pem > -----BEGIN RSA PRIVATE KEY----- > Proc-Type: 4,ENCRYPTED > DEK-Info: AES-128-CBC,C8B8F103A61A336FB0716D1C0F8BB2E8 > > JolMlCFjEW3q3JJjO9z99NJWeJbFgF5DpUOkfSCxH56hxxtZb9x++rBvBZkxX1bF > BAIe+iI90+jdCLwxbILWuFcrJUaLC5WmO14XDKYVmr2eW9e4MiCYOlO0Q6a9rDFS > jctRCfvubOXFHbBGLH8uKEMpXEkP7Lc60FiIukqjuQEivJjrQirVtZCGwyk3qUi7 > Eyh4Lo63IKGu8T1Bkmn2kaMvFhu7nC/CQLBjSq0YYI1tmCOkVb/3tPrz8oqgDJp2 > u7bLS3q0xDNZ52nVrKIoZC/UlRXGlPyzPpa70/jPIdfCbkwDaBpRVXc+62Pj2n5/ > CnO2xaKwfOG6pDvanBhFD72vuBOkAYlFZPiEku4sc2WlNggsSWCPCIFwzmiHjKIl > bWmdoTq3nb7sNfnBbV0OCa7fS1dFwCm4R1NC7ELENu0= > -----END RSA PRIVATE KEY----- > EOT $ cat << EOT > client-key-decrypted.pem > -----BEGIN RSA PRIVATE KEY----- > MIIBOgIBAAJBAJs4LS3glAYU92bg5kPgRPNW84ewB0fWJfAKccCp1ACHAdZPeaKb > FCinVMYKAVbVqBkyrZ/Tyr8aSfMz4xO4+KsCAwEAAQJAeKDr25+Q6jkZHEbkLRP6 > AfMtR+Ixhk6TJT24sbZKIC2V8KuJTDEvUhLU0CAr1nH79bDqiSsecOiVCr2HHyfT > AQIhAM2C5rHbTs9R3PkywFEqq1gU3ztCnpiWglO7/cIkuGBhAiEAwVpMSAf77kop > 4h/1kWsgMALQTJNsXd4CEUK4BOxvJIsCIQCbarVAKBQvoT81jfX27AfscsxnKnh5 > +MjSvkanvdFZwQIgbbcTefwt1LV4trtz2SR0i0nNcOZmo40Kl0jIquKO3qkCIH01 > mJHzZr3+jQqeIFtr5P+Xqi30DJxgrnEobbJ0KFjY > -----END RSA PRIVATE KEY----- > EOT $ cat << EOT > client-cert.pem > -----BEGIN CERTIFICATE----- > MIIBPjCB6QIBATANBgkqhkiG9w0BAQsFADAxMRIwEAYDVQQDDAlsb2NhbGhvc3Qx > GzAZBgkqhkiG9w0BCQEWDGhnQGxvY2FsaG9zdDAeFw0xNTA1MDcwNjI5NDVaFw0z > OTEyMjcwNjI5NDVaMCQxIjAgBgkqhkiG9w0BCQEWE2hnLWNsaWVudEBsb2NhbGhv > c3QwXDANBgkqhkiG9w0BAQEFAANLADBIAkEAmzgtLeCUBhT3ZuDmQ+BE81bzh7AH > R9Yl8ApxwKnUAIcB1k95opsUKKdUxgoBVtWoGTKtn9PKvxpJ8zPjE7j4qwIDAQAB > 
MA0GCSqGSIb3DQEBCwUAA0EAfBTqBG5pYhuGk+ZnyUufgS+d7Nk/sZAZjNdCAEj/ > NFPo5fR1jM6jlEWoWbeg298+SkjV7tfO+2nt0otUFkdM6A== > -----END CERTIFICATE----- > EOT $ hg init test $ cd test $ echo foo>foo $ mkdir foo.d foo.d/bAr.hg.d foo.d/baR.d.hg $ echo foo>foo.d/foo $ echo bar>foo.d/bAr.hg.d/BaR $ echo bar>foo.d/baR.d.hg/bAR $ hg commit -A -m 1 adding foo adding foo.d/bAr.hg.d/BaR adding foo.d/baR.d.hg/bAR adding foo.d/foo $ hg serve -p $HGPORT -d --pid-file=../hg0.pid --certificate=$PRIV $ cat ../hg0.pid >> $DAEMON_PIDS cacert not found $ hg in --config web.cacerts=no-such.pem https://localhost:$HGPORT/ abort: could not find web.cacerts: no-such.pem [255] Test server address cannot be reused #if windows $ hg serve -p $HGPORT --certificate=$PRIV 2>&1 abort: cannot start server at ':$HGPORT': [255] #else $ hg serve -p $HGPORT --certificate=$PRIV 2>&1 abort: cannot start server at ':$HGPORT': Address already in use [255] #endif $ cd .. OS X has a dummy CA cert that enables use of the system CA store when using Apple's OpenSSL. This trick do not work with plain OpenSSL. $ DISABLEOSXDUMMYCERT= #if defaultcacerts $ hg clone https://localhost:$HGPORT/ copy-pull abort: error: *certificate verify failed* (glob) [255] $ DISABLEOSXDUMMYCERT="--config=web.cacerts=!" #endif clone via pull $ hg clone https://localhost:$HGPORT/ copy-pull $DISABLEOSXDUMMYCERT warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting) requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 4 changes to 4 files updating to branch default 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg verify -R copy-pull checking changesets checking manifests crosschecking files in changesets and manifests checking files 4 files, 1 changesets, 4 total revisions $ cd test $ echo bar > bar $ hg commit -A -d '1 0' -m 2 adding bar $ cd .. 
pull without cacert $ cd copy-pull $ echo '[hooks]' >> .hg/hgrc $ echo "changegroup = printenv.py changegroup" >> .hg/hgrc $ hg pull $DISABLEOSXDUMMYCERT pulling from https://localhost:$HGPORT/ warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting) searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files changegroup hook: HG_NODE=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_NODE_LAST=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=https://localhost:$HGPORT/ (glob) (run 'hg update' to get a working copy) $ cd .. cacert configured in local repo $ cp copy-pull/.hg/hgrc copy-pull/.hg/hgrc.bu $ echo "[web]" >> copy-pull/.hg/hgrc $ echo "cacerts=`pwd`/pub.pem" >> copy-pull/.hg/hgrc $ hg -R copy-pull pull --traceback pulling from https://localhost:$HGPORT/ searching for changes no changes found $ mv copy-pull/.hg/hgrc.bu copy-pull/.hg/hgrc cacert configured globally, also testing expansion of environment variables in the filename $ echo "[web]" >> $HGRCPATH $ echo 'cacerts=$P/pub.pem' >> $HGRCPATH $ P=`pwd` hg -R copy-pull pull pulling from https://localhost:$HGPORT/ searching for changes no changes found $ P=`pwd` hg -R copy-pull pull --insecure pulling from https://localhost:$HGPORT/ warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting) searching for changes no changes found cacert mismatch $ hg -R copy-pull pull --config web.cacerts=pub.pem https://127.0.0.1:$HGPORT/ pulling from https://127.0.0.1:$HGPORT/ abort: 127.0.0.1 certificate error: certificate is for localhost (configure hostfingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca or use --insecure to connect insecurely) [255] $ hg -R copy-pull pull --config 
web.cacerts=pub.pem https://127.0.0.1:$HGPORT/ --insecure pulling from https://127.0.0.1:$HGPORT/ warning: 127.0.0.1 certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting) searching for changes no changes found $ hg -R copy-pull pull --config web.cacerts=pub-other.pem pulling from https://localhost:$HGPORT/ abort: error: *certificate verify failed* (glob) [255] $ hg -R copy-pull pull --config web.cacerts=pub-other.pem --insecure pulling from https://localhost:$HGPORT/ warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting) searching for changes no changes found Test server cert which isn't valid yet $ hg -R test serve -p $HGPORT1 -d --pid-file=hg1.pid --certificate=server-not-yet.pem $ cat hg1.pid >> $DAEMON_PIDS $ hg -R copy-pull pull --config web.cacerts=pub-not-yet.pem https://localhost:$HGPORT1/ pulling from https://localhost:$HGPORT1/ abort: error: *certificate verify failed* (glob) [255] Test server cert which no longer is valid $ hg -R test serve -p $HGPORT2 -d --pid-file=hg2.pid --certificate=server-expired.pem $ cat hg2.pid >> $DAEMON_PIDS $ hg -R copy-pull pull --config web.cacerts=pub-expired.pem https://localhost:$HGPORT2/ pulling from https://localhost:$HGPORT2/ abort: error: *certificate verify failed* (glob) [255] Fingerprints $ echo "[hostfingerprints]" >> copy-pull/.hg/hgrc $ echo "localhost = 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca" >> copy-pull/.hg/hgrc $ echo "127.0.0.1 = 914f1aff87249c09b6859b88b1906d30756491ca" >> copy-pull/.hg/hgrc - works without cacerts $ hg -R copy-pull id https://localhost:$HGPORT/ --config web.cacerts=! 
5fed3813f7f5 - fails when cert doesn't match hostname (port is ignored) $ hg -R copy-pull id https://localhost:$HGPORT1/ abort: certificate for localhost has unexpected fingerprint 28:ff:71:bf:65:31:14:23:ad:62:92:b4:0e:31:99:18:fc:83:e3:9b (check hostfingerprint configuration) [255] - ignores that certificate doesn't match hostname $ hg -R copy-pull id https://127.0.0.1:$HGPORT/ 5fed3813f7f5 HGPORT1 is reused below for tinyproxy tests. Kill that server. $ killdaemons.py hg1.pid Prepare for connecting through proxy $ tinyproxy.py $HGPORT1 localhost >proxy.log &1 & $ while [ ! -f proxy.pid ]; do sleep 0; done $ cat proxy.pid >> $DAEMON_PIDS $ echo "[http_proxy]" >> copy-pull/.hg/hgrc $ echo "always=True" >> copy-pull/.hg/hgrc $ echo "[hostfingerprints]" >> copy-pull/.hg/hgrc $ echo "localhost =" >> copy-pull/.hg/hgrc Test unvalidated https through proxy $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --insecure --traceback pulling from https://localhost:$HGPORT/ warning: localhost certificate with fingerprint 91:4f:1a:ff:87:24:9c:09:b6:85:9b:88:b1:90:6d:30:75:64:91:ca not verified (check hostfingerprints or web.cacerts config setting) searching for changes no changes found Test https with cacert and fingerprint through proxy $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --config web.cacerts=pub.pem pulling from https://localhost:$HGPORT/ searching for changes no changes found $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull https://127.0.0.1:$HGPORT/ pulling from https://127.0.0.1:$HGPORT/ searching for changes no changes found Test https with cert problems through proxy $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --config web.cacerts=pub-other.pem pulling from https://localhost:$HGPORT/ abort: error: *certificate verify failed* (glob) [255] $ http_proxy=http://localhost:$HGPORT1/ hg -R copy-pull pull --config web.cacerts=pub-expired.pem https://localhost:$HGPORT2/ pulling from https://localhost:$HGPORT2/ 
abort: error: *certificate verify failed* (glob) [255] $ killdaemons.py hg0.pid #if sslcontext Start patched hgweb that requires client certificates: $ cat << EOT > reqclientcert.py > import ssl > from mercurial.hgweb import server > class _httprequesthandlersslclientcert(server._httprequesthandlerssl): > @staticmethod > def preparehttpserver(httpserver, ssl_cert): > sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLSv1) > sslcontext.verify_mode = ssl.CERT_REQUIRED > sslcontext.load_cert_chain(ssl_cert) > # verify clients by server certificate > sslcontext.load_verify_locations(ssl_cert) > httpserver.socket = sslcontext.wrap_socket(httpserver.socket, > server_side=True) > server._httprequesthandlerssl = _httprequesthandlersslclientcert > EOT $ cd test $ hg serve -p $HGPORT -d --pid-file=../hg0.pid --certificate=$PRIV \ > --config extensions.reqclientcert=../reqclientcert.py $ cat ../hg0.pid >> $DAEMON_PIDS $ cd .. without client certificate: $ P=`pwd` hg id https://localhost:$HGPORT/ abort: error: *handshake failure* (glob) [255] with client certificate: $ cat << EOT >> $HGRCPATH > [auth] > l.prefix = localhost > l.cert = client-cert.pem > l.key = client-key.pem > EOT $ P=`pwd` hg id https://localhost:$HGPORT/ \ > --config auth.l.key=client-key-decrypted.pem 5fed3813f7f5 $ printf '1234\n' | env P=`pwd` hg id https://localhost:$HGPORT/ \ > --config ui.interactive=True --config ui.nontty=True passphrase for client-key.pem: 5fed3813f7f5 $ env P=`pwd` hg id https://localhost:$HGPORT/ abort: error: * (glob) [255] #endif mercurial-3.7.3/tests/test-rename-merge2.t0000644000175000017500000011067512676531525020073 0ustar mpmmpm00000000000000 $ mkdir -p t $ cd t $ cat < merge > import sys, os > f = open(sys.argv[1], "wb") > f.write("merge %s %s %s" % (sys.argv[1], sys.argv[2], sys.argv[3])) > f.close() > EOF perform a test merge with possible renaming args: $1 = action in local branch $2 = action in remote branch $3 = action in working dir $4 = expected result $ tm() > { > hg init 
t > cd t > echo "[merge]" >> .hg/hgrc > echo "followcopies = 1" >> .hg/hgrc > > # base > echo base > a > echo base > rev # used to force commits > hg add a rev > hg ci -m "base" > > # remote > echo remote > rev > if [ "$2" != "" ] ; then $2 ; fi > hg ci -m "remote" > > # local > hg co -q 0 > echo local > rev > if [ "$1" != "" ] ; then $1 ; fi > hg ci -m "local" > > # working dir > echo local > rev > if [ "$3" != "" ] ; then $3 ; fi > > # merge > echo "--------------" > echo "test L:$1 R:$2 W:$3 - $4" > echo "--------------" > hg merge -y --debug --traceback --tool="python ../merge" > > echo "--------------" > hg status -camC -X rev > > hg ci -m "merge" > > echo "--------------" > echo > > cd .. > rm -r t > } $ up() { > cp rev $1 > hg add $1 2> /dev/null > if [ "$2" != "" ] ; then > cp rev $2 > hg add $2 2> /dev/null > fi > } $ uc() { up $1; hg cp $1 $2; } # update + copy $ um() { up $1; hg mv $1 $2; } $ nc() { hg cp $1 $2; } # just copy $ nm() { hg mv $1 $2; } # just move $ tm "up a " "nc a b" " " "1 get local a to b" created new head -------------- test L:up a R:nc a b W: - 1 get local a to b -------------- searching for copies back to rev 1 unmatched files in other: b all copies found (* = to merge, ! 
= divergent, % = renamed and deleted): src: 'a' -> dst: 'b' * checking for directory renames resolving manifests branchmerge: True, force: False, partial: False ancestor: 924404dff337, local: e300d1c794ec+, remote: 4ce40f5aca24 preserving a for resolve of b preserving rev for resolve of rev a: remote unchanged -> k b: remote copied from a -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) merging a and b to b my b@e300d1c794ec+ other b@4ce40f5aca24 ancestor a@924404dff337 premerge successful rev: versions differ -> m (premerge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) merging rev my rev@e300d1c794ec+ other rev@4ce40f5aca24 ancestor rev@924404dff337 rev: versions differ -> m (merge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) my rev@e300d1c794ec+ other rev@4ce40f5aca24 ancestor rev@924404dff337 launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob) merge tool returned: 0 0 files updated, 2 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) -------------- M b a C a -------------- $ tm "nc a b" "up a " " " "2 get rem change to a and b" created new head -------------- test L:nc a b R:up a W: - 2 get rem change to a and b -------------- searching for copies back to rev 1 unmatched files in local: b all copies found (* = to merge, ! 
= divergent, % = renamed and deleted): src: 'a' -> dst: 'b' * checking for directory renames resolving manifests branchmerge: True, force: False, partial: False ancestor: 924404dff337, local: 86a2aa42fc76+, remote: f4db7e329e71 preserving b for resolve of b preserving rev for resolve of rev a: remote is newer -> g getting a b: local copied/moved from a -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) merging b and a to b my b@86a2aa42fc76+ other a@f4db7e329e71 ancestor a@924404dff337 premerge successful rev: versions differ -> m (premerge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) merging rev my rev@86a2aa42fc76+ other rev@f4db7e329e71 ancestor rev@924404dff337 rev: versions differ -> m (merge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) my rev@86a2aa42fc76+ other rev@f4db7e329e71 ancestor rev@924404dff337 launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob) merge tool returned: 0 1 files updated, 2 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) -------------- M a M b a -------------- $ tm "up a " "nm a b" " " "3 get local a change to b, remove a" created new head -------------- test L:up a R:nm a b W: - 3 get local a change to b, remove a -------------- searching for copies back to rev 1 unmatched files in other: b all copies found (* = to merge, ! 
= divergent, % = renamed and deleted): src: 'a' -> dst: 'b' * checking for directory renames resolving manifests branchmerge: True, force: False, partial: False ancestor: 924404dff337, local: e300d1c794ec+, remote: bdb19105162a preserving a for resolve of b preserving rev for resolve of rev removing a b: remote moved from a -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) merging a and b to b my b@e300d1c794ec+ other b@bdb19105162a ancestor a@924404dff337 premerge successful rev: versions differ -> m (premerge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) merging rev my rev@e300d1c794ec+ other rev@bdb19105162a ancestor rev@924404dff337 rev: versions differ -> m (merge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) my rev@e300d1c794ec+ other rev@bdb19105162a ancestor rev@924404dff337 launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob) merge tool returned: 0 0 files updated, 2 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) -------------- M b a -------------- $ tm "nm a b" "up a " " " "4 get remote change to b" created new head -------------- test L:nm a b R:up a W: - 4 get remote change to b -------------- searching for copies back to rev 1 unmatched files in local: b all copies found (* = to merge, ! 
= divergent, % = renamed and deleted): src: 'a' -> dst: 'b' * checking for directory renames resolving manifests branchmerge: True, force: False, partial: False ancestor: 924404dff337, local: 02963e448370+, remote: f4db7e329e71 preserving b for resolve of b preserving rev for resolve of rev b: local copied/moved from a -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) merging b and a to b my b@02963e448370+ other a@f4db7e329e71 ancestor a@924404dff337 premerge successful rev: versions differ -> m (premerge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) merging rev my rev@02963e448370+ other rev@f4db7e329e71 ancestor rev@924404dff337 rev: versions differ -> m (merge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) my rev@02963e448370+ other rev@f4db7e329e71 ancestor rev@924404dff337 launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob) merge tool returned: 0 0 files updated, 2 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) -------------- M b a -------------- $ tm " " "nc a b" " " "5 get b" created new head -------------- test L: R:nc a b W: - 5 get b -------------- searching for copies back to rev 1 unmatched files in other: b all copies found (* = to merge, ! 
= divergent, % = renamed and deleted): src: 'a' -> dst: 'b' checking for directory renames resolving manifests branchmerge: True, force: False, partial: False ancestor: 924404dff337, local: 94b33a1b7f2d+, remote: 4ce40f5aca24 preserving rev for resolve of rev b: remote created -> g getting b rev: versions differ -> m (premerge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) merging rev my rev@94b33a1b7f2d+ other rev@4ce40f5aca24 ancestor rev@924404dff337 rev: versions differ -> m (merge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) my rev@94b33a1b7f2d+ other rev@4ce40f5aca24 ancestor rev@924404dff337 launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob) merge tool returned: 0 1 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) -------------- M b C a -------------- $ tm "nc a b" " " " " "6 nothing" created new head -------------- test L:nc a b R: W: - 6 nothing -------------- searching for copies back to rev 1 unmatched files in local: b all copies found (* = to merge, ! 
= divergent, % = renamed and deleted): src: 'a' -> dst: 'b' checking for directory renames resolving manifests branchmerge: True, force: False, partial: False ancestor: 924404dff337, local: 86a2aa42fc76+, remote: 97c705ade336 preserving rev for resolve of rev rev: versions differ -> m (premerge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) merging rev my rev@86a2aa42fc76+ other rev@97c705ade336 ancestor rev@924404dff337 rev: versions differ -> m (merge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) my rev@86a2aa42fc76+ other rev@97c705ade336 ancestor rev@924404dff337 launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob) merge tool returned: 0 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) -------------- C a C b -------------- $ tm " " "nm a b" " " "7 get b" created new head -------------- test L: R:nm a b W: - 7 get b -------------- searching for copies back to rev 1 unmatched files in other: b all copies found (* = to merge, ! 
= divergent, % = renamed and deleted): src: 'a' -> dst: 'b' checking for directory renames resolving manifests branchmerge: True, force: False, partial: False ancestor: 924404dff337, local: 94b33a1b7f2d+, remote: bdb19105162a preserving rev for resolve of rev a: other deleted -> r removing a b: remote created -> g getting b rev: versions differ -> m (premerge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) merging rev my rev@94b33a1b7f2d+ other rev@bdb19105162a ancestor rev@924404dff337 rev: versions differ -> m (merge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) my rev@94b33a1b7f2d+ other rev@bdb19105162a ancestor rev@924404dff337 launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob) merge tool returned: 0 1 files updated, 1 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) -------------- M b -------------- $ tm "nm a b" " " " " "8 nothing" created new head -------------- test L:nm a b R: W: - 8 nothing -------------- searching for copies back to rev 1 unmatched files in local: b all copies found (* = to merge, ! 
= divergent, % = renamed and deleted): src: 'a' -> dst: 'b' checking for directory renames resolving manifests branchmerge: True, force: False, partial: False ancestor: 924404dff337, local: 02963e448370+, remote: 97c705ade336 preserving rev for resolve of rev rev: versions differ -> m (premerge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) merging rev my rev@02963e448370+ other rev@97c705ade336 ancestor rev@924404dff337 rev: versions differ -> m (merge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) my rev@02963e448370+ other rev@97c705ade336 ancestor rev@924404dff337 launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob) merge tool returned: 0 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) -------------- C b -------------- $ tm "um a b" "um a b" " " "9 do merge with ancestor in a" created new head -------------- test L:um a b R:um a b W: - 9 do merge with ancestor in a -------------- searching for copies back to rev 1 unmatched files new in both: b resolving manifests branchmerge: True, force: False, partial: False ancestor: 924404dff337, local: 62e7bf090eba+, remote: 49b6d8032493 preserving b for resolve of b preserving rev for resolve of rev b: both renamed from a -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) merging b my b@62e7bf090eba+ other b@49b6d8032493 ancestor a@924404dff337 rev: versions differ -> m (premerge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) merging rev my rev@62e7bf090eba+ other rev@49b6d8032493 ancestor rev@924404dff337 b: both renamed from a -> m (merge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) my b@62e7bf090eba+ other b@49b6d8032493 ancestor a@924404dff337 launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob) merge tool returned: 0 rev: 
versions differ -> m (merge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) my rev@62e7bf090eba+ other rev@49b6d8032493 ancestor rev@924404dff337 launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob) merge tool returned: 0 0 files updated, 2 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) -------------- M b -------------- m "um a c" "um x c" " " "10 do merge with no ancestor" $ tm "nm a b" "nm a c" " " "11 get c, keep b" created new head -------------- test L:nm a b R:nm a c W: - 11 get c, keep b -------------- searching for copies back to rev 1 unmatched files in local: b unmatched files in other: c all copies found (* = to merge, ! = divergent, % = renamed and deleted): src: 'a' -> dst: 'b' ! src: 'a' -> dst: 'c' ! checking for directory renames resolving manifests branchmerge: True, force: False, partial: False ancestor: 924404dff337, local: 02963e448370+, remote: fe905ef2c33e note: possible conflict - a was renamed multiple times to: b c preserving rev for resolve of rev c: remote created -> g getting c rev: versions differ -> m (premerge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) merging rev my rev@02963e448370+ other rev@fe905ef2c33e ancestor rev@924404dff337 rev: versions differ -> m (merge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) my rev@02963e448370+ other rev@fe905ef2c33e ancestor rev@924404dff337 launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob) merge tool returned: 0 1 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) -------------- M c C b -------------- $ tm "nc a b" "up b " " " "12 merge b no ancestor" created new head -------------- test L:nc a b R:up b W: - 12 merge b no ancestor -------------- searching for copies back to rev 1 unmatched files new in both: b resolving manifests branchmerge: 
True, force: False, partial: False ancestor: 924404dff337, local: 86a2aa42fc76+, remote: af30c7647fc7 preserving b for resolve of b preserving rev for resolve of rev b: both created -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) merging b my b@86a2aa42fc76+ other b@af30c7647fc7 ancestor b@000000000000 rev: versions differ -> m (premerge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) merging rev my rev@86a2aa42fc76+ other rev@af30c7647fc7 ancestor rev@924404dff337 b: both created -> m (merge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) my b@86a2aa42fc76+ other b@af30c7647fc7 ancestor b@000000000000 launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob) merge tool returned: 0 rev: versions differ -> m (merge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) my rev@86a2aa42fc76+ other rev@af30c7647fc7 ancestor rev@924404dff337 launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob) merge tool returned: 0 0 files updated, 2 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) -------------- M b C a -------------- $ tm "up b " "nm a b" " " "13 merge b no ancestor" created new head -------------- test L:up b R:nm a b W: - 13 merge b no ancestor -------------- searching for copies back to rev 1 unmatched files new in both: b resolving manifests branchmerge: True, force: False, partial: False ancestor: 924404dff337, local: 59318016310c+, remote: bdb19105162a preserving b for resolve of b preserving rev for resolve of rev a: other deleted -> r removing a b: both created -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) merging b my b@59318016310c+ other b@bdb19105162a ancestor b@000000000000 rev: versions differ -> m (premerge) picked tool 'python ../merge' for rev (binary False symlink False 
changedelete False) merging rev my rev@59318016310c+ other rev@bdb19105162a ancestor rev@924404dff337 b: both created -> m (merge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) my b@59318016310c+ other b@bdb19105162a ancestor b@000000000000 launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob) merge tool returned: 0 rev: versions differ -> m (merge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) my rev@59318016310c+ other rev@bdb19105162a ancestor rev@924404dff337 launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob) merge tool returned: 0 0 files updated, 2 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) -------------- M b -------------- $ tm "nc a b" "up a b" " " "14 merge b no ancestor" created new head -------------- test L:nc a b R:up a b W: - 14 merge b no ancestor -------------- searching for copies back to rev 1 unmatched files new in both: b resolving manifests branchmerge: True, force: False, partial: False ancestor: 924404dff337, local: 86a2aa42fc76+, remote: 8dbce441892a preserving b for resolve of b preserving rev for resolve of rev a: remote is newer -> g getting a b: both created -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) merging b my b@86a2aa42fc76+ other b@8dbce441892a ancestor b@000000000000 rev: versions differ -> m (premerge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) merging rev my rev@86a2aa42fc76+ other rev@8dbce441892a ancestor rev@924404dff337 b: both created -> m (merge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) my b@86a2aa42fc76+ other b@8dbce441892a ancestor b@000000000000 launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob) merge tool returned: 0 rev: versions differ -> m (merge) picked tool 'python ../merge' for rev (binary False symlink 
False changedelete False) my rev@86a2aa42fc76+ other rev@8dbce441892a ancestor rev@924404dff337 launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob) merge tool returned: 0 1 files updated, 2 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) -------------- M a M b -------------- $ tm "up b " "nm a b" " " "15 merge b no ancestor, remove a" created new head -------------- test L:up b R:nm a b W: - 15 merge b no ancestor, remove a -------------- searching for copies back to rev 1 unmatched files new in both: b resolving manifests branchmerge: True, force: False, partial: False ancestor: 924404dff337, local: 59318016310c+, remote: bdb19105162a preserving b for resolve of b preserving rev for resolve of rev a: other deleted -> r removing a b: both created -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) merging b my b@59318016310c+ other b@bdb19105162a ancestor b@000000000000 rev: versions differ -> m (premerge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) merging rev my rev@59318016310c+ other rev@bdb19105162a ancestor rev@924404dff337 b: both created -> m (merge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) my b@59318016310c+ other b@bdb19105162a ancestor b@000000000000 launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob) merge tool returned: 0 rev: versions differ -> m (merge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) my rev@59318016310c+ other rev@bdb19105162a ancestor rev@924404dff337 launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob) merge tool returned: 0 0 files updated, 2 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) -------------- M b -------------- $ tm "nc a b" "up a b" " " "16 get a, merge b no ancestor" created new head -------------- test L:nc a b R:up a b W: 
- 16 get a, merge b no ancestor -------------- searching for copies back to rev 1 unmatched files new in both: b resolving manifests branchmerge: True, force: False, partial: False ancestor: 924404dff337, local: 86a2aa42fc76+, remote: 8dbce441892a preserving b for resolve of b preserving rev for resolve of rev a: remote is newer -> g getting a b: both created -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) merging b my b@86a2aa42fc76+ other b@8dbce441892a ancestor b@000000000000 rev: versions differ -> m (premerge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) merging rev my rev@86a2aa42fc76+ other rev@8dbce441892a ancestor rev@924404dff337 b: both created -> m (merge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) my b@86a2aa42fc76+ other b@8dbce441892a ancestor b@000000000000 launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob) merge tool returned: 0 rev: versions differ -> m (merge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) my rev@86a2aa42fc76+ other rev@8dbce441892a ancestor rev@924404dff337 launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob) merge tool returned: 0 1 files updated, 2 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) -------------- M a M b -------------- $ tm "up a b" "nc a b" " " "17 keep a, merge b no ancestor" created new head -------------- test L:up a b R:nc a b W: - 17 keep a, merge b no ancestor -------------- searching for copies back to rev 1 unmatched files new in both: b resolving manifests branchmerge: True, force: False, partial: False ancestor: 924404dff337, local: 0b76e65c8289+, remote: 4ce40f5aca24 preserving b for resolve of b preserving rev for resolve of rev a: remote unchanged -> k b: both created -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete 
False) merging b my b@0b76e65c8289+ other b@4ce40f5aca24 ancestor b@000000000000 rev: versions differ -> m (premerge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) merging rev my rev@0b76e65c8289+ other rev@4ce40f5aca24 ancestor rev@924404dff337 b: both created -> m (merge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) my b@0b76e65c8289+ other b@4ce40f5aca24 ancestor b@000000000000 launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob) merge tool returned: 0 rev: versions differ -> m (merge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) my rev@0b76e65c8289+ other rev@4ce40f5aca24 ancestor rev@924404dff337 launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob) merge tool returned: 0 0 files updated, 2 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) -------------- M b C a -------------- $ tm "nm a b" "up a b" " " "18 merge b no ancestor" created new head -------------- test L:nm a b R:up a b W: - 18 merge b no ancestor -------------- searching for copies back to rev 1 unmatched files new in both: b resolving manifests branchmerge: True, force: False, partial: False ancestor: 924404dff337, local: 02963e448370+, remote: 8dbce441892a preserving b for resolve of b preserving rev for resolve of rev a: prompt deleted/changed -> m (premerge) picked tool ':prompt' for a (binary False symlink False changedelete True) remote changed a which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? 
u b: both created -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) merging b my b@02963e448370+ other b@8dbce441892a ancestor b@000000000000 rev: versions differ -> m (premerge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) merging rev my rev@02963e448370+ other rev@8dbce441892a ancestor rev@924404dff337 b: both created -> m (merge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) my b@02963e448370+ other b@8dbce441892a ancestor b@000000000000 launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob) merge tool returned: 0 rev: versions differ -> m (merge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) my rev@02963e448370+ other rev@8dbce441892a ancestor rev@924404dff337 launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob) merge tool returned: 0 0 files updated, 2 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon -------------- M a M b abort: unresolved merge conflicts (see "hg help resolve") -------------- $ tm "up a b" "nm a b" " " "19 merge b no ancestor, prompt remove a" created new head -------------- test L:up a b R:nm a b W: - 19 merge b no ancestor, prompt remove a -------------- searching for copies back to rev 1 unmatched files new in both: b resolving manifests branchmerge: True, force: False, partial: False ancestor: 924404dff337, local: 0b76e65c8289+, remote: bdb19105162a preserving a for resolve of a preserving b for resolve of b preserving rev for resolve of rev a: prompt changed/deleted -> m (premerge) picked tool ':prompt' for a (binary False symlink False changedelete True) local changed a which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? 
u b: both created -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) merging b my b@0b76e65c8289+ other b@bdb19105162a ancestor b@000000000000 rev: versions differ -> m (premerge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) merging rev my rev@0b76e65c8289+ other rev@bdb19105162a ancestor rev@924404dff337 b: both created -> m (merge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) my b@0b76e65c8289+ other b@bdb19105162a ancestor b@000000000000 launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob) merge tool returned: 0 rev: versions differ -> m (merge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) my rev@0b76e65c8289+ other rev@bdb19105162a ancestor rev@924404dff337 launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob) merge tool returned: 0 0 files updated, 2 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon -------------- M b C a abort: unresolved merge conflicts (see "hg help resolve") -------------- $ tm "up a " "um a b" " " "20 merge a and b to b, remove a" created new head -------------- test L:up a R:um a b W: - 20 merge a and b to b, remove a -------------- searching for copies back to rev 1 unmatched files in other: b all copies found (* = to merge, ! 
= divergent, % = renamed and deleted): src: 'a' -> dst: 'b' * checking for directory renames resolving manifests branchmerge: True, force: False, partial: False ancestor: 924404dff337, local: e300d1c794ec+, remote: 49b6d8032493 preserving a for resolve of b preserving rev for resolve of rev removing a b: remote moved from a -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) merging a and b to b my b@e300d1c794ec+ other b@49b6d8032493 ancestor a@924404dff337 rev: versions differ -> m (premerge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) merging rev my rev@e300d1c794ec+ other rev@49b6d8032493 ancestor rev@924404dff337 b: remote moved from a -> m (merge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) my b@e300d1c794ec+ other b@49b6d8032493 ancestor a@924404dff337 launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob) merge tool returned: 0 rev: versions differ -> m (merge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) my rev@e300d1c794ec+ other rev@49b6d8032493 ancestor rev@924404dff337 launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob) merge tool returned: 0 0 files updated, 2 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) -------------- M b a -------------- $ tm "um a b" "up a " " " "21 merge a and b to b" created new head -------------- test L:um a b R:up a W: - 21 merge a and b to b -------------- searching for copies back to rev 1 unmatched files in local: b all copies found (* = to merge, ! 
= divergent, % = renamed and deleted): src: 'a' -> dst: 'b' * checking for directory renames resolving manifests branchmerge: True, force: False, partial: False ancestor: 924404dff337, local: 62e7bf090eba+, remote: f4db7e329e71 preserving b for resolve of b preserving rev for resolve of rev b: local copied/moved from a -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) merging b and a to b my b@62e7bf090eba+ other a@f4db7e329e71 ancestor a@924404dff337 rev: versions differ -> m (premerge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) merging rev my rev@62e7bf090eba+ other rev@f4db7e329e71 ancestor rev@924404dff337 b: local copied/moved from a -> m (merge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) my b@62e7bf090eba+ other a@f4db7e329e71 ancestor a@924404dff337 launching merge tool: python ../merge *$TESTTMP/t/t/b* * * (glob) merge tool returned: 0 rev: versions differ -> m (merge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) my rev@62e7bf090eba+ other rev@f4db7e329e71 ancestor rev@924404dff337 launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob) merge tool returned: 0 0 files updated, 2 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) -------------- M b a -------------- m "nm a b" "um x a" " " "22 get a, keep b" $ tm "nm a b" "up a c" " " "23 get c, keep b" created new head -------------- test L:nm a b R:up a c W: - 23 get c, keep b -------------- searching for copies back to rev 1 unmatched files in local: b unmatched files in other: c all copies found (* = to merge, ! 
= divergent, % = renamed and deleted): src: 'a' -> dst: 'b' * checking for directory renames resolving manifests branchmerge: True, force: False, partial: False ancestor: 924404dff337, local: 02963e448370+, remote: 2b958612230f preserving b for resolve of b preserving rev for resolve of rev c: remote created -> g getting c b: local copied/moved from a -> m (premerge) picked tool 'python ../merge' for b (binary False symlink False changedelete False) merging b and a to b my b@02963e448370+ other a@2b958612230f ancestor a@924404dff337 premerge successful rev: versions differ -> m (premerge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) merging rev my rev@02963e448370+ other rev@2b958612230f ancestor rev@924404dff337 rev: versions differ -> m (merge) picked tool 'python ../merge' for rev (binary False symlink False changedelete False) my rev@02963e448370+ other rev@2b958612230f ancestor rev@924404dff337 launching merge tool: python ../merge *$TESTTMP/t/t/rev* * * (glob) merge tool returned: 0 1 files updated, 2 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) -------------- M b a M c -------------- $ cd .. 
Systematic and terse testing of merge merges and ancestor calculation: Expected result: \ a m1 m2 dst 0 - f f f "versions differ" 1 f g g g "versions differ" 2 f f f f "versions differ" 3 f f g f+g "remote copied to " + f 4 f f g g "remote moved to " + f 5 f g f f+g "local copied to " + f2 6 f g f g "local moved to " + f2 7 - (f) f f "remote differs from untracked local" 8 f (f) f f "remote differs from untracked local" $ hg init ancestortest $ cd ancestortest $ for x in 1 2 3 4 5 6 8; do mkdir $x; echo a > $x/f; done $ hg ci -Aqm "a" $ mkdir 0 $ touch 0/f $ hg mv 1/f 1/g $ hg cp 5/f 5/g $ hg mv 6/f 6/g $ hg rm 8/f $ for x in */*; do echo m1 > $x; done $ hg ci -Aqm "m1" $ hg up -qr0 $ mkdir 0 7 $ touch 0/f 7/f $ hg mv 1/f 1/g $ hg cp 3/f 3/g $ hg mv 4/f 4/g $ for x in */*; do echo m2 > $x; done $ hg ci -Aqm "m2" $ hg up -qr1 $ mkdir 7 8 $ echo m > 7/f $ echo m > 8/f $ hg merge -f --tool internal:dump -v --debug -r2 | sed '/^resolving manifests/,$d' 2> /dev/null searching for copies back to rev 1 unmatched files in local: 5/g 6/g unmatched files in other: 3/g 4/g 7/f unmatched files new in both: 0/f 1/g all copies found (* = to merge, ! 
= divergent, % = renamed and deleted): src: '3/f' -> dst: '3/g' * src: '4/f' -> dst: '4/g' * src: '5/f' -> dst: '5/g' * src: '6/f' -> dst: '6/g' * checking for directory renames $ hg mani 0/f 1/g 2/f 3/f 4/f 5/f 5/g 6/g $ for f in */*; do echo $f:; cat $f; done 0/f: m1 0/f.base: 0/f.local: m1 0/f.orig: m1 0/f.other: m2 1/g: m1 1/g.base: a 1/g.local: m1 1/g.orig: m1 1/g.other: m2 2/f: m1 2/f.base: a 2/f.local: m1 2/f.orig: m1 2/f.other: m2 3/f: m1 3/f.base: a 3/f.local: m1 3/f.orig: m1 3/f.other: m2 3/g: m1 3/g.base: a 3/g.local: m1 3/g.orig: m1 3/g.other: m2 4/g: m1 4/g.base: a 4/g.local: m1 4/g.orig: m1 4/g.other: m2 5/f: m1 5/f.base: a 5/f.local: m1 5/f.orig: m1 5/f.other: m2 5/g: m1 5/g.base: a 5/g.local: m1 5/g.orig: m1 5/g.other: m2 6/g: m1 6/g.base: a 6/g.local: m1 6/g.orig: m1 6/g.other: m2 7/f: m 7/f.base: 7/f.local: m 7/f.orig: m 7/f.other: m2 8/f: m2 $ cd .. mercurial-3.7.3/tests/test-bookmarks.t0000644000175000017500000005341112676531525017427 0ustar mpmmpm00000000000000 $ hg init no bookmarks $ hg bookmarks no bookmarks set $ hg bookmarks -Tjson [ ] bookmark rev -1 $ hg bookmark X list bookmarks $ hg bookmarks * X -1:000000000000 list bookmarks with color $ hg --config extensions.color= --config color.mode=ansi \ > bookmarks --color=always \x1b[0;32m * \x1b[0m\x1b[0;32mX\x1b[0m\x1b[0;32m -1:000000000000\x1b[0m (esc) $ echo a > a $ hg add a $ hg commit -m 0 bookmark X moved to rev 0 $ hg bookmarks * X 0:f7b1eb17ad24 look up bookmark $ hg log -r X changeset: 0:f7b1eb17ad24 bookmark: X tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0 second bookmark for rev 0, command should work even with ui.strict on $ hg --config ui.strict=1 bookmark X2 bookmark rev -1 again $ hg bookmark -r null Y list bookmarks $ hg bookmarks X 0:f7b1eb17ad24 * X2 0:f7b1eb17ad24 Y -1:000000000000 $ echo b > b $ hg add b $ hg commit -m 1 $ hg bookmarks -Tjson [ { "active": false, "bookmark": "X", "node": "f7b1eb17ad24730a1651fccd46c43826d1bbc2ac", "rev": 0 }, { 
"active": true, "bookmark": "X2", "node": "925d80f479bb026b0fb3deb27503780b13f74123", "rev": 1 }, { "active": false, "bookmark": "Y", "node": "0000000000000000000000000000000000000000", "rev": -1 } ] bookmarks revset $ hg log -r 'bookmark()' changeset: 0:f7b1eb17ad24 bookmark: X user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0 changeset: 1:925d80f479bb bookmark: X2 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1 $ hg log -r 'bookmark(Y)' $ hg log -r 'bookmark(X2)' changeset: 1:925d80f479bb bookmark: X2 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1 $ hg log -r 'bookmark("re:X")' changeset: 0:f7b1eb17ad24 bookmark: X user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0 changeset: 1:925d80f479bb bookmark: X2 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1 $ hg log -r 'bookmark("literal:X")' changeset: 0:f7b1eb17ad24 bookmark: X user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0 $ hg log -r 'bookmark(unknown)' abort: bookmark 'unknown' does not exist! [255] $ hg log -r 'bookmark("literal:unknown")' abort: bookmark 'unknown' does not exist! [255] $ hg log -r 'bookmark("re:unknown")' abort: no bookmarks exist that match 'unknown'! 
[255] $ hg log -r 'present(bookmark("literal:unknown"))' $ hg log -r 'present(bookmark("re:unknown"))' $ hg help revsets | grep 'bookmark(' "bookmark([name])" bookmarks X and X2 moved to rev 1, Y at rev -1 $ hg bookmarks X 0:f7b1eb17ad24 * X2 1:925d80f479bb Y -1:000000000000 bookmark rev 0 again $ hg bookmark -r 0 Z $ hg update X 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (activating bookmark X) $ echo c > c $ hg add c $ hg commit -m 2 created new head bookmarks X moved to rev 2, Y at rev -1, Z at rev 0 $ hg bookmarks * X 2:db815d6d32e6 X2 1:925d80f479bb Y -1:000000000000 Z 0:f7b1eb17ad24 rename nonexistent bookmark $ hg bookmark -m A B abort: bookmark 'A' does not exist [255] rename to existent bookmark $ hg bookmark -m X Y abort: bookmark 'Y' already exists (use -f to force) [255] force rename to existent bookmark $ hg bookmark -f -m X Y list bookmarks $ hg bookmark X2 1:925d80f479bb * Y 2:db815d6d32e6 Z 0:f7b1eb17ad24 bookmarks from a revset $ hg bookmark -r '.^1' REVSET $ hg bookmark -r ':tip' TIP $ hg up -q TIP $ hg bookmarks REVSET 0:f7b1eb17ad24 * TIP 2:db815d6d32e6 X2 1:925d80f479bb Y 2:db815d6d32e6 Z 0:f7b1eb17ad24 $ hg bookmark -d REVSET $ hg bookmark -d TIP rename without new name or multiple names $ hg bookmark -m Y abort: new bookmark name required [255] $ hg bookmark -m Y Y2 Y3 abort: only one new bookmark name allowed [255] delete without name $ hg bookmark -d abort: bookmark name required [255] delete nonexistent bookmark $ hg bookmark -d A abort: bookmark 'A' does not exist [255] bookmark name with spaces should be stripped $ hg bookmark ' x y ' list bookmarks $ hg bookmarks X2 1:925d80f479bb Y 2:db815d6d32e6 Z 0:f7b1eb17ad24 * x y 2:db815d6d32e6 look up stripped bookmark name $ hg log -r '"x y"' changeset: 2:db815d6d32e6 bookmark: Y bookmark: x y tag: tip parent: 0:f7b1eb17ad24 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 reject bookmark name with newline $ hg bookmark ' > ' abort: bookmark names cannot 
consist entirely of whitespace [255] $ hg bookmark -m Z ' > ' abort: bookmark names cannot consist entirely of whitespace [255] bookmark with reserved name $ hg bookmark tip abort: the name 'tip' is reserved [255] $ hg bookmark . abort: the name '.' is reserved [255] $ hg bookmark null abort: the name 'null' is reserved [255] bookmark with existing name $ hg bookmark X2 abort: bookmark 'X2' already exists (use -f to force) [255] $ hg bookmark -m Y Z abort: bookmark 'Z' already exists (use -f to force) [255] bookmark with name of branch $ hg bookmark default abort: a bookmark cannot have the name of an existing branch [255] $ hg bookmark -m Y default abort: a bookmark cannot have the name of an existing branch [255] bookmark with integer name $ hg bookmark 10 abort: cannot use an integer as a name [255] incompatible options $ hg bookmark -m Y -d Z abort: --delete and --rename are incompatible [255] $ hg bookmark -r 1 -d Z abort: --rev is incompatible with --delete [255] $ hg bookmark -r 1 -m Z Y abort: --rev is incompatible with --rename [255] force bookmark with existing name $ hg bookmark -f X2 force bookmark back to where it was, should deactivate it $ hg bookmark -fr1 X2 $ hg bookmarks X2 1:925d80f479bb Y 2:db815d6d32e6 Z 0:f7b1eb17ad24 x y 2:db815d6d32e6 forward bookmark to descendant without --force $ hg bookmark Z moving bookmark 'Z' forward from f7b1eb17ad24 list bookmarks $ hg bookmark X2 1:925d80f479bb Y 2:db815d6d32e6 * Z 2:db815d6d32e6 x y 2:db815d6d32e6 revision but no bookmark name $ hg bookmark -r . 
abort: bookmark name required [255] bookmark name with whitespace only $ hg bookmark ' ' abort: bookmark names cannot consist entirely of whitespace [255] $ hg bookmark -m Y ' ' abort: bookmark names cannot consist entirely of whitespace [255] invalid bookmark $ hg bookmark 'foo:bar' abort: ':' cannot be used in a name [255] $ hg bookmark 'foo > bar' abort: '\n' cannot be used in a name [255] the bookmark extension should be ignored now that it is part of core $ echo "[extensions]" >> $HGRCPATH $ echo "bookmarks=" >> $HGRCPATH $ hg bookmarks X2 1:925d80f479bb Y 2:db815d6d32e6 * Z 2:db815d6d32e6 x y 2:db815d6d32e6 test summary $ hg summary parent: 2:db815d6d32e6 tip 2 branch: default bookmarks: *Z Y x y commit: (clean) update: 1 new changesets, 2 branch heads (merge) phases: 3 draft test id $ hg id db815d6d32e6 tip Y/Z/x y test rollback $ echo foo > f1 $ hg bookmark tmp-rollback $ hg ci -Amr adding f1 $ hg bookmarks X2 1:925d80f479bb Y 2:db815d6d32e6 Z 2:db815d6d32e6 * tmp-rollback 3:2bf5cfec5864 x y 2:db815d6d32e6 $ hg rollback repository tip rolled back to revision 2 (undo commit) working directory now based on revision 2 $ hg bookmarks X2 1:925d80f479bb Y 2:db815d6d32e6 Z 2:db815d6d32e6 * tmp-rollback 2:db815d6d32e6 x y 2:db815d6d32e6 $ hg bookmark -f Z -r 1 $ hg rollback repository tip rolled back to revision 2 (undo bookmark) $ hg bookmarks X2 1:925d80f479bb Y 2:db815d6d32e6 Z 2:db815d6d32e6 * tmp-rollback 2:db815d6d32e6 x y 2:db815d6d32e6 $ hg bookmark -d tmp-rollback activate bookmark on working dir parent without --force $ hg bookmark --inactive Z $ hg bookmark Z test clone $ hg bookmark -r 2 -i @ $ hg bookmark -r 2 -i a@ $ hg bookmarks @ 2:db815d6d32e6 X2 1:925d80f479bb Y 2:db815d6d32e6 * Z 2:db815d6d32e6 a@ 2:db815d6d32e6 x y 2:db815d6d32e6 $ hg clone . 
cloned-bookmarks updating to bookmark @ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R cloned-bookmarks bookmarks * @ 2:db815d6d32e6 X2 1:925d80f479bb Y 2:db815d6d32e6 Z 2:db815d6d32e6 a@ 2:db815d6d32e6 x y 2:db815d6d32e6 test clone with pull protocol $ hg clone --pull . cloned-bookmarks-pull requesting all changes adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 3 files (+1 heads) updating to bookmark @ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R cloned-bookmarks-pull bookmarks * @ 2:db815d6d32e6 X2 1:925d80f479bb Y 2:db815d6d32e6 Z 2:db815d6d32e6 a@ 2:db815d6d32e6 x y 2:db815d6d32e6 delete multiple bookmarks at once $ hg bookmark -d @ a@ test clone with a bookmark named "default" (issue3677) $ hg bookmark -r 1 -f -i default $ hg clone . cloned-bookmark-default updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R cloned-bookmark-default bookmarks X2 1:925d80f479bb Y 2:db815d6d32e6 Z 2:db815d6d32e6 default 1:925d80f479bb x y 2:db815d6d32e6 $ hg -R cloned-bookmark-default parents -q 2:db815d6d32e6 $ hg bookmark -d default test clone with a specific revision $ hg clone -r 925d80 . cloned-bookmarks-rev adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R cloned-bookmarks-rev bookmarks X2 1:925d80f479bb test clone with update to a bookmark $ hg clone -u Z . ../cloned-bookmarks-update updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R ../cloned-bookmarks-update bookmarks X2 1:925d80f479bb Y 2:db815d6d32e6 * Z 2:db815d6d32e6 x y 2:db815d6d32e6 create bundle with two heads $ hg clone . 
tobundle updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo x > tobundle/x $ hg -R tobundle add tobundle/x $ hg -R tobundle commit -m'x' $ hg -R tobundle update -r -2 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo y > tobundle/y $ hg -R tobundle branch test marked working directory as branch test (branches are permanent and global, did you want a bookmark?) $ hg -R tobundle add tobundle/y $ hg -R tobundle commit -m'y' $ hg -R tobundle bundle tobundle.hg searching for changes 2 changesets found $ hg unbundle tobundle.hg adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) update to active bookmark if it's not the parent $ hg summary parent: 2:db815d6d32e6 2 branch: default bookmarks: *Z Y x y commit: 1 added, 1 unknown (new branch head) update: 2 new changesets (update) phases: 5 draft $ hg update 1 files updated, 0 files merged, 0 files removed, 0 files unresolved updating bookmark Z $ hg bookmarks X2 1:925d80f479bb Y 2:db815d6d32e6 * Z 3:125c9a1d6df6 x y 2:db815d6d32e6 pull --update works the same as pull && update $ hg bookmark -r3 Y moving bookmark 'Y' forward from db815d6d32e6 $ cp -r ../cloned-bookmarks-update ../cloned-bookmarks-manual-update (manual version) $ hg -R ../cloned-bookmarks-manual-update update Y 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (activating bookmark Y) $ hg -R ../cloned-bookmarks-manual-update pull . pulling from . 
searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files (+1 heads) updating bookmark Y updating bookmark Z (run 'hg heads' to see heads, 'hg merge' to merge) (# tests strange but with --date crashing when bookmark have to move) $ hg -R ../cloned-bookmarks-manual-update update -d 1986 abort: revision matching date not found [255] $ hg -R ../cloned-bookmarks-manual-update update updating to active bookmark Y 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (activating bookmark Y) (all in one version) $ hg -R ../cloned-bookmarks-update update Y 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (activating bookmark Y) $ hg -R ../cloned-bookmarks-update pull --update . pulling from . searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files (+1 heads) updating bookmark Y updating bookmark Z updating to active bookmark Y 1 files updated, 0 files merged, 0 files removed, 0 files unresolved test wrongly formated bookmark $ echo '' >> .hg/bookmarks $ hg bookmarks X2 1:925d80f479bb Y 3:125c9a1d6df6 * Z 3:125c9a1d6df6 x y 2:db815d6d32e6 $ echo "Ican'thasformatedlines" >> .hg/bookmarks $ hg bookmarks malformed line in .hg/bookmarks: "Ican'thasformatedlines" X2 1:925d80f479bb Y 3:125c9a1d6df6 * Z 3:125c9a1d6df6 x y 2:db815d6d32e6 test missing revisions $ echo "925d80f479bc z" > .hg/bookmarks $ hg book no bookmarks set test stripping a non-checked-out but bookmarked revision $ hg log --graph o changeset: 4:9ba5f110a0b3 | branch: test | tag: tip | parent: 2:db815d6d32e6 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: y | | @ changeset: 3:125c9a1d6df6 |/ user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: x | o changeset: 2:db815d6d32e6 | parent: 0:f7b1eb17ad24 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: 2 | | o changeset: 1:925d80f479bb |/ user: test | date: Thu Jan 01 
00:00:00 1970 +0000 | summary: 1 | o changeset: 0:f7b1eb17ad24 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0 $ hg book should-end-on-two $ hg co --clean 4 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (leaving bookmark should-end-on-two) $ hg book four $ hg --config extensions.mq= strip 3 saved backup bundle to * (glob) should-end-on-two should end up pointing to revision 2, as that's the tipmost surviving ancestor of the stripped revision. $ hg log --graph @ changeset: 3:9ba5f110a0b3 | branch: test | bookmark: four | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: y | o changeset: 2:db815d6d32e6 | bookmark: should-end-on-two | parent: 0:f7b1eb17ad24 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: 2 | | o changeset: 1:925d80f479bb |/ user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: 1 | o changeset: 0:f7b1eb17ad24 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0 test non-linear update not clearing active bookmark $ hg up 1 1 files updated, 0 files merged, 2 files removed, 0 files unresolved (leaving bookmark four) $ hg book drop $ hg up -C 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (leaving bookmark drop) $ hg sum parent: 2:db815d6d32e6 2 branch: default bookmarks: should-end-on-two commit: 2 unknown (clean) update: 1 new changesets, 2 branch heads (merge) phases: 4 draft $ hg book drop 1:925d80f479bb four 3:9ba5f110a0b3 should-end-on-two 2:db815d6d32e6 $ hg book -d drop $ hg up four 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (activating bookmark four) no-op update doesn't deactive bookmarks $ hg up 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg sum parent: 3:9ba5f110a0b3 tip y branch: test bookmarks: *four commit: 2 unknown (clean) update: (current) phases: 4 draft test clearing divergent bookmarks of linear ancestors $ hg bookmark Z -r 0 $ hg bookmark Z@1 -r 1 $ hg bookmark Z@2 -r 2 $ hg 
bookmark Z@3 -r 3 $ hg book Z 0:f7b1eb17ad24 Z@1 1:925d80f479bb Z@2 2:db815d6d32e6 Z@3 3:9ba5f110a0b3 * four 3:9ba5f110a0b3 should-end-on-two 2:db815d6d32e6 $ hg bookmark Z moving bookmark 'Z' forward from f7b1eb17ad24 $ hg book * Z 3:9ba5f110a0b3 Z@1 1:925d80f479bb four 3:9ba5f110a0b3 should-end-on-two 2:db815d6d32e6 test clearing only a single divergent bookmark across branches $ hg book foo -r 1 $ hg book foo@1 -r 0 $ hg book foo@2 -r 2 $ hg book foo@3 -r 3 $ hg book foo -r foo@3 $ hg book * Z 3:9ba5f110a0b3 Z@1 1:925d80f479bb foo 3:9ba5f110a0b3 foo@1 0:f7b1eb17ad24 foo@2 2:db815d6d32e6 four 3:9ba5f110a0b3 should-end-on-two 2:db815d6d32e6 pull --update works the same as pull && update (case #2) It is assumed that "hg pull" itself doesn't update current active bookmark ('Y' in tests below). $ hg pull -q ../cloned-bookmarks-update divergent bookmark Z stored as Z@2 (pulling revision on another named branch with --update updates neither the working directory nor current active bookmark: "no-op" case) $ echo yy >> y $ hg commit -m yy $ hg -R ../cloned-bookmarks-update bookmarks | grep ' Y ' * Y 3:125c9a1d6df6 $ hg -R ../cloned-bookmarks-update pull . --update pulling from . searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files divergent bookmark Z stored as Z@default adding remote bookmark foo adding remote bookmark four adding remote bookmark should-end-on-two 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R ../cloned-bookmarks-update parents -T "{rev}:{node|short}\n" 3:125c9a1d6df6 $ hg -R ../cloned-bookmarks-update bookmarks | grep ' Y ' * Y 3:125c9a1d6df6 (pulling revision on current named/topological branch with --update updates the working directory and current active bookmark) $ hg update -C -q 125c9a1d6df6 $ echo xx >> x $ hg commit -m xx $ hg -R ../cloned-bookmarks-update bookmarks | grep ' Y ' * Y 3:125c9a1d6df6 $ hg -R ../cloned-bookmarks-update pull . 
--update pulling from . searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files divergent bookmark Z stored as Z@default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved updating bookmark Y $ hg -R ../cloned-bookmarks-update parents -T "{rev}:{node|short}\n" 6:81dcce76aa0b $ hg -R ../cloned-bookmarks-update bookmarks | grep ' Y ' * Y 6:81dcce76aa0b mercurial-3.7.3/tests/test-push-warn.t0000644000175000017500000004214412676531525017364 0ustar mpmmpm00000000000000 $ cat << EOF >> $HGRCPATH > [experimental] > # drop me once bundle2 is the default, > # added to get test change early. > bundle2-exp = True > EOF $ hg init a $ cd a $ echo foo > t1 $ hg add t1 $ hg commit -m "1" $ cd .. $ hg clone a b updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd a $ echo foo > t2 $ hg add t2 $ hg commit -m "2" $ cd ../b $ echo foo > t3 $ hg add t3 $ hg commit -m "3" Specifying a revset that evaluates to null will abort $ hg push -r '0 & 1' ../a pushing to ../a abort: specified revisions evaluate to an empty set (use different revision arguments) [255] $ hg push ../a pushing to ../a searching for changes remote has heads on branch 'default' that are not known locally: 1c9246a22a0a abort: push creates new remote head 1e108cc5548c! (pull and merge or see "hg help push" for details about pushing new heads) [255] $ hg push --debug ../a pushing to ../a query 1; heads searching for changes taking quick initial sample query 2; still undecided: 1, sample size is: 1 2 total queries listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" listing keys for "bookmarks" remote has heads on branch 'default' that are not known locally: 1c9246a22a0a new remote heads on branch 'default': 1e108cc5548c abort: push creates new remote head 1e108cc5548c! 
(pull and merge or see "hg help push" for details about pushing new heads) [255] $ hg pull ../a pulling from ../a searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg push ../a pushing to ../a searching for changes abort: push creates new remote head 1e108cc5548c! (merge or see "hg help push" for details about pushing new heads) [255] $ hg merge 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg commit -m "4" $ hg push ../a pushing to ../a searching for changes adding changesets adding manifests adding file changes added 2 changesets with 1 changes to 1 files $ cd .. $ hg init c $ cd c $ for i in 0 1 2; do > echo $i >> foo > hg ci -Am $i > done adding foo $ cd .. $ hg clone c d updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd d $ for i in 0 1; do > hg co -C $i > echo d-$i >> foo > hg ci -m d-$i > done 1 files updated, 0 files merged, 0 files removed, 0 files unresolved created new head 1 files updated, 0 files merged, 0 files removed, 0 files unresolved created new head $ HGMERGE=true hg merge 3 merging foo 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m c-d $ hg push ../c pushing to ../c searching for changes abort: push creates new remote head 6346d66eb9f5! (merge or see "hg help push" for details about pushing new heads) [255] $ hg push -r 2 ../c pushing to ../c searching for changes no changes found [1] $ hg push -r 3 ../c pushing to ../c searching for changes abort: push creates new remote head a5dda829a167! 
(merge or see "hg help push" for details about pushing new heads) [255] $ hg push -v -r 3 -r 4 ../c pushing to ../c searching for changes new remote heads on branch 'default': a5dda829a167 ee8fbc7a0295 abort: push creates new remote head a5dda829a167! (merge or see "hg help push" for details about pushing new heads) [255] $ hg push -v -f -r 3 -r 4 ../c pushing to ../c searching for changes 2 changesets found uncompressed size of bundle content: 348 (changelog) 326 (manifests) 253 foo adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files (+2 heads) $ hg push -r 5 ../c pushing to ../c searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (-1 heads) $ hg in ../c comparing with ../c searching for changes no changes found [1] Issue450: push -r warns about remote head creation even if no heads will be created $ hg init ../e $ hg push -r 0 ../e pushing to ../e searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files $ hg push -r 1 ../e pushing to ../e searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files $ cd .. Issue736: named branches are not considered for detection of unmerged heads in "hg push" $ hg init f $ cd f $ hg -q branch a $ echo 0 > foo $ hg -q ci -Am 0 $ echo 1 > foo $ hg -q ci -m 1 $ hg -q up 0 $ echo 2 > foo $ hg -q ci -m 2 $ hg -q up 0 $ hg -q branch b $ echo 3 > foo $ hg -q ci -m 3 $ cd .. $ hg -q clone f g $ cd g Push on existing branch and new branch: $ hg -q up 1 $ echo 4 > foo $ hg -q ci -m 4 $ hg -q up 0 $ echo 5 > foo $ hg -q branch c $ hg -q ci -m 5 $ hg push ../f pushing to ../f searching for changes abort: push creates new remote branches: c! 
(use 'hg push --new-branch' to create new remote branches) [255] $ hg push -r 4 -r 5 ../f pushing to ../f searching for changes abort: push creates new remote branches: c! (use 'hg push --new-branch' to create new remote branches) [255] Multiple new branches: $ hg -q branch d $ echo 6 > foo $ hg -q ci -m 6 $ hg push ../f pushing to ../f searching for changes abort: push creates new remote branches: c, d! (use 'hg push --new-branch' to create new remote branches) [255] $ hg push -r 4 -r 6 ../f pushing to ../f searching for changes abort: push creates new remote branches: c, d! (use 'hg push --new-branch' to create new remote branches) [255] $ cd ../g Fail on multiple head push: $ hg -q up 1 $ echo 7 > foo $ hg -q ci -m 7 $ hg push -r 4 -r 7 ../f pushing to ../f searching for changes abort: push creates new remote head 0b715ef6ff8f on branch 'a'! (merge or see "hg help push" for details about pushing new heads) [255] Push replacement head on existing branches: $ hg -q up 3 $ echo 8 > foo $ hg -q ci -m 8 $ hg push -r 7 -r 8 ../f pushing to ../f searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files Merge of branch a to other branch b followed by unrelated push on branch a: $ hg -q up 7 $ HGMERGE=true hg -q merge 8 $ hg -q ci -m 9 $ hg -q up 8 $ echo 10 > foo $ hg -q ci -m 10 $ hg push -r 9 ../f pushing to ../f searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (-1 heads) $ hg push -r 10 ../f pushing to ../f searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) Cheating the counting algorithm: $ hg -q up 9 $ HGMERGE=true hg -q merge 2 $ hg -q ci -m 11 $ hg -q up 1 $ echo 12 > foo $ hg -q ci -m 12 $ hg push -r 11 -r 12 ../f pushing to ../f searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 
files Failed push of new named branch: $ echo 12 > foo $ hg -q ci -m 12a [1] $ hg -q up 11 $ echo 13 > foo $ hg -q branch e $ hg -q ci -m 13d $ hg push -r 12 -r 13 ../f pushing to ../f searching for changes abort: push creates new remote branches: e! (use 'hg push --new-branch' to create new remote branches) [255] Using --new-branch to push new named branch: $ hg push --new-branch -r 12 -r 13 ../f pushing to ../f searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files Pushing multi headed new branch: $ echo 14 > foo $ hg -q branch f $ hg -q ci -m 14 $ echo 15 > foo $ hg -q ci -m 15 $ hg -q up 14 $ echo 16 > foo $ hg -q ci -m 16 $ hg push --branch f --new-branch ../f pushing to ../f searching for changes abort: push creates new branch 'f' with multiple heads (merge or see "hg help push" for details about pushing new heads) [255] $ hg push --branch f --new-branch --force ../f pushing to ../f searching for changes adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 1 files (+1 heads) Checking prepush logic does not allow silently pushing multiple new heads but also doesn't report too many heads: $ cd .. 
$ hg init h $ echo init > h/init $ hg -R h ci -Am init adding init $ echo a > h/a $ hg -R h ci -Am a adding a $ hg clone h i updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R h up 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo b > h/b $ hg -R h ci -Am b adding b created new head $ hg -R i up 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo c > i/c $ hg -R i ci -Am c adding c created new head $ for i in `python $TESTDIR/seq.py 3`; do hg -R h up -q 0; echo $i > h/b; hg -R h ci -qAm$i; done $ hg -R i push h pushing to h searching for changes remote has heads on branch 'default' that are not known locally: 534543e22c29 764f8ec07b96 afe7cc7679f5 ce4212fc8847 abort: push creates new remote head 97bd0c84d346! (pull and merge or see "hg help push" for details about pushing new heads) [255] $ hg -R h up -q 0; echo x > h/b; hg -R h ci -qAmx $ hg -R i push h pushing to h searching for changes remote has heads on branch 'default' that are not known locally: 18ddb72c4590 534543e22c29 764f8ec07b96 afe7cc7679f5 and 1 others abort: push creates new remote head 97bd0c84d346! (pull and merge or see "hg help push" for details about pushing new heads) [255] $ hg -R i push h -v pushing to h searching for changes remote has heads on branch 'default' that are not known locally: 18ddb72c4590 534543e22c29 764f8ec07b96 afe7cc7679f5 ce4212fc8847 new remote heads on branch 'default': 97bd0c84d346 abort: push creates new remote head 97bd0c84d346! (pull and merge or see "hg help push" for details about pushing new heads) [255] Check prepush logic with merged branches: $ hg init j $ hg -R j branch a marked working directory as branch a (branches are permanent and global, did you want a bookmark?) 
$ echo init > j/foo $ hg -R j ci -Am init adding foo $ hg clone j k updating to branch a 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo a1 > j/foo $ hg -R j ci -m a1 $ hg -R k branch b marked working directory as branch b $ echo b > k/foo $ hg -R k ci -m b $ hg -R k up 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R k merge b 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg -R k ci -m merge $ hg -R k push -r a j pushing to j searching for changes abort: push creates new remote branches: b! (use 'hg push --new-branch' to create new remote branches) [255] Prepush -r should not allow you to sneak in new heads: $ hg init l $ cd l $ echo a >> foo $ hg -q add foo $ hg -q branch a $ hg -q ci -ma $ hg -q up null $ echo a >> foo $ hg -q add foo $ hg -q branch b $ hg -q ci -mb $ cd .. $ hg -q clone l m -u a $ cd m $ hg -q merge b $ hg -q ci -mmb $ hg -q up 0 $ echo a >> foo $ hg -q ci -ma2 $ hg -q up 2 $ echo a >> foo $ hg -q branch -f b $ hg -q ci -mb2 $ hg -q merge 3 $ hg -q ci -mma $ hg push ../l -b b pushing to ../l searching for changes abort: push creates new remote head 451211cc22b0 on branch 'a'! (merge or see "hg help push" for details about pushing new heads) [255] $ cd .. Check prepush with new branch head on former topo non-head: $ hg init n $ cd n $ hg branch A marked working directory as branch A (branches are permanent and global, did you want a bookmark?) $ echo a >a $ hg ci -Ama adding a $ hg branch B marked working directory as branch B $ echo b >b $ hg ci -Amb adding b b is now branch head of B, and a topological head a is now branch head of A, but not a topological head $ hg clone . 
inner updating to branch B 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd inner $ hg up B 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo b1 >b1 $ hg ci -Amb1 adding b1 in the clone b1 is now the head of B $ cd .. $ hg up 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo a2 >a2 $ hg ci -Ama2 adding a2 a2 is now the new branch head of A, and a new topological head it replaces a former inner branch head, so it should at most warn about A, not B glog of local: $ hg log -G --template "{rev}: {branches} {desc}\n" @ 2: A a2 | | o 1: B b |/ o 0: A a glog of remote: $ hg log -G -R inner --template "{rev}: {branches} {desc}\n" @ 2: B b1 | o 1: B b | o 0: A a outgoing: $ hg out inner --template "{rev}: {branches} {desc}\n" comparing with inner searching for changes 2: A a2 $ hg push inner pushing to inner searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) $ cd .. Check prepush with new branch head on former topo head: $ hg init o $ cd o $ hg branch A marked working directory as branch A (branches are permanent and global, did you want a bookmark?) $ echo a >a $ hg ci -Ama adding a $ hg branch B marked working directory as branch B $ echo b >b $ hg ci -Amb adding b b is now branch head of B, and a topological head $ hg up 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo a1 >a1 $ hg ci -Ama1 adding a1 a1 is now branch head of A, and a topological head $ hg clone . inner updating to branch A 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd inner $ hg up B 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo b1 >b1 $ hg ci -Amb1 adding b1 in the clone b1 is now the head of B $ cd .. 
$ echo a2 >a2 $ hg ci -Ama2 adding a2 a2 is now the new branch head of A, and a topological head it replaces a former topological and branch head, so this should not warn glog of local: $ hg log -G --template "{rev}: {branches} {desc}\n" @ 3: A a2 | o 2: A a1 | | o 1: B b |/ o 0: A a glog of remote: $ hg log -G -R inner --template "{rev}: {branches} {desc}\n" @ 3: B b1 | | o 2: A a1 | | o | 1: B b |/ o 0: A a outgoing: $ hg out inner --template "{rev}: {branches} {desc}\n" comparing with inner searching for changes 3: A a2 $ hg push inner pushing to inner searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files $ cd .. Check prepush with new branch head and new child of former branch head but child is on different branch: $ hg init p $ cd p $ hg branch A marked working directory as branch A (branches are permanent and global, did you want a bookmark?) $ echo a0 >a $ hg ci -Ama0 adding a $ echo a1 >a $ hg ci -ma1 $ hg up null 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg branch B marked working directory as branch B $ echo b0 >b $ hg ci -Amb0 adding b $ echo b1 >b $ hg ci -mb1 $ hg clone . 
inner updating to branch B 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg up A 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg branch -f B marked working directory as branch B $ echo a3 >a $ hg ci -ma3 created new head $ hg up 3 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg branch -f A marked working directory as branch A $ echo b3 >b $ hg ci -mb3 created new head glog of local: $ hg log -G --template "{rev}: {branches} {desc}\n" @ 5: A b3 | | o 4: B a3 | | o | 3: B b1 | | o | 2: B b0 / o 1: A a1 | o 0: A a0 glog of remote: $ hg log -G -R inner --template "{rev}: {branches} {desc}\n" @ 3: B b1 | o 2: B b0 o 1: A a1 | o 0: A a0 outgoing: $ hg out inner --template "{rev}: {branches} {desc}\n" comparing with inner searching for changes 4: B a3 5: A b3 $ hg push inner pushing to inner searching for changes abort: push creates new remote head 7d0f4fb6cf04 on branch 'A'! (merge or see "hg help push" for details about pushing new heads) [255] $ hg push inner -r4 -r5 pushing to inner searching for changes abort: push creates new remote head 7d0f4fb6cf04 on branch 'A'! (merge or see "hg help push" for details about pushing new heads) [255] $ hg in inner comparing with inner searching for changes no changes found [1] $ cd .. mercurial-3.7.3/tests/test-dispatch.t0000644000175000017500000000226012676531525017232 0ustar mpmmpm00000000000000test command parsing and dispatch $ hg init a $ cd a Redundant options used to crash (issue436): $ hg -v log -v $ hg -v log -v x $ echo a > a $ hg ci -Ama adding a Missing arg: $ hg cat hg cat: invalid arguments hg cat [OPTION]... FILE... 
output the current or given revision of files options ([+] can be repeated): -o --output FORMAT print output to file with formatted name -r --rev REV print the given revision --decode apply any matching decode filter -I --include PATTERN [+] include names matching the given patterns -X --exclude PATTERN [+] exclude names matching the given patterns (use "hg cat -h" to show more help) [255] [defaults] $ hg cat a a $ cat >> $HGRCPATH < [defaults] > cat = -r null > EOF $ hg cat a a: no such file in rev 000000000000 [1] $ cd "$TESTTMP" OSError "No such file or directory" / "The system cannot find the path specified" should include filename even when it is empty $ hg -R a archive '' abort: *: '' (glob) [255] #if no-outer-repo No repo: $ hg cat abort: no repository found in '$TESTTMP' (.hg not found)! [255] #endif mercurial-3.7.3/tests/test-ssh-bundle1.t0000644000175000017500000004030712676531525017564 0ustar mpmmpm00000000000000This test is a duplicate of 'test-http.t' feel free to factor out parts that are not bundle1/bundle2 specific. $ cat << EOF >> $HGRCPATH > [experimental] > # This test is dedicated to interaction through old bundle > bundle2-exp = False > [format] # temporary settings > usegeneraldelta=yes > EOF This test tries to exercise the ssh functionality with a dummy script creating 'remote' repo $ hg init remote $ cd remote $ echo this > foo $ echo this > fooO $ hg ci -A -m "init" foo fooO insert a closed branch (issue4428) $ hg up null 0 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg branch closed marked working directory as branch closed (branches are permanent and global, did you want a bookmark?) $ hg ci -mc0 $ hg ci --close-branch -mc1 $ hg up -q default configure for serving $ cat < .hg/hgrc > [server] > uncompressed = True > > [hooks] > changegroup = printenv.py changegroup-in-remote 0 ../dummylog > EOF $ cd .. 
repo not found error $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/nonexistent local remote: abort: repository nonexistent not found! abort: no suitable response from remote hg! [255] non-existent absolute path $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy//`pwd`/nonexistent local remote: abort: repository /$TESTTMP/nonexistent not found! abort: no suitable response from remote hg! [255] clone remote via stream $ hg clone -e "python \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/remote local-stream streaming all changes 4 files to transfer, 615 bytes of data transferred 615 bytes in * seconds (*) (glob) searching for changes no changes found updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd local-stream $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 3 changesets, 2 total revisions $ hg branches default 0:1160648e36ce $ cd .. clone bookmarks via stream $ hg -R local-stream book mybook $ hg clone -e "python \"$TESTDIR/dummyssh\"" --uncompressed ssh://user@dummy/local-stream stream2 streaming all changes 4 files to transfer, 615 bytes of data transferred 615 bytes in * seconds (*) (glob) searching for changes no changes found updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd stream2 $ hg book mybook 0:1160648e36ce $ cd .. 
$ rm -rf local-stream stream2 clone remote via pull $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local requesting all changes adding changesets adding manifests adding file changes added 3 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved verify $ cd local $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 3 changesets, 2 total revisions $ echo '[hooks]' >> .hg/hgrc $ echo "changegroup = printenv.py changegroup-in-local 0 ../dummylog" >> .hg/hgrc empty default pull $ hg paths default = ssh://user@dummy/remote $ hg pull -e "python \"$TESTDIR/dummyssh\"" pulling from ssh://user@dummy/remote searching for changes no changes found pull from wrong ssh URL $ hg pull -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/doesnotexist pulling from ssh://user@dummy/doesnotexist remote: abort: repository doesnotexist not found! abort: no suitable response from remote hg! 
[255] local change $ echo bleah > foo $ hg ci -m "add" updating rc $ echo "default-push = ssh://user@dummy/remote" >> .hg/hgrc $ echo "[ui]" >> .hg/hgrc $ echo "ssh = python \"$TESTDIR/dummyssh\"" >> .hg/hgrc find outgoing $ hg out ssh://user@dummy/remote comparing with ssh://user@dummy/remote searching for changes changeset: 3:a28a9d1a809c tag: tip parent: 0:1160648e36ce user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add find incoming on the remote side $ hg incoming -R ../remote -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/local comparing with ssh://user@dummy/local searching for changes changeset: 3:a28a9d1a809c tag: tip parent: 0:1160648e36ce user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add find incoming on the remote side (using absolute path) $ hg incoming -R ../remote -e "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/`pwd`" comparing with ssh://user@dummy/$TESTTMP/local searching for changes changeset: 3:a28a9d1a809c tag: tip parent: 0:1160648e36ce user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add push $ hg push pushing to ssh://user@dummy/remote searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files $ cd ../remote check remote tip $ hg tip changeset: 3:a28a9d1a809c tag: tip parent: 0:1160648e36ce user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 4 changesets, 3 total revisions $ hg cat -r tip foo bleah $ echo z > z $ hg ci -A -m z z created new head test pushkeys and bookmarks $ cd ../local $ hg debugpushkey --config ui.ssh="python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote namespaces bookmarks namespaces phases $ hg book foo -r 0 $ hg out -B comparing with ssh://user@dummy/remote searching for changed bookmarks foo 1160648e36ce $ hg push -B foo pushing to ssh://user@dummy/remote 
searching for changes no changes found exporting bookmark foo [1] $ hg debugpushkey --config ui.ssh="python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote bookmarks foo 1160648e36cec0054048a7edc4110c6f84fde594 $ hg book -f foo $ hg push --traceback pushing to ssh://user@dummy/remote searching for changes no changes found updating bookmark foo [1] $ hg book -d foo $ hg in -B comparing with ssh://user@dummy/remote searching for changed bookmarks foo a28a9d1a809c $ hg book -f -r 0 foo $ hg pull -B foo pulling from ssh://user@dummy/remote no changes found updating bookmark foo $ hg book -d foo $ hg push -B foo pushing to ssh://user@dummy/remote searching for changes no changes found deleting remote bookmark foo [1] a bad, evil hook that prints to stdout $ cat < $TESTTMP/badhook > import sys > sys.stdout.write("KABOOM\n") > EOF $ echo '[hooks]' >> ../remote/.hg/hgrc $ echo "changegroup.stdout = python $TESTTMP/badhook" >> ../remote/.hg/hgrc $ echo r > r $ hg ci -A -m z r push should succeed even though it has an unexpected response $ hg push pushing to ssh://user@dummy/remote searching for changes remote has heads on branch 'default' that are not known locally: 6c0482d977a3 remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: KABOOM $ hg -R ../remote heads changeset: 5:1383141674ec tag: tip parent: 3:a28a9d1a809c user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: z changeset: 4:6c0482d977a3 parent: 0:1160648e36ce user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: z clone bookmarks $ hg -R ../remote bookmark test $ hg -R ../remote bookmarks * test 4:6c0482d977a3 $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/remote local-bookmarks requesting all changes adding changesets adding manifests adding file changes added 6 changesets with 5 changes to 4 files (+1 heads) updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ 
hg -R local-bookmarks bookmarks test 4:6c0482d977a3 passwords in ssh urls are not supported (we use a glob here because different Python versions give different results here) $ hg push ssh://user:erroneouspwd@dummy/remote pushing to ssh://user:*@dummy/remote (glob) abort: password in URL not supported! [255] $ cd .. hide outer repo $ hg init Test remote paths with spaces (issue2983): $ hg init --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo" $ touch "$TESTTMP/a repo/test" $ hg -R 'a repo' commit -A -m "test" adding test $ hg -R 'a repo' tag tag $ hg id --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo" 73649e48688a $ hg id --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo#noNoNO" abort: unknown revision 'noNoNO'! [255] Test (non-)escaping of remote paths with spaces when cloning (issue3145): $ hg clone --ssh "python \"$TESTDIR/dummyssh\"" "ssh://user@dummy/a repo" destination directory: a repo abort: destination 'a repo' is not empty [255] Test hg-ssh using a helper script that will restore PYTHONPATH (which might have been cleared by a hg.exe wrapper) and invoke hg-ssh with the right parameters: $ cat > ssh.sh << EOF > userhost="\$1" > SSH_ORIGINAL_COMMAND="\$2" > export SSH_ORIGINAL_COMMAND > PYTHONPATH="$PYTHONPATH" > export PYTHONPATH > python "$TESTDIR/../contrib/hg-ssh" "$TESTTMP/a repo" > EOF $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a repo" 73649e48688a $ hg id --ssh "sh ssh.sh" "ssh://user@dummy/a'repo" remote: Illegal repository "$TESTTMP/a'repo" (glob) abort: no suitable response from remote hg! [255] $ hg id --ssh "sh ssh.sh" --remotecmd hacking "ssh://user@dummy/a'repo" remote: Illegal command "hacking -R 'a'\''repo' serve --stdio" abort: no suitable response from remote hg! 
[255] $ SSH_ORIGINAL_COMMAND="'hg' -R 'a'repo' serve --stdio" python "$TESTDIR/../contrib/hg-ssh" Illegal command "'hg' -R 'a'repo' serve --stdio": No closing quotation [255] Test hg-ssh in read-only mode: $ cat > ssh.sh << EOF > userhost="\$1" > SSH_ORIGINAL_COMMAND="\$2" > export SSH_ORIGINAL_COMMAND > PYTHONPATH="$PYTHONPATH" > export PYTHONPATH > python "$TESTDIR/../contrib/hg-ssh" --read-only "$TESTTMP/remote" > EOF $ hg clone --ssh "sh ssh.sh" "ssh://user@dummy/$TESTTMP/remote" read-only-local requesting all changes adding changesets adding manifests adding file changes added 6 changesets with 5 changes to 4 files (+1 heads) updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd read-only-local $ echo "baz" > bar $ hg ci -A -m "unpushable commit" bar $ hg push --ssh "sh ../ssh.sh" pushing to ssh://user@dummy/*/remote (glob) searching for changes remote: Permission denied remote: abort: pretxnopen.hg-ssh hook failed remote: Permission denied remote: pushkey-abort: prepushkey.hg-ssh hook failed updating 6c0482d977a3 to public failed! [1] $ cd .. 
stderr from remote commands should be printed before stdout from local code (issue4336) $ hg clone remote stderr-ordering updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd stderr-ordering $ cat >> localwrite.py << EOF > from mercurial import exchange, extensions > > def wrappedpush(orig, repo, *args, **kwargs): > res = orig(repo, *args, **kwargs) > repo.ui.write('local stdout\n') > return res > > def extsetup(ui): > extensions.wrapfunction(exchange, 'push', wrappedpush) > EOF $ cat >> .hg/hgrc << EOF > [paths] > default-push = ssh://user@dummy/remote > [ui] > ssh = python "$TESTDIR/dummyssh" > [extensions] > localwrite = localwrite.py > EOF $ echo localwrite > foo $ hg commit -m 'testing localwrite' $ hg push pushing to ssh://user@dummy/remote searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: KABOOM local stdout debug output $ hg pull --debug ssh://user@dummy/remote pulling from ssh://user@dummy/remote running python ".*/dummyssh" user@dummy ('|")hg -R remote serve --stdio('|") (re) sending hello command sending between command remote: 371 remote: capabilities: lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 remote: 1 preparing listkeys for "bookmarks" sending listkeys command received listkey for "bookmarks": 45 bytes query 1; heads sending batch command searching for changes all remote heads known locally no changes found preparing listkeys for "phases" sending listkeys command received listkey for "phases": 15 bytes checking for updated bookmarks $ cd .. 
$ cat dummylog Got arguments 1:user@dummy 2:hg -R nonexistent serve --stdio Got arguments 1:user@dummy 2:hg -R /$TESTTMP/nonexistent serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R local-stream serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R doesnotexist serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R local serve --stdio Got arguments 1:user@dummy 2:hg -R $TESTTMP/local serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio changegroup-in-remote hook: HG_NODE=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_NODE_LAST=a28a9d1a809cab7d4e2fde4bee738a9ede948b60 HG_SOURCE=serve HG_TXNID=TXN:* HG_URL=remote:ssh:127.0.0.1 (glob) Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio changegroup-in-remote hook: HG_NODE=1383141674ec756a6056f6a9097618482fe0f4a6 HG_NODE_LAST=1383141674ec756a6056f6a9097618482fe0f4a6 HG_SOURCE=serve HG_TXNID=TXN:* HG_URL=remote:ssh:127.0.0.1 (glob) Got arguments 1:user@dummy 2:hg -R remote serve --stdio Got arguments 1:user@dummy 2:hg init 'a repo' Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio Got arguments 1:user@dummy 2:hg -R 'a repo' serve --stdio Got arguments 1:user@dummy 2:hg -R remote serve --stdio changegroup-in-remote hook: 
HG_NODE=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_NODE_LAST=65c38f4125f9602c8db4af56530cc221d93b8ef8 HG_SOURCE=serve HG_TXNID=TXN:* HG_URL=remote:ssh:127.0.0.1 (glob) Got arguments 1:user@dummy 2:hg -R remote serve --stdio remote hook failure is attributed to remote $ cat > $TESTTMP/failhook << EOF > def hook(ui, repo, **kwargs): > ui.write('hook failure!\n') > ui.flush() > return 1 > EOF $ echo "pretxnchangegroup.fail = python:$TESTTMP/failhook:hook" >> remote/.hg/hgrc $ hg -q --config ui.ssh="python $TESTDIR/dummyssh" clone ssh://user@dummy/remote hookout $ cd hookout $ touch hookfailure $ hg -q commit -A -m 'remote hook failure' $ hg --config ui.ssh="python $TESTDIR/dummyssh" push pushing to ssh://user@dummy/remote searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: hook failure! remote: transaction abort! remote: rollback completed remote: abort: pretxnchangegroup.fail hook failed [1] mercurial-3.7.3/tests/test-manifestv2.t0000644000175000017500000000452312676531525017515 0ustar mpmmpm00000000000000Create repo with old manifest $ cat << EOF >> $HGRCPATH > [format] > usegeneraldelta=yes > EOF $ hg init existing $ cd existing $ echo footext > foo $ hg add foo $ hg commit -m initial We're using v1, so no manifestv2 entry is in requires yet. $ grep manifestv2 .hg/requires [1] Let's clone this with manifestv2 enabled to switch to the new format for future commits. $ cd .. $ hg clone --pull existing new --config experimental.manifestv2=1 requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd new Check that entry was added to .hg/requires. $ grep manifestv2 .hg/requires manifestv2 Make a new commit. $ echo newfootext > foo $ hg commit -m new Check that the manifest actually switched to v2. 
$ hg debugdata -m 0 foo\x0021e958b1dca695a60ee2e9cf151753204ee0f9e9 (esc) $ hg debugdata -m 1 \x00 (esc) \x00foo\x00 (esc) I\xab\x7f\xb8(\x83\xcas\x15\x9d\xc2\xd3\xd3:5\x08\xbad5_ (esc) Check that manifestv2 is used if the requirement is present, even if it's disabled in the config. $ echo newerfootext > foo $ hg --config experimental.manifestv2=False commit -m newer $ hg debugdata -m 2 \x00 (esc) \x00foo\x00 (esc) \xa6\xb1\xfb\xef]\x91\xa1\x19`\xf3.#\x90S\xf8\x06 \xe2\x19\x00 (esc) Check that we can still read v1 manifests. $ hg files -r 0 foo $ cd .. Check that entry is added to .hg/requires on repo creation $ hg --config experimental.manifestv2=True init repo $ cd repo $ grep manifestv2 .hg/requires manifestv2 Set up simple repo $ echo a > file1 $ echo b > file2 $ echo c > file3 $ hg ci -Aqm 'initial' $ echo d > file2 $ hg ci -m 'modify file2' Check that 'hg verify', which uses manifest.readdelta(), works $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 3 files, 2 changesets, 4 total revisions Check that manifest revlog is smaller than for v1 $ hg debugindex -m rev offset length delta linkrev nodeid p1 p2 0 0 81 -1 0 57361477c778 000000000000 000000000000 1 81 33 0 1 aeaab5a2ef74 57361477c778 000000000000 mercurial-3.7.3/tests/test-histedit-no-change.t0000644000175000017500000001474412676531525021117 0ustar mpmmpm00000000000000test for old histedit issue #6: editing a changeset without any actual change would corrupt the repository $ . "$TESTDIR/histedit-helpers.sh" $ cat >> $HGRCPATH < [extensions] > histedit= > EOF $ initrepo () > { > dir="$1" > comment="$2" > if [ -n "${comment}" ]; then > echo % ${comment} > echo % ${comment} | sed 's:.:-:g' > fi > hg init ${dir} > cd ${dir} > for x in a b c d e f ; do > echo $x > $x > hg add $x > hg ci -m $x > done > cd .. 
> } $ geneditor () > { > # generate an editor script for selecting changesets to be edited > choice=$1 # changesets that should be edited (using sed line ranges) > cat < # editing the rules, replacing 'pick' with 'edit' for the chosen lines > sed '${choice}s:^pick:edit:' "\$1" > "\${1}.tmp" > mv "\${1}.tmp" "\$1" > # displaying the resulting rules, minus comments and empty lines > sed '/^#/d;/^$/d;s:^:| :' "\$1" >&2 > EOF > } $ startediting () > { > # begin an editing session > choice="$1" # changesets that should be edited > number="$2" # number of changesets considered (from tip) > comment="$3" > geneditor "${choice}" > edit.sh > echo % start editing the history ${comment} > HGEDITOR="sh ./edit.sh" hg histedit -- -${number} 2>&1 | fixbundle > } $ continueediting () > { > # continue an edit already in progress > editor="$1" # message editor when finalizing editing > comment="$2" > echo % finalize changeset editing ${comment} > HGEDITOR=${editor} hg histedit --continue 2>&1 | fixbundle > } $ graphlog () > { > comment="${1:-log}" > echo % "${comment}" > hg log -G --template '{rev} {node} \"{desc|firstline}\"\n' > } $ initrepo r1 "test editing with no change" % test editing with no change ----------------------------- $ cd r1 $ graphlog "log before editing" % log before editing @ 5 652413bf663ef2a641cab26574e46d5f5a64a55a "f" | o 4 e860deea161a2f77de56603b340ebbb4536308ae "e" | o 3 055a42cdd88768532f9cf79daa407fc8d138de9b "d" | o 2 177f92b773850b59254aa5e923436f921b55483b "c" | o 1 d2ae7f538514cd87c17547b0de4cea71fe1af9fb "b" | o 0 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b "a" $ startediting 2 3 "(not changing anything)" # edit the 2nd of 3 changesets % start editing the history (not changing anything) | pick 055a42cdd887 3 d | edit e860deea161a 4 e | pick 652413bf663e 5 f 0 files updated, 0 files merged, 2 files removed, 0 files unresolved Editing (e860deea161a), you may commit or record as needed now. 
(hg histedit --continue to resume) $ continueediting true "(leaving commit message unaltered)" % finalize changeset editing (leaving commit message unaltered) check state of working copy $ hg id 794fe033d0a0 tip $ graphlog "log after history editing" % log after history editing @ 5 794fe033d0a030f8df77c5de945fca35c9181c30 "f" | o 4 04d2fab980779f332dec458cc944f28de8b43435 "e" | o 3 055a42cdd88768532f9cf79daa407fc8d138de9b "d" | o 2 177f92b773850b59254aa5e923436f921b55483b "c" | o 1 d2ae7f538514cd87c17547b0de4cea71fe1af9fb "b" | o 0 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b "a" $ cd .. $ initrepo r2 "test editing with no change, then abort" % test editing with no change, then abort ----------------------------------------- $ cd r2 $ graphlog "log before editing" % log before editing @ 5 652413bf663ef2a641cab26574e46d5f5a64a55a "f" | o 4 e860deea161a2f77de56603b340ebbb4536308ae "e" | o 3 055a42cdd88768532f9cf79daa407fc8d138de9b "d" | o 2 177f92b773850b59254aa5e923436f921b55483b "c" | o 1 d2ae7f538514cd87c17547b0de4cea71fe1af9fb "b" | o 0 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b "a" $ startediting 1,2 3 "(not changing anything)" # edit the 1st two of 3 changesets % start editing the history (not changing anything) | edit 055a42cdd887 3 d | edit e860deea161a 4 e | pick 652413bf663e 5 f 0 files updated, 0 files merged, 3 files removed, 0 files unresolved Editing (055a42cdd887), you may commit or record as needed now. (hg histedit --continue to resume) $ continueediting true "(leaving commit message unaltered)" % finalize changeset editing (leaving commit message unaltered) Editing (e860deea161a), you may commit or record as needed now. 
(hg histedit --continue to resume) $ graphlog "log after first edit" % log after first edit @ 6 e5ae3ca2f1ffdbd89ec41ebc273a231f7c3022f2 "d" | | o 5 652413bf663ef2a641cab26574e46d5f5a64a55a "f" | | | o 4 e860deea161a2f77de56603b340ebbb4536308ae "e" | | | o 3 055a42cdd88768532f9cf79daa407fc8d138de9b "d" |/ o 2 177f92b773850b59254aa5e923436f921b55483b "c" | o 1 d2ae7f538514cd87c17547b0de4cea71fe1af9fb "b" | o 0 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b "a" abort editing session, after first forcibly updating away $ hg up 0 abort: histedit in progress (use 'hg histedit --continue' or 'hg histedit --abort') [255] $ mv .hg/histedit-state .hg/histedit-state-ignore $ hg up 0 0 files updated, 0 files merged, 3 files removed, 0 files unresolved $ mv .hg/histedit-state-ignore .hg/histedit-state $ hg sum parent: 0:cb9a9f314b8b a branch: default commit: 1 added, 1 unknown (new branch head) update: 6 new changesets (update) phases: 7 draft hist: 2 remaining (histedit --continue) $ hg histedit --abort 2>&1 | fixbundle modified files should survive the abort when we've moved away already $ hg st A e ? edit.sh $ graphlog "log after abort" % log after abort o 5 652413bf663ef2a641cab26574e46d5f5a64a55a "f" | o 4 e860deea161a2f77de56603b340ebbb4536308ae "e" | o 3 055a42cdd88768532f9cf79daa407fc8d138de9b "d" | o 2 177f92b773850b59254aa5e923436f921b55483b "c" | o 1 d2ae7f538514cd87c17547b0de4cea71fe1af9fb "b" | @ 0 cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b "a" aborting and not changing files can skip mentioning updating (no) files $ hg up 5 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg commit --close-branch -m 'closebranch' $ startediting 1 1 "(not changing anything)" # edit the 3rd of 3 changesets % start editing the history (not changing anything) | edit 292aec348d9e 6 closebranch Editing (292aec348d9e), you may commit or record as needed now. (hg histedit --continue to resume) $ hg histedit --abort $ cd .. 
mercurial-3.7.3/tests/test-status-rev.t0000644000175000017500000001454712676531525017563 0ustar mpmmpm00000000000000Tests of 'hg status --rev ' to make sure status between and '.' get combined correctly with the dirstate status. $ hg init First commit $ python $TESTDIR/generate-working-copy-states.py state 2 1 $ hg addremove --similarity 0 adding content1_content1_content1-tracked adding content1_content1_content1-untracked adding content1_content1_content3-tracked adding content1_content1_content3-untracked adding content1_content1_missing-tracked adding content1_content1_missing-untracked adding content1_content2_content1-tracked adding content1_content2_content1-untracked adding content1_content2_content2-tracked adding content1_content2_content2-untracked adding content1_content2_content3-tracked adding content1_content2_content3-untracked adding content1_content2_missing-tracked adding content1_content2_missing-untracked adding content1_missing_content1-tracked adding content1_missing_content1-untracked adding content1_missing_content3-tracked adding content1_missing_content3-untracked adding content1_missing_missing-tracked adding content1_missing_missing-untracked $ hg commit -m first Second commit $ python $TESTDIR/generate-working-copy-states.py state 2 2 $ hg addremove --similarity 0 removing content1_missing_content1-tracked removing content1_missing_content1-untracked removing content1_missing_content3-tracked removing content1_missing_content3-untracked removing content1_missing_missing-tracked removing content1_missing_missing-untracked adding missing_content2_content2-tracked adding missing_content2_content2-untracked adding missing_content2_content3-tracked adding missing_content2_content3-untracked adding missing_content2_missing-tracked adding missing_content2_missing-untracked $ hg commit -m second Working copy $ python $TESTDIR/generate-working-copy-states.py state 2 wc $ hg addremove --similarity 0 adding content1_missing_content1-tracked 
adding content1_missing_content1-untracked adding content1_missing_content3-tracked adding content1_missing_content3-untracked adding content1_missing_missing-tracked adding content1_missing_missing-untracked adding missing_missing_content3-tracked adding missing_missing_content3-untracked adding missing_missing_missing-tracked adding missing_missing_missing-untracked $ hg forget *_*_*-untracked $ rm *_*_missing-* Status compared to parent of the working copy, i.e. the dirstate status $ hg status -A --rev 1 'glob:missing_content2_content3-tracked' M missing_content2_content3-tracked $ hg status -A --rev 1 'glob:missing_content2_content2-tracked' C missing_content2_content2-tracked $ hg status -A --rev 1 'glob:missing_missing_content3-tracked' A missing_missing_content3-tracked $ hg status -A --rev 1 'glob:missing_missing_content3-untracked' ? missing_missing_content3-untracked $ hg status -A --rev 1 'glob:missing_content2_*-untracked' R missing_content2_content2-untracked R missing_content2_content3-untracked R missing_content2_missing-untracked $ hg status -A --rev 1 'glob:missing_*_missing-tracked' ! missing_content2_missing-tracked ! missing_missing_missing-tracked #if windows $ hg status -A --rev 1 'glob:missing_missing_missing-untracked' missing_missing_missing-untracked: The system cannot find the file specified #else $ hg status -A --rev 1 'glob:missing_missing_missing-untracked' missing_missing_missing-untracked: No such file or directory #endif Status between first and second commit. Should ignore dirstate status. 
$ hg status -A --rev 0:1 'glob:content1_content2_*' M content1_content2_content1-tracked M content1_content2_content1-untracked M content1_content2_content2-tracked M content1_content2_content2-untracked M content1_content2_content3-tracked M content1_content2_content3-untracked M content1_content2_missing-tracked M content1_content2_missing-untracked $ hg status -A --rev 0:1 'glob:content1_content1_*' C content1_content1_content1-tracked C content1_content1_content1-untracked C content1_content1_content3-tracked C content1_content1_content3-untracked C content1_content1_missing-tracked C content1_content1_missing-untracked $ hg status -A --rev 0:1 'glob:missing_content2_*' A missing_content2_content2-tracked A missing_content2_content2-untracked A missing_content2_content3-tracked A missing_content2_content3-untracked A missing_content2_missing-tracked A missing_content2_missing-untracked $ hg status -A --rev 0:1 'glob:content1_missing_*' R content1_missing_content1-tracked R content1_missing_content1-untracked R content1_missing_content3-tracked R content1_missing_content3-untracked R content1_missing_missing-tracked R content1_missing_missing-untracked $ hg status -A --rev 0:1 'glob:missing_missing_*' Status compared to one revision back, checking that the dirstate status is correctly combined with the inter-revision status $ hg status -A --rev 0 'glob:content1_*_content[23]-tracked' M content1_content1_content3-tracked M content1_content2_content2-tracked M content1_content2_content3-tracked M content1_missing_content3-tracked $ hg status -A --rev 0 'glob:content1_*_content1-tracked' C content1_content1_content1-tracked C content1_content2_content1-tracked C content1_missing_content1-tracked $ hg status -A --rev 0 'glob:missing_*_content?-tracked' A missing_content2_content2-tracked A missing_content2_content3-tracked A missing_missing_content3-tracked BROKEN: missing_content2_content[23]-untracked exist, so should be listed $ hg status -A --rev 0 
'glob:missing_*_content?-untracked' ? missing_missing_content3-untracked $ hg status -A --rev 0 'glob:content1_*_*-untracked' R content1_content1_content1-untracked R content1_content1_content3-untracked R content1_content1_missing-untracked R content1_content2_content1-untracked R content1_content2_content2-untracked R content1_content2_content3-untracked R content1_content2_missing-untracked R content1_missing_content1-untracked R content1_missing_content3-untracked R content1_missing_missing-untracked $ hg status -A --rev 0 'glob:*_*_missing-tracked' ! content1_content1_missing-tracked ! content1_content2_missing-tracked ! content1_missing_missing-tracked ! missing_content2_missing-tracked ! missing_missing_missing-tracked $ hg status -A --rev 0 'glob:missing_*_missing-untracked' mercurial-3.7.3/tests/test-ui-color.py.out0000644000175000017500000000006712676531525020162 0ustar mpmmpm00000000000000warning error 'buffered\n' colored? True colored? True mercurial-3.7.3/tests/test-hgwebdir-paths.py0000644000175000017500000000205212676531525020527 0ustar mpmmpm00000000000000import os from mercurial import hg, ui from mercurial.hgweb.hgwebdir_mod import hgwebdir os.mkdir('webdir') os.chdir('webdir') webdir = os.path.realpath('.') u = ui.ui() hg.repository(u, 'a', create=1) hg.repository(u, 'b', create=1) os.chdir('b') hg.repository(u, 'd', create=1) os.chdir('..') hg.repository(u, 'c', create=1) os.chdir('..') paths = {'t/a/': '%s/a' % webdir, 'b': '%s/b' % webdir, 'coll': '%s/*' % webdir, 'rcoll': '%s/**' % webdir} config = os.path.join(webdir, 'hgwebdir.conf') configfile = open(config, 'w') configfile.write('[paths]\n') for k, v in paths.items(): configfile.write('%s = %s\n' % (k, v)) configfile.close() confwd = hgwebdir(config) dictwd = hgwebdir(paths) assert len(confwd.repos) == len(dictwd.repos), 'different numbers' assert len(confwd.repos) == 9, 'expected 9 repos, found %d' % len(confwd.repos) found = dict(confwd.repos) for key, path in dictwd.repos: assert key 
in found, 'repository %s was not found' % key assert found[key] == path, 'different paths for repo %s' % key mercurial-3.7.3/tests/test-schemes.t0000644000175000017500000000200712676531525017061 0ustar mpmmpm00000000000000#require serve $ cat <> $HGRCPATH > [extensions] > schemes= > > [schemes] > l = http://localhost:$HGPORT/ > parts = http://{1}:$HGPORT/ > z = file:\$PWD/ > EOF $ hg init test $ cd test $ echo a > a $ hg ci -Am initial adding a invalid scheme $ hg log -R z:z abort: no '://' in scheme url 'z:z' [255] http scheme $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log $ cat hg.pid >> $DAEMON_PIDS $ hg incoming l:// comparing with l:// searching for changes no changes found [1] check that {1} syntax works $ hg incoming --debug parts://localhost using http://localhost:$HGPORT/ sending capabilities command comparing with parts://localhost/ query 1; heads sending batch command searching for changes all remote heads known locally no changes found [1] check that paths are expanded $ PWD=`pwd` hg incoming z:// comparing with z:// searching for changes no changes found [1] errors $ cat errors.log $ cd .. mercurial-3.7.3/tests/test-revert-flags.t0000644000175000017500000000047012676531525020035 0ustar mpmmpm00000000000000#require execbit $ hg init repo $ cd repo $ echo foo > foo $ chmod 644 foo $ hg ci -qAm '644' $ chmod 755 foo $ hg ci -qAm '755' reverting to rev 0 $ hg revert -a -r 0 reverting foo $ hg st M foo $ hg diff --git diff --git a/foo b/foo old mode 100755 new mode 100644 $ cd .. mercurial-3.7.3/tests/notcapable0000644000175000017500000000144612676531525016331 0ustar mpmmpm00000000000000# Disable the $CAP wire protocol capability. if test -z "$CAP" then echo "CAP environment variable not set." 
fi cat > notcapable-$CAP.py << EOF from mercurial import extensions, peer, localrepo def extsetup(): extensions.wrapfunction(peer.peerrepository, 'capable', wrapcapable) extensions.wrapfunction(localrepo.localrepository, 'peer', wrappeer) def wrapcapable(orig, self, name, *args, **kwargs): if name in '$CAP'.split(' '): return False return orig(self, name, *args, **kwargs) def wrappeer(orig, self): # Since we're disabling some newer features, we need to make sure local # repos add in the legacy features again. return localrepo.locallegacypeer(self) EOF echo '[extensions]' >> $HGRCPATH echo "notcapable-$CAP = `pwd`/notcapable-$CAP.py" >> $HGRCPATH mercurial-3.7.3/tests/test-debugextensions.t0000644000175000017500000000305012676531525020637 0ustar mpmmpm00000000000000 $ hg debugextensions $ debugpath=`pwd`/extwithoutinfos.py $ cat > extwithoutinfos.py < EOF $ cat >> $HGRCPATH < [extensions] > color= > histedit= > patchbomb= > rebase= > mq= > ext1 = $debugpath > EOF $ hg debugextensions color ext1 (untested!) 
histedit mq patchbomb rebase $ hg debugextensions -v color location: */hgext/color.pyc (glob) tested with: internal ext1 location: */extwithoutinfos.pyc (glob) histedit location: */hgext/histedit.pyc (glob) tested with: internal mq location: */hgext/mq.pyc (glob) tested with: internal patchbomb location: */hgext/patchbomb.pyc (glob) tested with: internal rebase location: */hgext/rebase.pyc (glob) tested with: internal $ hg debugextensions -Tjson | sed 's|\\\\|/|g' [ { "buglink": "", "name": "color", "source": "*/hgext/color.pyc", (glob) "testedwith": "internal" }, { "buglink": "", "name": "ext1", "source": "*/extwithoutinfos.pyc", (glob) "testedwith": "" }, { "buglink": "", "name": "histedit", "source": "*/hgext/histedit.pyc", (glob) "testedwith": "internal" }, { "buglink": "", "name": "mq", "source": "*/hgext/mq.pyc", (glob) "testedwith": "internal" }, { "buglink": "", "name": "patchbomb", "source": "*/hgext/patchbomb.pyc", (glob) "testedwith": "internal" }, { "buglink": "", "name": "rebase", "source": "*/hgext/rebase.pyc", (glob) "testedwith": "internal" } ] mercurial-3.7.3/tests/test-largefiles-wireproto.t0000644000175000017500000002362112676531525021604 0ustar mpmmpm00000000000000This file contains testcases that tend to be related to the wire protocol part of largefiles. $ USERCACHE="$TESTTMP/cache"; export USERCACHE $ mkdir "${USERCACHE}" $ cat >> $HGRCPATH < [extensions] > largefiles= > purge= > rebase= > transplant= > [phases] > publish=False > [largefiles] > minsize=2 > patterns=glob:**.dat > usercache=${USERCACHE} > [web] > allow_archive = zip > [hooks] > precommit=sh -c "echo \\"Invoking status precommit hook\\"; hg status" > EOF #if serve vanilla clients not locked out from largefiles servers on vanilla repos $ mkdir r1 $ cd r1 $ hg init $ echo c1 > f1 $ hg add f1 $ hg commit -m "m1" Invoking status precommit hook A f1 $ cd .. $ hg serve -R r1 -d -p $HGPORT --pid-file hg.pid $ cat hg.pid >> $DAEMON_PIDS $ hg --config extensions.largefiles=! 
clone http://localhost:$HGPORT r2 requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved largefiles clients still work with vanilla servers $ hg --config extensions.largefiles=! serve -R r1 -d -p $HGPORT1 --pid-file hg.pid $ cat hg.pid >> $DAEMON_PIDS $ hg clone http://localhost:$HGPORT1 r3 requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved #endif vanilla clients locked out from largefiles http repos $ mkdir r4 $ cd r4 $ hg init $ echo c1 > f1 $ hg add --large f1 $ hg commit -m "m1" Invoking status precommit hook A f1 $ cd .. largefiles can be pushed locally (issue3583) $ hg init dest $ cd r4 $ hg outgoing ../dest comparing with ../dest searching for changes changeset: 0:639881c12b4c tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: m1 $ hg push ../dest pushing to ../dest searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files exit code with nothing outgoing (issue3611) $ hg outgoing ../dest comparing with ../dest searching for changes no changes found [1] $ cd .. #if serve $ hg serve -R r4 -d -p $HGPORT2 --pid-file hg.pid $ cat hg.pid >> $DAEMON_PIDS $ hg --config extensions.largefiles=! clone http://localhost:$HGPORT2 r5 abort: remote error: This repository uses the largefiles extension. Please enable it in your Mercurial config file. [255] used all HGPORTs, kill all daemons $ killdaemons.py #endif vanilla clients locked out from largefiles ssh repos $ hg --config extensions.largefiles=! clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/r4 r5 remote: remote: This repository uses the largefiles extension. 
remote: remote: Please enable it in your Mercurial config file. remote: remote: - abort: remote error (check previous remote output) [255] #if serve largefiles clients refuse to push largefiles repos to vanilla servers $ mkdir r6 $ cd r6 $ hg init $ echo c1 > f1 $ hg add f1 $ hg commit -m "m1" Invoking status precommit hook A f1 $ cat >> .hg/hgrc < [web] > push_ssl = false > allow_push = * > ! $ cd .. $ hg clone r6 r7 updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd r7 $ echo c2 > f2 $ hg add --large f2 $ hg commit -m "m2" Invoking status precommit hook A f2 $ hg --config extensions.largefiles=! -R ../r6 serve -d -p $HGPORT --pid-file ../hg.pid $ cat ../hg.pid >> $DAEMON_PIDS $ hg push http://localhost:$HGPORT pushing to http://localhost:$HGPORT/ searching for changes abort: http://localhost:$HGPORT/ does not appear to be a largefile store [255] $ cd .. putlfile errors are shown (issue3123) Corrupt the cached largefile in r7 and move it out of the servers usercache $ mv r7/.hg/largefiles/4cdac4d8b084d0b599525cf732437fb337d422a8 . $ echo 'client side corruption' > r7/.hg/largefiles/4cdac4d8b084d0b599525cf732437fb337d422a8 $ rm "$USERCACHE/4cdac4d8b084d0b599525cf732437fb337d422a8" $ hg init empty $ hg serve -R empty -d -p $HGPORT1 --pid-file hg.pid \ > --config 'web.allow_push=*' --config web.push_ssl=False $ cat hg.pid >> $DAEMON_PIDS $ hg push -R r7 http://localhost:$HGPORT1 pushing to http://localhost:$HGPORT1/ searching for changes remote: largefiles: failed to put 4cdac4d8b084d0b599525cf732437fb337d422a8 into store: largefile contents do not match hash abort: remotestore: could not put $TESTTMP/r7/.hg/largefiles/4cdac4d8b084d0b599525cf732437fb337d422a8 to remote store http://localhost:$HGPORT1/ (glob) [255] $ mv 4cdac4d8b084d0b599525cf732437fb337d422a8 r7/.hg/largefiles/4cdac4d8b084d0b599525cf732437fb337d422a8 Push of file that exists on server but is corrupted - magic healing would be nice ... 
but too magic $ echo "server side corruption" > empty/.hg/largefiles/4cdac4d8b084d0b599525cf732437fb337d422a8 $ hg push -R r7 http://localhost:$HGPORT1 pushing to http://localhost:$HGPORT1/ searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 2 changesets with 2 changes to 2 files $ cat empty/.hg/largefiles/4cdac4d8b084d0b599525cf732437fb337d422a8 server side corruption $ rm -rf empty Push a largefiles repository to a served empty repository $ hg init r8 $ echo c3 > r8/f1 $ hg add --large r8/f1 -R r8 $ hg commit -m "m1" -R r8 Invoking status precommit hook A f1 $ hg init empty $ hg serve -R empty -d -p $HGPORT2 --pid-file hg.pid \ > --config 'web.allow_push=*' --config web.push_ssl=False $ cat hg.pid >> $DAEMON_PIDS $ rm "${USERCACHE}"/* $ hg push -R r8 http://localhost:$HGPORT2/#default pushing to http://localhost:$HGPORT2/ searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files $ [ -f "${USERCACHE}"/02a439e5c31c526465ab1a0ca1f431f76b827b90 ] $ [ -f empty/.hg/largefiles/02a439e5c31c526465ab1a0ca1f431f76b827b90 ] Clone over http, no largefiles pulled on clone. $ hg clone http://localhost:$HGPORT2/#default http-clone -U adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files Archive contains largefiles >>> import urllib2, os >>> u = 'http://localhost:%s/archive/default.zip' % os.environ['HGPORT2'] >>> with open('archive.zip', 'w') as f: ... f.write(urllib2.urlopen(u).read()) $ unzip -t archive.zip Archive: archive.zip testing: empty-default/.hg_archival.txt OK testing: empty-default/f1 OK No errors detected in compressed data of archive.zip. test 'verify' with remotestore: $ rm "${USERCACHE}"/02a439e5c31c526465ab1a0ca1f431f76b827b90 $ mv empty/.hg/largefiles/02a439e5c31c526465ab1a0ca1f431f76b827b90 . 
$ hg -R http-clone verify --large --lfa checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 1 changesets, 1 total revisions searching 1 changesets for largefiles changeset 0:cf03e5bb9936: f1 missing verified existence of 1 revisions of 1 largefiles [1] $ mv 02a439e5c31c526465ab1a0ca1f431f76b827b90 empty/.hg/largefiles/ $ hg -R http-clone -q verify --large --lfa largefiles pulled on update - a largefile missing on the server: $ mv empty/.hg/largefiles/02a439e5c31c526465ab1a0ca1f431f76b827b90 . $ hg -R http-clone up --config largefiles.usercache=http-clone-usercache getting changed largefiles f1: largefile 02a439e5c31c526465ab1a0ca1f431f76b827b90 not available from http://localhost:$HGPORT2/ 0 largefiles updated, 0 removed 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R http-clone st ! f1 $ hg -R http-clone up -Cqr null largefiles pulled on update - a largefile corrupted on the server: $ echo corruption > empty/.hg/largefiles/02a439e5c31c526465ab1a0ca1f431f76b827b90 $ hg -R http-clone up --config largefiles.usercache=http-clone-usercache getting changed largefiles f1: data corruption (expected 02a439e5c31c526465ab1a0ca1f431f76b827b90, got 6a7bb2556144babe3899b25e5428123735bb1e27) 0 largefiles updated, 0 removed 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R http-clone st ! f1 $ [ ! -f http-clone/.hg/largefiles/02a439e5c31c526465ab1a0ca1f431f76b827b90 ] $ [ ! -f http-clone/f1 ] $ [ ! 
-f http-clone-usercache ] $ hg -R http-clone verify --large --lfc checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 1 changesets, 1 total revisions searching 1 changesets for largefiles verified contents of 1 revisions of 1 largefiles $ hg -R http-clone up -Cqr null largefiles pulled on update - no server side problems: $ mv 02a439e5c31c526465ab1a0ca1f431f76b827b90 empty/.hg/largefiles/ $ hg -R http-clone --debug up --config largefiles.usercache=http-clone-usercache --config progress.debug=true resolving manifests branchmerge: False, force: False, partial: False ancestor: 000000000000, local: 000000000000+, remote: cf03e5bb9936 .hglf/f1: remote created -> g getting .hglf/f1 updating: .hglf/f1 1/1 files (100.00%) getting changed largefiles using http://localhost:$HGPORT2/ sending capabilities command sending batch command getting largefiles: 0/1 lfile (0.00%) getting f1:02a439e5c31c526465ab1a0ca1f431f76b827b90 sending getlfile command found 02a439e5c31c526465ab1a0ca1f431f76b827b90 in store 1 largefiles updated, 0 removed 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ ls http-clone-usercache/* http-clone-usercache/02a439e5c31c526465ab1a0ca1f431f76b827b90 $ rm -rf empty http-clone* used all HGPORTs, kill all daemons $ killdaemons.py #endif mercurial-3.7.3/tests/failfilemerge.py0000644000175000017500000000074312676531525017442 0ustar mpmmpm00000000000000# extension to emulate interupting filemerge._filemerge from __future__ import absolute_import from mercurial import ( filemerge, extensions, error, ) def failfilemerge(filemergefn, premerge, repo, mynode, orig, fcd, fco, fca, labels=None): raise error.Abort("^C") return filemergefn(premerge, repo, mynode, orig, fcd, fco, fca, labels) def extsetup(ui): extensions.wrapfunction(filemerge, '_filemerge', failfilemerge) mercurial-3.7.3/tests/test-dispatch.py.out0000644000175000017500000000074112676531525020227 0ustar 
mpmmpm00000000000000running: init test1 result: None running: add foo result: 0 running: commit -m commit1 -d 2000-01-01 foo result: None running: commit -m commit2 -d 2000-01-02 foo result: None running: log -r 0 changeset: 0:0e4634943879 user: test date: Sat Jan 01 00:00:00 2000 +0000 summary: commit1 result: None running: log -r tip changeset: 1:45589e459b2e tag: tip user: test date: Sun Jan 02 00:00:00 2000 +0000 summary: commit2 result: None mercurial-3.7.3/tests/test-command-template.t0000644000175000017500000026736112676531525020701 0ustar mpmmpm00000000000000 $ hg init a $ cd a $ echo a > a $ hg add a $ echo line 1 > b $ echo line 2 >> b $ hg commit -l b -d '1000000 0' -u 'User Name ' $ hg add b $ echo other 1 > c $ echo other 2 >> c $ echo >> c $ echo other 3 >> c $ hg commit -l c -d '1100000 0' -u 'A. N. Other ' $ hg add c $ hg commit -m 'no person' -d '1200000 0' -u 'other@place' $ echo c >> c $ hg commit -m 'no user, no domain' -d '1300000 0' -u 'person' $ echo foo > .hg/branch $ hg commit -m 'new branch' -d '1400000 0' -u 'person' $ hg co -q 3 $ echo other 4 >> d $ hg add d $ hg commit -m 'new head' -d '1500000 0' -u 'person' $ hg merge -q foo $ hg commit -m 'merge' -d '1500001 0' -u 'person' Second branch starting at nullrev: $ hg update null 0 files updated, 0 files merged, 4 files removed, 0 files unresolved $ echo second > second $ hg add second $ hg commit -m second -d '1000000 0' -u 'User Name ' created new head $ echo third > third $ hg add third $ hg mv second fourth $ hg commit -m third -d "2020-01-01 10:01" $ hg log --template '{join(file_copies, ",\n")}\n' -r . fourth (second) $ hg log -T '{file_copies % "{source} -> {name}\n"}' -r . 
second -> fourth $ hg log -T '{rev} {ifcontains("fourth", file_copies, "t", "f")}\n' -r .:7 8 t 7 f Working-directory revision has special identifiers, though they are still experimental: $ hg log -r 'wdir()' -T '{rev}:{node}\n' 2147483647:ffffffffffffffffffffffffffffffffffffffff Some keywords are invalid for working-directory revision, but they should never cause crash: $ hg log -r 'wdir()' -T '{manifest}\n' Quoting for ui.logtemplate $ hg tip --config "ui.logtemplate={rev}\n" 8 $ hg tip --config "ui.logtemplate='{rev}\n'" 8 $ hg tip --config 'ui.logtemplate="{rev}\n"' 8 Make sure user/global hgrc does not affect tests $ echo '[ui]' > .hg/hgrc $ echo 'logtemplate =' >> .hg/hgrc $ echo 'style =' >> .hg/hgrc Add some simple styles to settings $ echo '[templates]' >> .hg/hgrc $ printf 'simple = "{rev}\\n"\n' >> .hg/hgrc $ printf 'simple2 = {rev}\\n\n' >> .hg/hgrc $ hg log -l1 -Tsimple 8 $ hg log -l1 -Tsimple2 8 Test templates and style maps in files: $ echo "{rev}" > tmpl $ hg log -l1 -T./tmpl 8 $ hg log -l1 -Tblah/blah blah/blah (no-eol) $ printf 'changeset = "{rev}\\n"\n' > map-simple $ hg log -l1 -T./map-simple 8 Template should precede style option $ hg log -l1 --style default -T '{rev}\n' 8 Add a commit with empty description, to ensure that the templates below will omit the description line. $ echo c >> c $ hg add c $ hg commit -qm ' ' Default style is like normal output. Phases style should be the same as default style, except for extra phase lines. 
$ hg log > log.out $ hg log --style default > style.out $ cmp log.out style.out || diff -u log.out style.out $ hg log -T phases > phases.out $ diff -U 0 log.out phases.out | egrep -v '^---|^\+\+\+|^@@' +phase: draft +phase: draft +phase: draft +phase: draft +phase: draft +phase: draft +phase: draft +phase: draft +phase: draft +phase: draft $ hg log -v > log.out $ hg log -v --style default > style.out $ cmp log.out style.out || diff -u log.out style.out $ hg log -v -T phases > phases.out $ diff -U 0 log.out phases.out | egrep -v '^---|^\+\+\+|^@@' +phase: draft +phase: draft +phase: draft +phase: draft +phase: draft +phase: draft +phase: draft +phase: draft +phase: draft +phase: draft $ hg log -q > log.out $ hg log -q --style default > style.out $ cmp log.out style.out || diff -u log.out style.out $ hg log -q -T phases > phases.out $ cmp log.out phases.out || diff -u log.out phases.out $ hg log --debug > log.out $ hg log --debug --style default > style.out $ cmp log.out style.out || diff -u log.out style.out $ hg log --debug -T phases > phases.out $ cmp log.out phases.out || diff -u log.out phases.out Default style of working-directory revision should also be the same (but date may change while running tests): $ hg log -r 'wdir()' | sed 's|^date:.*|date:|' > log.out $ hg log -r 'wdir()' --style default | sed 's|^date:.*|date:|' > style.out $ cmp log.out style.out || diff -u log.out style.out $ hg log -r 'wdir()' -v | sed 's|^date:.*|date:|' > log.out $ hg log -r 'wdir()' -v --style default | sed 's|^date:.*|date:|' > style.out $ cmp log.out style.out || diff -u log.out style.out $ hg log -r 'wdir()' -q > log.out $ hg log -r 'wdir()' -q --style default > style.out $ cmp log.out style.out || diff -u log.out style.out $ hg log -r 'wdir()' --debug | sed 's|^date:.*|date:|' > log.out $ hg log -r 'wdir()' --debug --style default \ > | sed 's|^date:.*|date:|' > style.out $ cmp log.out style.out || diff -u log.out style.out Default style should also preserve color 
information (issue2866): $ cp $HGRCPATH $HGRCPATH-bak $ cat <> $HGRCPATH > [extensions] > color= > EOF $ hg --color=debug log > log.out $ hg --color=debug log --style default > style.out $ cmp log.out style.out || diff -u log.out style.out $ hg --color=debug log -T phases > phases.out $ diff -U 0 log.out phases.out | egrep -v '^---|^\+\+\+|^@@' +[log.phase|phase: draft] +[log.phase|phase: draft] +[log.phase|phase: draft] +[log.phase|phase: draft] +[log.phase|phase: draft] +[log.phase|phase: draft] +[log.phase|phase: draft] +[log.phase|phase: draft] +[log.phase|phase: draft] +[log.phase|phase: draft] $ hg --color=debug -v log > log.out $ hg --color=debug -v log --style default > style.out $ cmp log.out style.out || diff -u log.out style.out $ hg --color=debug -v log -T phases > phases.out $ diff -U 0 log.out phases.out | egrep -v '^---|^\+\+\+|^@@' +[log.phase|phase: draft] +[log.phase|phase: draft] +[log.phase|phase: draft] +[log.phase|phase: draft] +[log.phase|phase: draft] +[log.phase|phase: draft] +[log.phase|phase: draft] +[log.phase|phase: draft] +[log.phase|phase: draft] +[log.phase|phase: draft] $ hg --color=debug -q log > log.out $ hg --color=debug -q log --style default > style.out $ cmp log.out style.out || diff -u log.out style.out $ hg --color=debug -q log -T phases > phases.out $ cmp log.out phases.out || diff -u log.out phases.out $ hg --color=debug --debug log > log.out $ hg --color=debug --debug log --style default > style.out $ cmp log.out style.out || diff -u log.out style.out $ hg --color=debug --debug log -T phases > phases.out $ cmp log.out phases.out || diff -u log.out phases.out $ mv $HGRCPATH-bak $HGRCPATH Remove commit with empty commit message, so as to not pollute further tests. $ hg --config extensions.strip= strip -q . 
Revision with no copies (used to print a traceback): $ hg tip -v --template '\n' Compact style works: $ hg log -Tcompact 8[tip] 95c24699272e 2020-01-01 10:01 +0000 test third 7:-1 29114dbae42b 1970-01-12 13:46 +0000 user second 6:5,4 d41e714fe50d 1970-01-18 08:40 +0000 person merge 5:3 13207e5a10d9 1970-01-18 08:40 +0000 person new head 4 bbe44766e73d 1970-01-17 04:53 +0000 person new branch 3 10e46f2dcbf4 1970-01-16 01:06 +0000 person no user, no domain 2 97054abb4ab8 1970-01-14 21:20 +0000 other no person 1 b608e9d1a3f0 1970-01-13 17:33 +0000 other other 1 0 1e4e1b8f71e0 1970-01-12 13:46 +0000 user line 1 $ hg log -v --style compact 8[tip] 95c24699272e 2020-01-01 10:01 +0000 test third 7:-1 29114dbae42b 1970-01-12 13:46 +0000 User Name second 6:5,4 d41e714fe50d 1970-01-18 08:40 +0000 person merge 5:3 13207e5a10d9 1970-01-18 08:40 +0000 person new head 4 bbe44766e73d 1970-01-17 04:53 +0000 person new branch 3 10e46f2dcbf4 1970-01-16 01:06 +0000 person no user, no domain 2 97054abb4ab8 1970-01-14 21:20 +0000 other@place no person 1 b608e9d1a3f0 1970-01-13 17:33 +0000 A. N. Other other 1 other 2 other 3 0 1e4e1b8f71e0 1970-01-12 13:46 +0000 User Name line 1 line 2 $ hg log --debug --style compact 8[tip]:7,-1 95c24699272e 2020-01-01 10:01 +0000 test third 7:-1,-1 29114dbae42b 1970-01-12 13:46 +0000 User Name second 6:5,4 d41e714fe50d 1970-01-18 08:40 +0000 person merge 5:3,-1 13207e5a10d9 1970-01-18 08:40 +0000 person new head 4:3,-1 bbe44766e73d 1970-01-17 04:53 +0000 person new branch 3:2,-1 10e46f2dcbf4 1970-01-16 01:06 +0000 person no user, no domain 2:1,-1 97054abb4ab8 1970-01-14 21:20 +0000 other@place no person 1:0,-1 b608e9d1a3f0 1970-01-13 17:33 +0000 A. N. 
Other other 1 other 2 other 3 0:-1,-1 1e4e1b8f71e0 1970-01-12 13:46 +0000 User Name line 1 line 2 Test xml styles: $ hg log --style xml -r 'not all()' $ hg log --style xml tip test 2020-01-01T10:01:00+00:00 third User Name 1970-01-12T13:46:40+00:00 second person 1970-01-18T08:40:01+00:00 merge person 1970-01-18T08:40:00+00:00 new head foo person 1970-01-17T04:53:20+00:00 new branch person 1970-01-16T01:06:40+00:00 no user, no domain other 1970-01-14T21:20:00+00:00 no person A. N. Other 1970-01-13T17:33:20+00:00 other 1 other 2 other 3 User Name 1970-01-12T13:46:40+00:00 line 1 line 2 $ hg log -v --style xml tip test 2020-01-01T10:01:00+00:00 third fourth third second fourth User Name 1970-01-12T13:46:40+00:00 second second person 1970-01-18T08:40:01+00:00 merge person 1970-01-18T08:40:00+00:00 new head d foo person 1970-01-17T04:53:20+00:00 new branch person 1970-01-16T01:06:40+00:00 no user, no domain c other 1970-01-14T21:20:00+00:00 no person c A. N. Other 1970-01-13T17:33:20+00:00 other 1 other 2 other 3 b User Name 1970-01-12T13:46:40+00:00 line 1 line 2 a $ hg log --debug --style xml tip test 2020-01-01T10:01:00+00:00 third fourth third second fourth default User Name 1970-01-12T13:46:40+00:00 second second default person 1970-01-18T08:40:01+00:00 merge default person 1970-01-18T08:40:00+00:00 new head d default foo person 1970-01-17T04:53:20+00:00 new branch foo person 1970-01-16T01:06:40+00:00 no user, no domain c default other 1970-01-14T21:20:00+00:00 no person c default A. N. Other 1970-01-13T17:33:20+00:00 other 1 other 2 other 3 b default User Name 1970-01-12T13:46:40+00:00 line 1 line 2 a default Test JSON style: $ hg log -k nosuch -Tjson [] $ hg log -qr . -Tjson [ { "rev": 8, "node": "95c24699272ef57d062b8bccc32c878bf841784a" } ] $ hg log -vpr . 
-Tjson --stat [ { "rev": 8, "node": "95c24699272ef57d062b8bccc32c878bf841784a", "branch": "default", "phase": "draft", "user": "test", "date": [1577872860, 0], "desc": "third", "bookmarks": [], "tags": ["tip"], "parents": ["29114dbae42b9f078cf2714dbe3a86bba8ec7453"], "files": ["fourth", "second", "third"], "diffstat": " fourth | 1 +\n second | 1 -\n third | 1 +\n 3 files changed, 2 insertions(+), 1 deletions(-)\n", "diff": "diff -r 29114dbae42b -r 95c24699272e fourth\n--- /dev/null\tThu Jan 01 00:00:00 1970 +0000\n+++ b/fourth\tWed Jan 01 10:01:00 2020 +0000\n@@ -0,0 +1,1 @@\n+second\ndiff -r 29114dbae42b -r 95c24699272e second\n--- a/second\tMon Jan 12 13:46:40 1970 +0000\n+++ /dev/null\tThu Jan 01 00:00:00 1970 +0000\n@@ -1,1 +0,0 @@\n-second\ndiff -r 29114dbae42b -r 95c24699272e third\n--- /dev/null\tThu Jan 01 00:00:00 1970 +0000\n+++ b/third\tWed Jan 01 10:01:00 2020 +0000\n@@ -0,0 +1,1 @@\n+third\n" } ] honor --git but not format-breaking diffopts $ hg --config diff.noprefix=True log --git -vpr . 
-Tjson [ { "rev": 8, "node": "95c24699272ef57d062b8bccc32c878bf841784a", "branch": "default", "phase": "draft", "user": "test", "date": [1577872860, 0], "desc": "third", "bookmarks": [], "tags": ["tip"], "parents": ["29114dbae42b9f078cf2714dbe3a86bba8ec7453"], "files": ["fourth", "second", "third"], "diff": "diff --git a/second b/fourth\nrename from second\nrename to fourth\ndiff --git a/third b/third\nnew file mode 100644\n--- /dev/null\n+++ b/third\n@@ -0,0 +1,1 @@\n+third\n" } ] $ hg log -T json [ { "rev": 8, "node": "95c24699272ef57d062b8bccc32c878bf841784a", "branch": "default", "phase": "draft", "user": "test", "date": [1577872860, 0], "desc": "third", "bookmarks": [], "tags": ["tip"], "parents": ["29114dbae42b9f078cf2714dbe3a86bba8ec7453"] }, { "rev": 7, "node": "29114dbae42b9f078cf2714dbe3a86bba8ec7453", "branch": "default", "phase": "draft", "user": "User Name ", "date": [1000000, 0], "desc": "second", "bookmarks": [], "tags": [], "parents": ["0000000000000000000000000000000000000000"] }, { "rev": 6, "node": "d41e714fe50d9e4a5f11b4d595d543481b5f980b", "branch": "default", "phase": "draft", "user": "person", "date": [1500001, 0], "desc": "merge", "bookmarks": [], "tags": [], "parents": ["13207e5a10d9fd28ec424934298e176197f2c67f", "bbe44766e73d5f11ed2177f1838de10c53ef3e74"] }, { "rev": 5, "node": "13207e5a10d9fd28ec424934298e176197f2c67f", "branch": "default", "phase": "draft", "user": "person", "date": [1500000, 0], "desc": "new head", "bookmarks": [], "tags": [], "parents": ["10e46f2dcbf4823578cf180f33ecf0b957964c47"] }, { "rev": 4, "node": "bbe44766e73d5f11ed2177f1838de10c53ef3e74", "branch": "foo", "phase": "draft", "user": "person", "date": [1400000, 0], "desc": "new branch", "bookmarks": [], "tags": [], "parents": ["10e46f2dcbf4823578cf180f33ecf0b957964c47"] }, { "rev": 3, "node": "10e46f2dcbf4823578cf180f33ecf0b957964c47", "branch": "default", "phase": "draft", "user": "person", "date": [1300000, 0], "desc": "no user, no domain", "bookmarks": [], 
"tags": [], "parents": ["97054abb4ab824450e9164180baf491ae0078465"] }, { "rev": 2, "node": "97054abb4ab824450e9164180baf491ae0078465", "branch": "default", "phase": "draft", "user": "other@place", "date": [1200000, 0], "desc": "no person", "bookmarks": [], "tags": [], "parents": ["b608e9d1a3f0273ccf70fb85fd6866b3482bf965"] }, { "rev": 1, "node": "b608e9d1a3f0273ccf70fb85fd6866b3482bf965", "branch": "default", "phase": "draft", "user": "A. N. Other ", "date": [1100000, 0], "desc": "other 1\nother 2\n\nother 3", "bookmarks": [], "tags": [], "parents": ["1e4e1b8f71e05681d422154f5421e385fec3454f"] }, { "rev": 0, "node": "1e4e1b8f71e05681d422154f5421e385fec3454f", "branch": "default", "phase": "draft", "user": "User Name ", "date": [1000000, 0], "desc": "line 1\nline 2", "bookmarks": [], "tags": [], "parents": ["0000000000000000000000000000000000000000"] } ] $ hg heads -v -Tjson [ { "rev": 8, "node": "95c24699272ef57d062b8bccc32c878bf841784a", "branch": "default", "phase": "draft", "user": "test", "date": [1577872860, 0], "desc": "third", "bookmarks": [], "tags": ["tip"], "parents": ["29114dbae42b9f078cf2714dbe3a86bba8ec7453"], "files": ["fourth", "second", "third"] }, { "rev": 6, "node": "d41e714fe50d9e4a5f11b4d595d543481b5f980b", "branch": "default", "phase": "draft", "user": "person", "date": [1500001, 0], "desc": "merge", "bookmarks": [], "tags": [], "parents": ["13207e5a10d9fd28ec424934298e176197f2c67f", "bbe44766e73d5f11ed2177f1838de10c53ef3e74"], "files": [] }, { "rev": 4, "node": "bbe44766e73d5f11ed2177f1838de10c53ef3e74", "branch": "foo", "phase": "draft", "user": "person", "date": [1400000, 0], "desc": "new branch", "bookmarks": [], "tags": [], "parents": ["10e46f2dcbf4823578cf180f33ecf0b957964c47"], "files": [] } ] $ hg log --debug -Tjson [ { "rev": 8, "node": "95c24699272ef57d062b8bccc32c878bf841784a", "branch": "default", "phase": "draft", "user": "test", "date": [1577872860, 0], "desc": "third", "bookmarks": [], "tags": ["tip"], "parents": 
["29114dbae42b9f078cf2714dbe3a86bba8ec7453"], "manifest": "94961b75a2da554b4df6fb599e5bfc7d48de0c64", "extra": {"branch": "default"}, "modified": [], "added": ["fourth", "third"], "removed": ["second"] }, { "rev": 7, "node": "29114dbae42b9f078cf2714dbe3a86bba8ec7453", "branch": "default", "phase": "draft", "user": "User Name ", "date": [1000000, 0], "desc": "second", "bookmarks": [], "tags": [], "parents": ["0000000000000000000000000000000000000000"], "manifest": "f2dbc354b94e5ec0b4f10680ee0cee816101d0bf", "extra": {"branch": "default"}, "modified": [], "added": ["second"], "removed": [] }, { "rev": 6, "node": "d41e714fe50d9e4a5f11b4d595d543481b5f980b", "branch": "default", "phase": "draft", "user": "person", "date": [1500001, 0], "desc": "merge", "bookmarks": [], "tags": [], "parents": ["13207e5a10d9fd28ec424934298e176197f2c67f", "bbe44766e73d5f11ed2177f1838de10c53ef3e74"], "manifest": "4dc3def4f9b4c6e8de820f6ee74737f91e96a216", "extra": {"branch": "default"}, "modified": [], "added": [], "removed": [] }, { "rev": 5, "node": "13207e5a10d9fd28ec424934298e176197f2c67f", "branch": "default", "phase": "draft", "user": "person", "date": [1500000, 0], "desc": "new head", "bookmarks": [], "tags": [], "parents": ["10e46f2dcbf4823578cf180f33ecf0b957964c47"], "manifest": "4dc3def4f9b4c6e8de820f6ee74737f91e96a216", "extra": {"branch": "default"}, "modified": [], "added": ["d"], "removed": [] }, { "rev": 4, "node": "bbe44766e73d5f11ed2177f1838de10c53ef3e74", "branch": "foo", "phase": "draft", "user": "person", "date": [1400000, 0], "desc": "new branch", "bookmarks": [], "tags": [], "parents": ["10e46f2dcbf4823578cf180f33ecf0b957964c47"], "manifest": "cb5a1327723bada42f117e4c55a303246eaf9ccc", "extra": {"branch": "foo"}, "modified": [], "added": [], "removed": [] }, { "rev": 3, "node": "10e46f2dcbf4823578cf180f33ecf0b957964c47", "branch": "default", "phase": "draft", "user": "person", "date": [1300000, 0], "desc": "no user, no domain", "bookmarks": [], "tags": [], "parents": 
["97054abb4ab824450e9164180baf491ae0078465"], "manifest": "cb5a1327723bada42f117e4c55a303246eaf9ccc", "extra": {"branch": "default"}, "modified": ["c"], "added": [], "removed": [] }, { "rev": 2, "node": "97054abb4ab824450e9164180baf491ae0078465", "branch": "default", "phase": "draft", "user": "other@place", "date": [1200000, 0], "desc": "no person", "bookmarks": [], "tags": [], "parents": ["b608e9d1a3f0273ccf70fb85fd6866b3482bf965"], "manifest": "6e0e82995c35d0d57a52aca8da4e56139e06b4b1", "extra": {"branch": "default"}, "modified": [], "added": ["c"], "removed": [] }, { "rev": 1, "node": "b608e9d1a3f0273ccf70fb85fd6866b3482bf965", "branch": "default", "phase": "draft", "user": "A. N. Other ", "date": [1100000, 0], "desc": "other 1\nother 2\n\nother 3", "bookmarks": [], "tags": [], "parents": ["1e4e1b8f71e05681d422154f5421e385fec3454f"], "manifest": "4e8d705b1e53e3f9375e0e60dc7b525d8211fe55", "extra": {"branch": "default"}, "modified": [], "added": ["b"], "removed": [] }, { "rev": 0, "node": "1e4e1b8f71e05681d422154f5421e385fec3454f", "branch": "default", "phase": "draft", "user": "User Name ", "date": [1000000, 0], "desc": "line 1\nline 2", "bookmarks": [], "tags": [], "parents": ["0000000000000000000000000000000000000000"], "manifest": "a0c8bcbbb45c63b90b70ad007bf38961f64f2af0", "extra": {"branch": "default"}, "modified": [], "added": ["a"], "removed": [] } ] Error if style not readable: #if unix-permissions no-root $ touch q $ chmod 0 q $ hg log --style ./q abort: Permission denied: ./q [255] #endif Error if no style: $ hg log --style notexist abort: style 'notexist' not found (available styles: bisect, changelog, compact, default, phases, status, xml) [255] $ hg log -T list available styles: bisect, changelog, compact, default, phases, status, xml abort: specify a template [255] Error if style missing key: $ echo 'q = q' > t $ hg log --style ./t abort: "changeset" not in template map [255] Error if style missing value: $ echo 'changeset =' > t $ hg log --style t 
abort: t:1: missing value [255] Error if include fails: $ echo 'changeset = q' >> t #if unix-permissions no-root $ hg log --style ./t abort: template file ./q: Permission denied [255] $ rm q #endif Include works: $ echo '{rev}' > q $ hg log --style ./t 8 7 6 5 4 3 2 1 0 Check that recursive reference does not fall into RuntimeError (issue4758): common mistake: $ hg log -T '{changeset}\n' abort: recursive reference 'changeset' in template [255] circular reference: $ cat << EOF > issue4758 > changeset = '{foo}' > foo = '{changeset}' > EOF $ hg log --style ./issue4758 abort: recursive reference 'foo' in template [255] buildmap() -> gettemplate(), where no thunk was made: $ hg log -T '{files % changeset}\n' abort: recursive reference 'changeset' in template [255] not a recursion if a keyword of the same name exists: $ cat << EOF > issue4758 > changeset = '{tags % rev}' > rev = '{rev} {tag}\n' > EOF $ hg log --style ./issue4758 -r tip 8 tip Check that {phase} works correctly on parents: $ cat << EOF > parentphase > changeset_debug = '{rev} ({phase}):{parents}\n' > parent = ' {rev} ({phase})' > EOF $ hg phase -r 5 --public $ hg phase -r 7 --secret --force $ hg log --debug -G --style ./parentphase @ 8 (secret): 7 (secret) -1 (public) | o 7 (secret): -1 (public) -1 (public) o 6 (draft): 5 (public) 4 (draft) |\ | o 5 (public): 3 (public) -1 (public) | | o | 4 (draft): 3 (public) -1 (public) |/ o 3 (public): 2 (public) -1 (public) | o 2 (public): 1 (public) -1 (public) | o 1 (public): 0 (public) -1 (public) | o 0 (public): -1 (public) -1 (public) Missing non-standard names give no error (backward compatibility): $ echo "changeset = '{c}'" > t $ hg log --style ./t Defining non-standard name works: $ cat < t > changeset = '{c}' > c = q > EOF $ hg log --style ./t 8 7 6 5 4 3 2 1 0 ui.style works: $ echo '[ui]' > .hg/hgrc $ echo 'style = t' >> .hg/hgrc $ hg log 8 7 6 5 4 3 2 1 0 Issue338: $ hg log --style=changelog > changelog $ cat changelog 2020-01-01 test * fourth, second, 
third: third [95c24699272e] [tip] 1970-01-12 User Name * second: second [29114dbae42b] 1970-01-18 person * merge [d41e714fe50d] * d: new head [13207e5a10d9] 1970-01-17 person * new branch [bbe44766e73d] 1970-01-16 person * c: no user, no domain [10e46f2dcbf4] 1970-01-14 other * c: no person [97054abb4ab8] 1970-01-13 A. N. Other * b: other 1 other 2 other 3 [b608e9d1a3f0] 1970-01-12 User Name * a: line 1 line 2 [1e4e1b8f71e0] Issue2130: xml output for 'hg heads' is malformed $ hg heads --style changelog 2020-01-01 test * fourth, second, third: third [95c24699272e] [tip] 1970-01-18 person * merge [d41e714fe50d] 1970-01-17 person * new branch [bbe44766e73d] Keys work: $ for key in author branch branches date desc file_adds file_dels file_mods \ > file_copies file_copies_switch files \ > manifest node parents rev tags diffstat extras \ > p1rev p2rev p1node p2node; do > for mode in '' --verbose --debug; do > hg log $mode --template "$key$mode: {$key}\n" > done > done author: test author: User Name author: person author: person author: person author: person author: other@place author: A. N. Other author: User Name author--verbose: test author--verbose: User Name author--verbose: person author--verbose: person author--verbose: person author--verbose: person author--verbose: other@place author--verbose: A. N. Other author--verbose: User Name author--debug: test author--debug: User Name author--debug: person author--debug: person author--debug: person author--debug: person author--debug: other@place author--debug: A. N. 
Other author--debug: User Name branch: default branch: default branch: default branch: default branch: foo branch: default branch: default branch: default branch: default branch--verbose: default branch--verbose: default branch--verbose: default branch--verbose: default branch--verbose: foo branch--verbose: default branch--verbose: default branch--verbose: default branch--verbose: default branch--debug: default branch--debug: default branch--debug: default branch--debug: default branch--debug: foo branch--debug: default branch--debug: default branch--debug: default branch--debug: default branches: branches: branches: branches: branches: foo branches: branches: branches: branches: branches--verbose: branches--verbose: branches--verbose: branches--verbose: branches--verbose: foo branches--verbose: branches--verbose: branches--verbose: branches--verbose: branches--debug: branches--debug: branches--debug: branches--debug: branches--debug: foo branches--debug: branches--debug: branches--debug: branches--debug: date: 1577872860.00 date: 1000000.00 date: 1500001.00 date: 1500000.00 date: 1400000.00 date: 1300000.00 date: 1200000.00 date: 1100000.00 date: 1000000.00 date--verbose: 1577872860.00 date--verbose: 1000000.00 date--verbose: 1500001.00 date--verbose: 1500000.00 date--verbose: 1400000.00 date--verbose: 1300000.00 date--verbose: 1200000.00 date--verbose: 1100000.00 date--verbose: 1000000.00 date--debug: 1577872860.00 date--debug: 1000000.00 date--debug: 1500001.00 date--debug: 1500000.00 date--debug: 1400000.00 date--debug: 1300000.00 date--debug: 1200000.00 date--debug: 1100000.00 date--debug: 1000000.00 desc: third desc: second desc: merge desc: new head desc: new branch desc: no user, no domain desc: no person desc: other 1 other 2 other 3 desc: line 1 line 2 desc--verbose: third desc--verbose: second desc--verbose: merge desc--verbose: new head desc--verbose: new branch desc--verbose: no user, no domain desc--verbose: no person desc--verbose: other 1 other 2 
other 3 desc--verbose: line 1 line 2 desc--debug: third desc--debug: second desc--debug: merge desc--debug: new head desc--debug: new branch desc--debug: no user, no domain desc--debug: no person desc--debug: other 1 other 2 other 3 desc--debug: line 1 line 2 file_adds: fourth third file_adds: second file_adds: file_adds: d file_adds: file_adds: file_adds: c file_adds: b file_adds: a file_adds--verbose: fourth third file_adds--verbose: second file_adds--verbose: file_adds--verbose: d file_adds--verbose: file_adds--verbose: file_adds--verbose: c file_adds--verbose: b file_adds--verbose: a file_adds--debug: fourth third file_adds--debug: second file_adds--debug: file_adds--debug: d file_adds--debug: file_adds--debug: file_adds--debug: c file_adds--debug: b file_adds--debug: a file_dels: second file_dels: file_dels: file_dels: file_dels: file_dels: file_dels: file_dels: file_dels: file_dels--verbose: second file_dels--verbose: file_dels--verbose: file_dels--verbose: file_dels--verbose: file_dels--verbose: file_dels--verbose: file_dels--verbose: file_dels--verbose: file_dels--debug: second file_dels--debug: file_dels--debug: file_dels--debug: file_dels--debug: file_dels--debug: file_dels--debug: file_dels--debug: file_dels--debug: file_mods: file_mods: file_mods: file_mods: file_mods: file_mods: c file_mods: file_mods: file_mods: file_mods--verbose: file_mods--verbose: file_mods--verbose: file_mods--verbose: file_mods--verbose: file_mods--verbose: c file_mods--verbose: file_mods--verbose: file_mods--verbose: file_mods--debug: file_mods--debug: file_mods--debug: file_mods--debug: file_mods--debug: file_mods--debug: c file_mods--debug: file_mods--debug: file_mods--debug: file_copies: fourth (second) file_copies: file_copies: file_copies: file_copies: file_copies: file_copies: file_copies: file_copies: file_copies--verbose: fourth (second) file_copies--verbose: file_copies--verbose: file_copies--verbose: file_copies--verbose: file_copies--verbose: file_copies--verbose: 
file_copies--verbose: file_copies--verbose: file_copies--debug: fourth (second) file_copies--debug: file_copies--debug: file_copies--debug: file_copies--debug: file_copies--debug: file_copies--debug: file_copies--debug: file_copies--debug: file_copies_switch: file_copies_switch: file_copies_switch: file_copies_switch: file_copies_switch: file_copies_switch: file_copies_switch: file_copies_switch: file_copies_switch: file_copies_switch--verbose: file_copies_switch--verbose: file_copies_switch--verbose: file_copies_switch--verbose: file_copies_switch--verbose: file_copies_switch--verbose: file_copies_switch--verbose: file_copies_switch--verbose: file_copies_switch--verbose: file_copies_switch--debug: file_copies_switch--debug: file_copies_switch--debug: file_copies_switch--debug: file_copies_switch--debug: file_copies_switch--debug: file_copies_switch--debug: file_copies_switch--debug: file_copies_switch--debug: files: fourth second third files: second files: files: d files: files: c files: c files: b files: a files--verbose: fourth second third files--verbose: second files--verbose: files--verbose: d files--verbose: files--verbose: c files--verbose: c files--verbose: b files--verbose: a files--debug: fourth second third files--debug: second files--debug: files--debug: d files--debug: files--debug: c files--debug: c files--debug: b files--debug: a manifest: 6:94961b75a2da manifest: 5:f2dbc354b94e manifest: 4:4dc3def4f9b4 manifest: 4:4dc3def4f9b4 manifest: 3:cb5a1327723b manifest: 3:cb5a1327723b manifest: 2:6e0e82995c35 manifest: 1:4e8d705b1e53 manifest: 0:a0c8bcbbb45c manifest--verbose: 6:94961b75a2da manifest--verbose: 5:f2dbc354b94e manifest--verbose: 4:4dc3def4f9b4 manifest--verbose: 4:4dc3def4f9b4 manifest--verbose: 3:cb5a1327723b manifest--verbose: 3:cb5a1327723b manifest--verbose: 2:6e0e82995c35 manifest--verbose: 1:4e8d705b1e53 manifest--verbose: 0:a0c8bcbbb45c manifest--debug: 6:94961b75a2da554b4df6fb599e5bfc7d48de0c64 manifest--debug: 
5:f2dbc354b94e5ec0b4f10680ee0cee816101d0bf manifest--debug: 4:4dc3def4f9b4c6e8de820f6ee74737f91e96a216 manifest--debug: 4:4dc3def4f9b4c6e8de820f6ee74737f91e96a216 manifest--debug: 3:cb5a1327723bada42f117e4c55a303246eaf9ccc manifest--debug: 3:cb5a1327723bada42f117e4c55a303246eaf9ccc manifest--debug: 2:6e0e82995c35d0d57a52aca8da4e56139e06b4b1 manifest--debug: 1:4e8d705b1e53e3f9375e0e60dc7b525d8211fe55 manifest--debug: 0:a0c8bcbbb45c63b90b70ad007bf38961f64f2af0 node: 95c24699272ef57d062b8bccc32c878bf841784a node: 29114dbae42b9f078cf2714dbe3a86bba8ec7453 node: d41e714fe50d9e4a5f11b4d595d543481b5f980b node: 13207e5a10d9fd28ec424934298e176197f2c67f node: bbe44766e73d5f11ed2177f1838de10c53ef3e74 node: 10e46f2dcbf4823578cf180f33ecf0b957964c47 node: 97054abb4ab824450e9164180baf491ae0078465 node: b608e9d1a3f0273ccf70fb85fd6866b3482bf965 node: 1e4e1b8f71e05681d422154f5421e385fec3454f node--verbose: 95c24699272ef57d062b8bccc32c878bf841784a node--verbose: 29114dbae42b9f078cf2714dbe3a86bba8ec7453 node--verbose: d41e714fe50d9e4a5f11b4d595d543481b5f980b node--verbose: 13207e5a10d9fd28ec424934298e176197f2c67f node--verbose: bbe44766e73d5f11ed2177f1838de10c53ef3e74 node--verbose: 10e46f2dcbf4823578cf180f33ecf0b957964c47 node--verbose: 97054abb4ab824450e9164180baf491ae0078465 node--verbose: b608e9d1a3f0273ccf70fb85fd6866b3482bf965 node--verbose: 1e4e1b8f71e05681d422154f5421e385fec3454f node--debug: 95c24699272ef57d062b8bccc32c878bf841784a node--debug: 29114dbae42b9f078cf2714dbe3a86bba8ec7453 node--debug: d41e714fe50d9e4a5f11b4d595d543481b5f980b node--debug: 13207e5a10d9fd28ec424934298e176197f2c67f node--debug: bbe44766e73d5f11ed2177f1838de10c53ef3e74 node--debug: 10e46f2dcbf4823578cf180f33ecf0b957964c47 node--debug: 97054abb4ab824450e9164180baf491ae0078465 node--debug: b608e9d1a3f0273ccf70fb85fd6866b3482bf965 node--debug: 1e4e1b8f71e05681d422154f5421e385fec3454f parents: parents: -1:000000000000 parents: 5:13207e5a10d9 4:bbe44766e73d parents: 3:10e46f2dcbf4 parents: parents: parents: 
parents: parents: parents--verbose: parents--verbose: -1:000000000000 parents--verbose: 5:13207e5a10d9 4:bbe44766e73d parents--verbose: 3:10e46f2dcbf4 parents--verbose: parents--verbose: parents--verbose: parents--verbose: parents--verbose: parents--debug: 7:29114dbae42b9f078cf2714dbe3a86bba8ec7453 -1:0000000000000000000000000000000000000000 parents--debug: -1:0000000000000000000000000000000000000000 -1:0000000000000000000000000000000000000000 parents--debug: 5:13207e5a10d9fd28ec424934298e176197f2c67f 4:bbe44766e73d5f11ed2177f1838de10c53ef3e74 parents--debug: 3:10e46f2dcbf4823578cf180f33ecf0b957964c47 -1:0000000000000000000000000000000000000000 parents--debug: 3:10e46f2dcbf4823578cf180f33ecf0b957964c47 -1:0000000000000000000000000000000000000000 parents--debug: 2:97054abb4ab824450e9164180baf491ae0078465 -1:0000000000000000000000000000000000000000 parents--debug: 1:b608e9d1a3f0273ccf70fb85fd6866b3482bf965 -1:0000000000000000000000000000000000000000 parents--debug: 0:1e4e1b8f71e05681d422154f5421e385fec3454f -1:0000000000000000000000000000000000000000 parents--debug: -1:0000000000000000000000000000000000000000 -1:0000000000000000000000000000000000000000 rev: 8 rev: 7 rev: 6 rev: 5 rev: 4 rev: 3 rev: 2 rev: 1 rev: 0 rev--verbose: 8 rev--verbose: 7 rev--verbose: 6 rev--verbose: 5 rev--verbose: 4 rev--verbose: 3 rev--verbose: 2 rev--verbose: 1 rev--verbose: 0 rev--debug: 8 rev--debug: 7 rev--debug: 6 rev--debug: 5 rev--debug: 4 rev--debug: 3 rev--debug: 2 rev--debug: 1 rev--debug: 0 tags: tip tags: tags: tags: tags: tags: tags: tags: tags: tags--verbose: tip tags--verbose: tags--verbose: tags--verbose: tags--verbose: tags--verbose: tags--verbose: tags--verbose: tags--verbose: tags--debug: tip tags--debug: tags--debug: tags--debug: tags--debug: tags--debug: tags--debug: tags--debug: tags--debug: diffstat: 3: +2/-1 diffstat: 1: +1/-0 diffstat: 0: +0/-0 diffstat: 1: +1/-0 diffstat: 0: +0/-0 diffstat: 1: +1/-0 diffstat: 1: +4/-0 diffstat: 1: +2/-0 diffstat: 1: +1/-0 
diffstat--verbose: 3: +2/-1 diffstat--verbose: 1: +1/-0 diffstat--verbose: 0: +0/-0 diffstat--verbose: 1: +1/-0 diffstat--verbose: 0: +0/-0 diffstat--verbose: 1: +1/-0 diffstat--verbose: 1: +4/-0 diffstat--verbose: 1: +2/-0 diffstat--verbose: 1: +1/-0 diffstat--debug: 3: +2/-1 diffstat--debug: 1: +1/-0 diffstat--debug: 0: +0/-0 diffstat--debug: 1: +1/-0 diffstat--debug: 0: +0/-0 diffstat--debug: 1: +1/-0 diffstat--debug: 1: +4/-0 diffstat--debug: 1: +2/-0 diffstat--debug: 1: +1/-0 extras: branch=default extras: branch=default extras: branch=default extras: branch=default extras: branch=foo extras: branch=default extras: branch=default extras: branch=default extras: branch=default extras--verbose: branch=default extras--verbose: branch=default extras--verbose: branch=default extras--verbose: branch=default extras--verbose: branch=foo extras--verbose: branch=default extras--verbose: branch=default extras--verbose: branch=default extras--verbose: branch=default extras--debug: branch=default extras--debug: branch=default extras--debug: branch=default extras--debug: branch=default extras--debug: branch=foo extras--debug: branch=default extras--debug: branch=default extras--debug: branch=default extras--debug: branch=default p1rev: 7 p1rev: -1 p1rev: 5 p1rev: 3 p1rev: 3 p1rev: 2 p1rev: 1 p1rev: 0 p1rev: -1 p1rev--verbose: 7 p1rev--verbose: -1 p1rev--verbose: 5 p1rev--verbose: 3 p1rev--verbose: 3 p1rev--verbose: 2 p1rev--verbose: 1 p1rev--verbose: 0 p1rev--verbose: -1 p1rev--debug: 7 p1rev--debug: -1 p1rev--debug: 5 p1rev--debug: 3 p1rev--debug: 3 p1rev--debug: 2 p1rev--debug: 1 p1rev--debug: 0 p1rev--debug: -1 p2rev: -1 p2rev: -1 p2rev: 4 p2rev: -1 p2rev: -1 p2rev: -1 p2rev: -1 p2rev: -1 p2rev: -1 p2rev--verbose: -1 p2rev--verbose: -1 p2rev--verbose: 4 p2rev--verbose: -1 p2rev--verbose: -1 p2rev--verbose: -1 p2rev--verbose: -1 p2rev--verbose: -1 p2rev--verbose: -1 p2rev--debug: -1 p2rev--debug: -1 p2rev--debug: 4 p2rev--debug: -1 p2rev--debug: -1 p2rev--debug: -1 
p2rev--debug: -1 p2rev--debug: -1 p2rev--debug: -1 p1node: 29114dbae42b9f078cf2714dbe3a86bba8ec7453 p1node: 0000000000000000000000000000000000000000 p1node: 13207e5a10d9fd28ec424934298e176197f2c67f p1node: 10e46f2dcbf4823578cf180f33ecf0b957964c47 p1node: 10e46f2dcbf4823578cf180f33ecf0b957964c47 p1node: 97054abb4ab824450e9164180baf491ae0078465 p1node: b608e9d1a3f0273ccf70fb85fd6866b3482bf965 p1node: 1e4e1b8f71e05681d422154f5421e385fec3454f p1node: 0000000000000000000000000000000000000000 p1node--verbose: 29114dbae42b9f078cf2714dbe3a86bba8ec7453 p1node--verbose: 0000000000000000000000000000000000000000 p1node--verbose: 13207e5a10d9fd28ec424934298e176197f2c67f p1node--verbose: 10e46f2dcbf4823578cf180f33ecf0b957964c47 p1node--verbose: 10e46f2dcbf4823578cf180f33ecf0b957964c47 p1node--verbose: 97054abb4ab824450e9164180baf491ae0078465 p1node--verbose: b608e9d1a3f0273ccf70fb85fd6866b3482bf965 p1node--verbose: 1e4e1b8f71e05681d422154f5421e385fec3454f p1node--verbose: 0000000000000000000000000000000000000000 p1node--debug: 29114dbae42b9f078cf2714dbe3a86bba8ec7453 p1node--debug: 0000000000000000000000000000000000000000 p1node--debug: 13207e5a10d9fd28ec424934298e176197f2c67f p1node--debug: 10e46f2dcbf4823578cf180f33ecf0b957964c47 p1node--debug: 10e46f2dcbf4823578cf180f33ecf0b957964c47 p1node--debug: 97054abb4ab824450e9164180baf491ae0078465 p1node--debug: b608e9d1a3f0273ccf70fb85fd6866b3482bf965 p1node--debug: 1e4e1b8f71e05681d422154f5421e385fec3454f p1node--debug: 0000000000000000000000000000000000000000 p2node: 0000000000000000000000000000000000000000 p2node: 0000000000000000000000000000000000000000 p2node: bbe44766e73d5f11ed2177f1838de10c53ef3e74 p2node: 0000000000000000000000000000000000000000 p2node: 0000000000000000000000000000000000000000 p2node: 0000000000000000000000000000000000000000 p2node: 0000000000000000000000000000000000000000 p2node: 0000000000000000000000000000000000000000 p2node: 0000000000000000000000000000000000000000 p2node--verbose: 
0000000000000000000000000000000000000000 p2node--verbose: 0000000000000000000000000000000000000000 p2node--verbose: bbe44766e73d5f11ed2177f1838de10c53ef3e74 p2node--verbose: 0000000000000000000000000000000000000000 p2node--verbose: 0000000000000000000000000000000000000000 p2node--verbose: 0000000000000000000000000000000000000000 p2node--verbose: 0000000000000000000000000000000000000000 p2node--verbose: 0000000000000000000000000000000000000000 p2node--verbose: 0000000000000000000000000000000000000000 p2node--debug: 0000000000000000000000000000000000000000 p2node--debug: 0000000000000000000000000000000000000000 p2node--debug: bbe44766e73d5f11ed2177f1838de10c53ef3e74 p2node--debug: 0000000000000000000000000000000000000000 p2node--debug: 0000000000000000000000000000000000000000 p2node--debug: 0000000000000000000000000000000000000000 p2node--debug: 0000000000000000000000000000000000000000 p2node--debug: 0000000000000000000000000000000000000000 p2node--debug: 0000000000000000000000000000000000000000 Filters work: $ hg log --template '{author|domain}\n' hostname place place hostname $ hg log --template '{author|person}\n' test User Name person person person person other A. N. 
Other User Name $ hg log --template '{author|user}\n' test user person person person person other other user $ hg log --template '{date|date}\n' Wed Jan 01 10:01:00 2020 +0000 Mon Jan 12 13:46:40 1970 +0000 Sun Jan 18 08:40:01 1970 +0000 Sun Jan 18 08:40:00 1970 +0000 Sat Jan 17 04:53:20 1970 +0000 Fri Jan 16 01:06:40 1970 +0000 Wed Jan 14 21:20:00 1970 +0000 Tue Jan 13 17:33:20 1970 +0000 Mon Jan 12 13:46:40 1970 +0000 $ hg log --template '{date|isodate}\n' 2020-01-01 10:01 +0000 1970-01-12 13:46 +0000 1970-01-18 08:40 +0000 1970-01-18 08:40 +0000 1970-01-17 04:53 +0000 1970-01-16 01:06 +0000 1970-01-14 21:20 +0000 1970-01-13 17:33 +0000 1970-01-12 13:46 +0000 $ hg log --template '{date|isodatesec}\n' 2020-01-01 10:01:00 +0000 1970-01-12 13:46:40 +0000 1970-01-18 08:40:01 +0000 1970-01-18 08:40:00 +0000 1970-01-17 04:53:20 +0000 1970-01-16 01:06:40 +0000 1970-01-14 21:20:00 +0000 1970-01-13 17:33:20 +0000 1970-01-12 13:46:40 +0000 $ hg log --template '{date|rfc822date}\n' Wed, 01 Jan 2020 10:01:00 +0000 Mon, 12 Jan 1970 13:46:40 +0000 Sun, 18 Jan 1970 08:40:01 +0000 Sun, 18 Jan 1970 08:40:00 +0000 Sat, 17 Jan 1970 04:53:20 +0000 Fri, 16 Jan 1970 01:06:40 +0000 Wed, 14 Jan 1970 21:20:00 +0000 Tue, 13 Jan 1970 17:33:20 +0000 Mon, 12 Jan 1970 13:46:40 +0000 $ hg log --template '{desc|firstline}\n' third second merge new head new branch no user, no domain no person other 1 line 1 $ hg log --template '{node|short}\n' 95c24699272e 29114dbae42b d41e714fe50d 13207e5a10d9 bbe44766e73d 10e46f2dcbf4 97054abb4ab8 b608e9d1a3f0 1e4e1b8f71e0 $ hg log --template '\n' $ hg log --template '{rev}: {children}\n' 8: 7: 8:95c24699272e 6: 5: 6:d41e714fe50d 4: 6:d41e714fe50d 3: 4:bbe44766e73d 5:13207e5a10d9 2: 3:10e46f2dcbf4 1: 2:97054abb4ab8 0: 1:b608e9d1a3f0 Formatnode filter works: $ hg -q log -r 0 --template '{node|formatnode}\n' 1e4e1b8f71e0 $ hg log -r 0 --template '{node|formatnode}\n' 1e4e1b8f71e0 $ hg -v log -r 0 --template '{node|formatnode}\n' 1e4e1b8f71e0 $ hg --debug log -r 
0 --template '{node|formatnode}\n' 1e4e1b8f71e05681d422154f5421e385fec3454f Age filter: $ hg init unstable-hash $ cd unstable-hash $ hg log --template '{date|age}\n' > /dev/null || exit 1 >>> from datetime import datetime, timedelta >>> fp = open('a', 'w') >>> n = datetime.now() + timedelta(366 * 7) >>> fp.write('%d-%d-%d 00:00' % (n.year, n.month, n.day)) >>> fp.close() $ hg add a $ hg commit -m future -d "`cat a`" $ hg log -l1 --template '{date|age}\n' 7 years from now $ cd .. $ rm -rf unstable-hash Add a dummy commit to make up for the instability of the above: $ echo a > a $ hg add a $ hg ci -m future Count filter: $ hg log -l1 --template '{node|count} {node|short|count}\n' 40 12 $ hg log -l1 --template '{revset("null^")|count} {revset(".")|count} {revset("0::3")|count}\n' 0 1 4 $ hg log -G --template '{rev}: children: {children|count}, \ > tags: {tags|count}, file_adds: {file_adds|count}, \ > ancestors: {revset("ancestors(%s)", rev)|count}' @ 9: children: 0, tags: 1, file_adds: 1, ancestors: 3 | o 8: children: 1, tags: 0, file_adds: 2, ancestors: 2 | o 7: children: 1, tags: 0, file_adds: 1, ancestors: 1 o 6: children: 0, tags: 0, file_adds: 0, ancestors: 7 |\ | o 5: children: 1, tags: 0, file_adds: 1, ancestors: 5 | | o | 4: children: 1, tags: 0, file_adds: 0, ancestors: 5 |/ o 3: children: 2, tags: 0, file_adds: 0, ancestors: 4 | o 2: children: 1, tags: 0, file_adds: 1, ancestors: 3 | o 1: children: 1, tags: 0, file_adds: 1, ancestors: 2 | o 0: children: 1, tags: 0, file_adds: 1, ancestors: 1 Upper/lower filters: $ hg log -r0 --template '{branch|upper}\n' DEFAULT $ hg log -r0 --template '{author|lower}\n' user name $ hg log -r0 --template '{date|upper}\n' abort: template filter 'upper' is not compatible with keyword 'date' [255] Add a commit that does all possible modifications at once $ echo modify >> third $ touch b $ hg add b $ hg mv fourth fifth $ hg rm a $ hg ci -m "Modify, add, remove, rename" Check the status template $ cat <> $HGRCPATH > [extensions] 
> color= > EOF $ hg log -T status -r 10 changeset: 10:0f9759ec227a tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Modify, add, remove, rename files: M third A b A fifth R a R fourth $ hg log -T status -C -r 10 changeset: 10:0f9759ec227a tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Modify, add, remove, rename files: M third A b A fifth fourth R a R fourth $ hg log -T status -C -r 10 -v changeset: 10:0f9759ec227a tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 description: Modify, add, remove, rename files: M third A b A fifth fourth R a R fourth $ hg log -T status -C -r 10 --debug changeset: 10:0f9759ec227a4859c2014a345cd8a859022b7c6c tag: tip phase: secret parent: 9:bf9dfba36635106d6a73ccc01e28b762da60e066 parent: -1:0000000000000000000000000000000000000000 manifest: 8:89dd546f2de0a9d6d664f58d86097eb97baba567 user: test date: Thu Jan 01 00:00:00 1970 +0000 extra: branch=default description: Modify, add, remove, rename files: M third A b A fifth fourth R a R fourth $ hg log -T status -C -r 10 --quiet 10:0f9759ec227a $ hg --color=debug log -T status -r 10 [log.changeset changeset.secret|changeset: 10:0f9759ec227a] [log.tag|tag: tip] [log.user|user: test] [log.date|date: Thu Jan 01 00:00:00 1970 +0000] [log.summary|summary: Modify, add, remove, rename] [ui.note log.files|files:] [status.modified|M third] [status.added|A b] [status.added|A fifth] [status.removed|R a] [status.removed|R fourth] $ hg --color=debug log -T status -C -r 10 [log.changeset changeset.secret|changeset: 10:0f9759ec227a] [log.tag|tag: tip] [log.user|user: test] [log.date|date: Thu Jan 01 00:00:00 1970 +0000] [log.summary|summary: Modify, add, remove, rename] [ui.note log.files|files:] [status.modified|M third] [status.added|A b] [status.added|A fifth] [status.copied| fourth] [status.removed|R a] [status.removed|R fourth] $ hg --color=debug log -T status -C -r 10 -v [log.changeset changeset.secret|changeset: 10:0f9759ec227a] [log.tag|tag: tip] 
[log.user|user: test] [log.date|date: Thu Jan 01 00:00:00 1970 +0000] [ui.note log.description|description:] [ui.note log.description|Modify, add, remove, rename] [ui.note log.files|files:] [status.modified|M third] [status.added|A b] [status.added|A fifth] [status.copied| fourth] [status.removed|R a] [status.removed|R fourth] $ hg --color=debug log -T status -C -r 10 --debug [log.changeset changeset.secret|changeset: 10:0f9759ec227a4859c2014a345cd8a859022b7c6c] [log.tag|tag: tip] [log.phase|phase: secret] [log.parent changeset.secret|parent: 9:bf9dfba36635106d6a73ccc01e28b762da60e066] [log.parent changeset.public|parent: -1:0000000000000000000000000000000000000000] [ui.debug log.manifest|manifest: 8:89dd546f2de0a9d6d664f58d86097eb97baba567] [log.user|user: test] [log.date|date: Thu Jan 01 00:00:00 1970 +0000] [ui.debug log.extra|extra: branch=default] [ui.note log.description|description:] [ui.note log.description|Modify, add, remove, rename] [ui.note log.files|files:] [status.modified|M third] [status.added|A b] [status.added|A fifth] [status.copied| fourth] [status.removed|R a] [status.removed|R fourth] $ hg --color=debug log -T status -C -r 10 --quiet [log.node|10:0f9759ec227a] Check the bisect template $ hg bisect -g 1 $ hg bisect -b 3 --noupdate Testing changeset 2:97054abb4ab8 (2 changesets remaining, ~1 tests) $ hg log -T bisect -r 0:4 changeset: 0:1e4e1b8f71e0 bisect: good (implicit) user: User Name date: Mon Jan 12 13:46:40 1970 +0000 summary: line 1 changeset: 1:b608e9d1a3f0 bisect: good user: A. N. 
Other date: Tue Jan 13 17:33:20 1970 +0000 summary: other 1 changeset: 2:97054abb4ab8 bisect: untested user: other@place date: Wed Jan 14 21:20:00 1970 +0000 summary: no person changeset: 3:10e46f2dcbf4 bisect: bad user: person date: Fri Jan 16 01:06:40 1970 +0000 summary: no user, no domain changeset: 4:bbe44766e73d bisect: bad (implicit) branch: foo user: person date: Sat Jan 17 04:53:20 1970 +0000 summary: new branch $ hg log --debug -T bisect -r 0:4 changeset: 0:1e4e1b8f71e05681d422154f5421e385fec3454f bisect: good (implicit) phase: public parent: -1:0000000000000000000000000000000000000000 parent: -1:0000000000000000000000000000000000000000 manifest: 0:a0c8bcbbb45c63b90b70ad007bf38961f64f2af0 user: User Name date: Mon Jan 12 13:46:40 1970 +0000 files+: a extra: branch=default description: line 1 line 2 changeset: 1:b608e9d1a3f0273ccf70fb85fd6866b3482bf965 bisect: good phase: public parent: 0:1e4e1b8f71e05681d422154f5421e385fec3454f parent: -1:0000000000000000000000000000000000000000 manifest: 1:4e8d705b1e53e3f9375e0e60dc7b525d8211fe55 user: A. N. 
Other date: Tue Jan 13 17:33:20 1970 +0000 files+: b extra: branch=default description: other 1 other 2 other 3 changeset: 2:97054abb4ab824450e9164180baf491ae0078465 bisect: untested phase: public parent: 1:b608e9d1a3f0273ccf70fb85fd6866b3482bf965 parent: -1:0000000000000000000000000000000000000000 manifest: 2:6e0e82995c35d0d57a52aca8da4e56139e06b4b1 user: other@place date: Wed Jan 14 21:20:00 1970 +0000 files+: c extra: branch=default description: no person changeset: 3:10e46f2dcbf4823578cf180f33ecf0b957964c47 bisect: bad phase: public parent: 2:97054abb4ab824450e9164180baf491ae0078465 parent: -1:0000000000000000000000000000000000000000 manifest: 3:cb5a1327723bada42f117e4c55a303246eaf9ccc user: person date: Fri Jan 16 01:06:40 1970 +0000 files: c extra: branch=default description: no user, no domain changeset: 4:bbe44766e73d5f11ed2177f1838de10c53ef3e74 bisect: bad (implicit) branch: foo phase: draft parent: 3:10e46f2dcbf4823578cf180f33ecf0b957964c47 parent: -1:0000000000000000000000000000000000000000 manifest: 3:cb5a1327723bada42f117e4c55a303246eaf9ccc user: person date: Sat Jan 17 04:53:20 1970 +0000 extra: branch=foo description: new branch $ hg log -v -T bisect -r 0:4 changeset: 0:1e4e1b8f71e0 bisect: good (implicit) user: User Name date: Mon Jan 12 13:46:40 1970 +0000 files: a description: line 1 line 2 changeset: 1:b608e9d1a3f0 bisect: good user: A. N. 
Other date: Tue Jan 13 17:33:20 1970 +0000 files: b description: other 1 other 2 other 3 changeset: 2:97054abb4ab8 bisect: untested user: other@place date: Wed Jan 14 21:20:00 1970 +0000 files: c description: no person changeset: 3:10e46f2dcbf4 bisect: bad user: person date: Fri Jan 16 01:06:40 1970 +0000 files: c description: no user, no domain changeset: 4:bbe44766e73d bisect: bad (implicit) branch: foo user: person date: Sat Jan 17 04:53:20 1970 +0000 description: new branch $ hg --color=debug log -T bisect -r 0:4 [log.changeset changeset.public|changeset: 0:1e4e1b8f71e0] [log.bisect bisect.good|bisect: good (implicit)] [log.user|user: User Name ] [log.date|date: Mon Jan 12 13:46:40 1970 +0000] [log.summary|summary: line 1] [log.changeset changeset.public|changeset: 1:b608e9d1a3f0] [log.bisect bisect.good|bisect: good] [log.user|user: A. N. Other ] [log.date|date: Tue Jan 13 17:33:20 1970 +0000] [log.summary|summary: other 1] [log.changeset changeset.public|changeset: 2:97054abb4ab8] [log.bisect bisect.untested|bisect: untested] [log.user|user: other@place] [log.date|date: Wed Jan 14 21:20:00 1970 +0000] [log.summary|summary: no person] [log.changeset changeset.public|changeset: 3:10e46f2dcbf4] [log.bisect bisect.bad|bisect: bad] [log.user|user: person] [log.date|date: Fri Jan 16 01:06:40 1970 +0000] [log.summary|summary: no user, no domain] [log.changeset changeset.draft|changeset: 4:bbe44766e73d] [log.bisect bisect.bad|bisect: bad (implicit)] [log.branch|branch: foo] [log.user|user: person] [log.date|date: Sat Jan 17 04:53:20 1970 +0000] [log.summary|summary: new branch] $ hg --color=debug log --debug -T bisect -r 0:4 [log.changeset changeset.public|changeset: 0:1e4e1b8f71e05681d422154f5421e385fec3454f] [log.bisect bisect.good|bisect: good (implicit)] [log.phase|phase: public] [log.parent changeset.public|parent: -1:0000000000000000000000000000000000000000] [log.parent changeset.public|parent: -1:0000000000000000000000000000000000000000] [ui.debug 
log.manifest|manifest: 0:a0c8bcbbb45c63b90b70ad007bf38961f64f2af0] [log.user|user: User Name ] [log.date|date: Mon Jan 12 13:46:40 1970 +0000] [ui.debug log.files|files+: a] [ui.debug log.extra|extra: branch=default] [ui.note log.description|description:] [ui.note log.description|line 1 line 2] [log.changeset changeset.public|changeset: 1:b608e9d1a3f0273ccf70fb85fd6866b3482bf965] [log.bisect bisect.good|bisect: good] [log.phase|phase: public] [log.parent changeset.public|parent: 0:1e4e1b8f71e05681d422154f5421e385fec3454f] [log.parent changeset.public|parent: -1:0000000000000000000000000000000000000000] [ui.debug log.manifest|manifest: 1:4e8d705b1e53e3f9375e0e60dc7b525d8211fe55] [log.user|user: A. N. Other ] [log.date|date: Tue Jan 13 17:33:20 1970 +0000] [ui.debug log.files|files+: b] [ui.debug log.extra|extra: branch=default] [ui.note log.description|description:] [ui.note log.description|other 1 other 2 other 3] [log.changeset changeset.public|changeset: 2:97054abb4ab824450e9164180baf491ae0078465] [log.bisect bisect.untested|bisect: untested] [log.phase|phase: public] [log.parent changeset.public|parent: 1:b608e9d1a3f0273ccf70fb85fd6866b3482bf965] [log.parent changeset.public|parent: -1:0000000000000000000000000000000000000000] [ui.debug log.manifest|manifest: 2:6e0e82995c35d0d57a52aca8da4e56139e06b4b1] [log.user|user: other@place] [log.date|date: Wed Jan 14 21:20:00 1970 +0000] [ui.debug log.files|files+: c] [ui.debug log.extra|extra: branch=default] [ui.note log.description|description:] [ui.note log.description|no person] [log.changeset changeset.public|changeset: 3:10e46f2dcbf4823578cf180f33ecf0b957964c47] [log.bisect bisect.bad|bisect: bad] [log.phase|phase: public] [log.parent changeset.public|parent: 2:97054abb4ab824450e9164180baf491ae0078465] [log.parent changeset.public|parent: -1:0000000000000000000000000000000000000000] [ui.debug log.manifest|manifest: 3:cb5a1327723bada42f117e4c55a303246eaf9ccc] [log.user|user: person] [log.date|date: Fri Jan 16 
01:06:40 1970 +0000] [ui.debug log.files|files: c] [ui.debug log.extra|extra: branch=default] [ui.note log.description|description:] [ui.note log.description|no user, no domain] [log.changeset changeset.draft|changeset: 4:bbe44766e73d5f11ed2177f1838de10c53ef3e74] [log.bisect bisect.bad|bisect: bad (implicit)] [log.branch|branch: foo] [log.phase|phase: draft] [log.parent changeset.public|parent: 3:10e46f2dcbf4823578cf180f33ecf0b957964c47] [log.parent changeset.public|parent: -1:0000000000000000000000000000000000000000] [ui.debug log.manifest|manifest: 3:cb5a1327723bada42f117e4c55a303246eaf9ccc] [log.user|user: person] [log.date|date: Sat Jan 17 04:53:20 1970 +0000] [ui.debug log.extra|extra: branch=foo] [ui.note log.description|description:] [ui.note log.description|new branch] $ hg --color=debug log -v -T bisect -r 0:4 [log.changeset changeset.public|changeset: 0:1e4e1b8f71e0] [log.bisect bisect.good|bisect: good (implicit)] [log.user|user: User Name ] [log.date|date: Mon Jan 12 13:46:40 1970 +0000] [ui.note log.files|files: a] [ui.note log.description|description:] [ui.note log.description|line 1 line 2] [log.changeset changeset.public|changeset: 1:b608e9d1a3f0] [log.bisect bisect.good|bisect: good] [log.user|user: A. N. 
Other ] [log.date|date: Tue Jan 13 17:33:20 1970 +0000] [ui.note log.files|files: b] [ui.note log.description|description:] [ui.note log.description|other 1 other 2 other 3] [log.changeset changeset.public|changeset: 2:97054abb4ab8] [log.bisect bisect.untested|bisect: untested] [log.user|user: other@place] [log.date|date: Wed Jan 14 21:20:00 1970 +0000] [ui.note log.files|files: c] [ui.note log.description|description:] [ui.note log.description|no person] [log.changeset changeset.public|changeset: 3:10e46f2dcbf4] [log.bisect bisect.bad|bisect: bad] [log.user|user: person] [log.date|date: Fri Jan 16 01:06:40 1970 +0000] [ui.note log.files|files: c] [ui.note log.description|description:] [ui.note log.description|no user, no domain] [log.changeset changeset.draft|changeset: 4:bbe44766e73d] [log.bisect bisect.bad|bisect: bad (implicit)] [log.branch|branch: foo] [log.user|user: person] [log.date|date: Sat Jan 17 04:53:20 1970 +0000] [ui.note log.description|description:] [ui.note log.description|new branch] $ hg bisect --reset Error on syntax: $ echo 'x = "f' >> t $ hg log abort: t:3: unmatched quotes [255] $ hg log -T '{date' hg: parse error at 1: unterminated template expansion [255] Behind the scenes, this will throw TypeError $ hg log -l 3 --template '{date|obfuscate}\n' abort: template filter 'obfuscate' is not compatible with keyword 'date' [255] Behind the scenes, this will throw a ValueError $ hg log -l 3 --template 'line: {desc|shortdate}\n' abort: template filter 'shortdate' is not compatible with keyword 'desc' [255] Behind the scenes, this will throw AttributeError $ hg log -l 3 --template 'line: {date|escape}\n' abort: template filter 'escape' is not compatible with keyword 'date' [255] $ hg log -l 3 --template 'line: {extras|localdate}\n' hg: parse error: localdate expects a date information [255] Behind the scenes, this will throw ValueError $ hg tip --template '{author|email|date}\n' hg: parse error: date expects a date information [255] Error in nested 
template: $ hg log -T '{"date' hg: parse error at 2: unterminated string [255] $ hg log -T '{"foo{date|=}"}' hg: parse error at 11: syntax error [255] Thrown an error if a template function doesn't exist $ hg tip --template '{foo()}\n' hg: parse error: unknown function 'foo' [255] Pass generator object created by template function to filter $ hg log -l 1 --template '{if(author, author)|user}\n' test Test diff function: $ hg diff -c 8 diff -r 29114dbae42b -r 95c24699272e fourth --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/fourth Wed Jan 01 10:01:00 2020 +0000 @@ -0,0 +1,1 @@ +second diff -r 29114dbae42b -r 95c24699272e second --- a/second Mon Jan 12 13:46:40 1970 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -second diff -r 29114dbae42b -r 95c24699272e third --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/third Wed Jan 01 10:01:00 2020 +0000 @@ -0,0 +1,1 @@ +third $ hg log -r 8 -T "{diff()}" diff -r 29114dbae42b -r 95c24699272e fourth --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/fourth Wed Jan 01 10:01:00 2020 +0000 @@ -0,0 +1,1 @@ +second diff -r 29114dbae42b -r 95c24699272e second --- a/second Mon Jan 12 13:46:40 1970 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -second diff -r 29114dbae42b -r 95c24699272e third --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/third Wed Jan 01 10:01:00 2020 +0000 @@ -0,0 +1,1 @@ +third $ hg log -r 8 -T "{diff('glob:f*')}" diff -r 29114dbae42b -r 95c24699272e fourth --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/fourth Wed Jan 01 10:01:00 2020 +0000 @@ -0,0 +1,1 @@ +second $ hg log -r 8 -T "{diff('', 'glob:f*')}" diff -r 29114dbae42b -r 95c24699272e second --- a/second Mon Jan 12 13:46:40 1970 +0000 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +0,0 @@ -second diff -r 29114dbae42b -r 95c24699272e third --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/third Wed Jan 01 10:01:00 2020 +0000 @@ -0,0 +1,1 @@ +third $ hg log -r 8 -T "{diff('FOURTH'|lower)}" diff -r 
29114dbae42b -r 95c24699272e fourth --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/fourth Wed Jan 01 10:01:00 2020 +0000 @@ -0,0 +1,1 @@ +second $ cd .. latesttag: $ hg init latesttag $ cd latesttag $ echo a > file $ hg ci -Am a -d '0 0' adding file $ echo b >> file $ hg ci -m b -d '1 0' $ echo c >> head1 $ hg ci -Am h1c -d '2 0' adding head1 $ hg update -q 1 $ echo d >> head2 $ hg ci -Am h2d -d '3 0' adding head2 created new head $ echo e >> head2 $ hg ci -m h2e -d '4 0' $ hg merge -q $ hg ci -m merge -d '5 -3600' No tag set: $ hg log --template '{rev}: {latesttag}+{latesttagdistance}\n' 5: null+5 4: null+4 3: null+3 2: null+3 1: null+2 0: null+1 One common tag: longest path wins: $ hg tag -r 1 -m t1 -d '6 0' t1 $ hg log --template '{rev}: {latesttag}+{latesttagdistance}\n' 6: t1+4 5: t1+3 4: t1+2 3: t1+1 2: t1+1 1: t1+0 0: null+1 One ancestor tag: more recent wins: $ hg tag -r 2 -m t2 -d '7 0' t2 $ hg log --template '{rev}: {latesttag}+{latesttagdistance}\n' 7: t2+3 6: t2+2 5: t2+1 4: t1+2 3: t1+1 2: t2+0 1: t1+0 0: null+1 Two branch tags: more recent wins: $ hg tag -r 3 -m t3 -d '8 0' t3 $ hg log --template '{rev}: {latesttag}+{latesttagdistance}\n' 8: t3+5 7: t3+4 6: t3+3 5: t3+2 4: t3+1 3: t3+0 2: t2+0 1: t1+0 0: null+1 Merged tag overrides: $ hg tag -r 5 -m t5 -d '9 0' t5 $ hg tag -r 3 -m at3 -d '10 0' at3 $ hg log --template '{rev}: {latesttag}+{latesttagdistance}\n' 10: t5+5 9: t5+4 8: t5+3 7: t5+2 6: t5+1 5: t5+0 4: at3:t3+1 3: at3:t3+0 2: t2+0 1: t1+0 0: null+1 $ hg log --template "{rev}: {latesttag % '{tag}+{distance},{changes} '}\n" 10: t5+5,5 9: t5+4,4 8: t5+3,3 7: t5+2,2 6: t5+1,1 5: t5+0,0 4: at3+1,1 t3+1,1 3: at3+0,0 t3+0,0 2: t2+0,0 1: t1+0,0 0: null+1,1 $ hg log --template "{rev}: {latesttag('re:^t[13]$') % '{tag}, C: {changes}, D: {distance}'}\n" 10: t3, C: 8, D: 7 9: t3, C: 7, D: 6 8: t3, C: 6, D: 5 7: t3, C: 5, D: 4 6: t3, C: 4, D: 3 5: t3, C: 3, D: 2 4: t3, C: 1, D: 1 3: t3, C: 0, D: 0 2: t1, C: 1, D: 1 1: t1, C: 0, D: 0 0: null, C: 1, D: 
1 $ cd .. Style path expansion: issue1948 - ui.style option doesn't work on OSX if it is a relative path $ mkdir -p home/styles $ cat > home/styles/teststyle < changeset = 'test {rev}:{node|short}\n' > EOF $ HOME=`pwd`/home; export HOME $ cat > latesttag/.hg/hgrc < [ui] > style = ~/styles/teststyle > EOF $ hg -R latesttag tip test 10:9b4a630e5f5f Test recursive showlist template (issue1989): $ cat > style1989 < changeset = '{file_mods}{manifest}{extras}' > file_mod = 'M|{author|person}\n' > manifest = '{rev},{author}\n' > extra = '{key}: {author}\n' > EOF $ hg -R latesttag log -r tip --style=style1989 M|test 10,test branch: test Test new-style inline templating: $ hg log -R latesttag -r tip --template 'modified files: {file_mods % " {file}\n"}\n' modified files: .hgtags Test the sub function of templating for expansion: $ hg log -R latesttag -r 10 --template '{sub("[0-9]", "x", "{rev}")}\n' xx $ hg log -R latesttag -r 10 -T '{sub("[", "x", rev)}\n' hg: parse error: sub got an invalid pattern: [ [255] $ hg log -R latesttag -r 10 -T '{sub("[0-9]", r"\1", rev)}\n' hg: parse error: sub got an invalid replacement: \1 [255] Test the strip function with chars specified: $ hg log -R latesttag --template '{desc}\n' at3 t5 t3 t2 t1 merge h2e h2d h1c b a $ hg log -R latesttag --template '{strip(desc, "te")}\n' at3 5 3 2 1 merg h2 h2d h1c b a Test date format: $ hg log -R latesttag --template 'date: {date(date, "%y %m %d %S %z")}\n' date: 70 01 01 10 +0000 date: 70 01 01 09 +0000 date: 70 01 01 08 +0000 date: 70 01 01 07 +0000 date: 70 01 01 06 +0000 date: 70 01 01 05 +0100 date: 70 01 01 04 +0000 date: 70 01 01 03 +0000 date: 70 01 01 02 +0000 date: 70 01 01 01 +0000 date: 70 01 01 00 +0000 Test invalid date: $ hg log -R latesttag -T '{date(rev)}\n' hg: parse error: date expects a date information [255] Test integer literal: $ hg log -Ra -r0 -T '{(0)}\n' 0 $ hg log -Ra -r0 -T '{(123)}\n' 123 $ hg log -Ra -r0 -T '{(-4)}\n' -4 $ hg log -Ra -r0 -T '{(-)}\n' hg: parse error at 2: 
integer literal without digits [255] $ hg log -Ra -r0 -T '{(-a)}\n' hg: parse error at 2: integer literal without digits [255] top-level integer literal is interpreted as symbol (i.e. variable name): $ hg log -Ra -r0 -T '{1}\n' $ hg log -Ra -r0 -T '{if("t", "{1}")}\n' $ hg log -Ra -r0 -T '{1|stringify}\n' unless explicit symbol is expected: $ hg log -Ra -r0 -T '{desc|1}\n' hg: parse error: expected a symbol, got 'integer' [255] $ hg log -Ra -r0 -T '{1()}\n' hg: parse error: expected a symbol, got 'integer' [255] Test string literal: $ hg log -Ra -r0 -T '{"string with no template fragment"}\n' string with no template fragment $ hg log -Ra -r0 -T '{"template: {rev}"}\n' template: 0 $ hg log -Ra -r0 -T '{r"rawstring: {rev}"}\n' rawstring: {rev} because map operation requires template, raw string can't be used $ hg log -Ra -r0 -T '{files % r"rawstring"}\n' hg: parse error: expected template specifier [255] Test string escaping: $ hg log -R latesttag -r 0 --template '>\n<>\\n<{if(rev, "[>\n<>\\n<]")}>\n<>\\n<\n' > <>\n<[> <>\n<]> <>\n< $ hg log -R latesttag -r 0 \ > --config ui.logtemplate='>\n<>\\n<{if(rev, "[>\n<>\\n<]")}>\n<>\\n<\n' > <>\n<[> <>\n<]> <>\n< $ hg log -R latesttag -r 0 -T esc \ > --config templates.esc='>\n<>\\n<{if(rev, "[>\n<>\\n<]")}>\n<>\\n<\n' > <>\n<[> <>\n<]> <>\n< $ cat <<'EOF' > esctmpl > changeset = '>\n<>\\n<{if(rev, "[>\n<>\\n<]")}>\n<>\\n<\n' > EOF $ hg log -R latesttag -r 0 --style ./esctmpl > <>\n<[> <>\n<]> <>\n< Test string escaping of quotes: $ hg log -Ra -r0 -T '{"\""}\n' " $ hg log -Ra -r0 -T '{"\\\""}\n' \" $ hg log -Ra -r0 -T '{r"\""}\n' \" $ hg log -Ra -r0 -T '{r"\\\""}\n' \\\" $ hg log -Ra -r0 -T '{"\""}\n' " $ hg log -Ra -r0 -T '{"\\\""}\n' \" $ hg log -Ra -r0 -T '{r"\""}\n' \" $ hg log -Ra -r0 -T '{r"\\\""}\n' \\\" Test exception in quoted template. 
single backslash before quotation mark is stripped before parsing: $ cat <<'EOF' > escquotetmpl > changeset = "\" \\" \\\" \\\\" {files % \"{file}\"}\n" > EOF $ cd latesttag $ hg log -r 2 --style ../escquotetmpl " \" \" \\" head1 $ hg log -r 2 -T esc --config templates.esc='"{\"valid\"}\n"' valid $ hg log -r 2 -T esc --config templates.esc="'"'{\'"'"'valid\'"'"'}\n'"'" valid Test compatibility with 2.9.2-3.4 of escaped quoted strings in nested _evalifliteral() templates (issue4733): $ hg log -r 2 -T '{if(rev, "\"{rev}")}\n' "2 $ hg log -r 2 -T '{if(rev, "{if(rev, \"\\\"{rev}\")}")}\n' "2 $ hg log -r 2 -T '{if(rev, "{if(rev, \"{if(rev, \\\"\\\\\\\"{rev}\\\")}\")}")}\n' "2 $ hg log -r 2 -T '{if(rev, "\\\"")}\n' \" $ hg log -r 2 -T '{if(rev, "{if(rev, \"\\\\\\\"\")}")}\n' \" $ hg log -r 2 -T '{if(rev, "{if(rev, \"{if(rev, \\\"\\\\\\\\\\\\\\\"\\\")}\")}")}\n' \" $ hg log -r 2 -T '{if(rev, r"\\\"")}\n' \\\" $ hg log -r 2 -T '{if(rev, "{if(rev, r\"\\\\\\\"\")}")}\n' \\\" $ hg log -r 2 -T '{if(rev, "{if(rev, \"{if(rev, r\\\"\\\\\\\\\\\\\\\"\\\")}\")}")}\n' \\\" escaped single quotes and errors: $ hg log -r 2 -T "{if(rev, '{if(rev, \'foo\')}')}"'\n' foo $ hg log -r 2 -T "{if(rev, '{if(rev, r\'foo\')}')}"'\n' foo $ hg log -r 2 -T '{if(rev, "{if(rev, \")}")}\n' hg: parse error at 21: unterminated string [255] $ hg log -r 2 -T '{if(rev, \"\\"")}\n' hg: parse error: trailing \ in string [255] $ hg log -r 2 -T '{if(rev, r\"\\"")}\n' hg: parse error: trailing \ in string [255] $ cd .. Test leading backslashes: $ cd latesttag $ hg log -r 2 -T '\{rev} {files % "\{file}"}\n' {rev} {file} $ hg log -r 2 -T '\\{rev} {files % "\\{file}"}\n' \2 \head1 $ hg log -r 2 -T '\\\{rev} {files % "\\\{file}"}\n' \{rev} \{file} $ cd .. 
Test leading backslashes in "if" expression (issue4714): $ cd latesttag $ hg log -r 2 -T '{if("1", "\{rev}")} {if("1", r"\{rev}")}\n' {rev} \{rev} $ hg log -r 2 -T '{if("1", "\\{rev}")} {if("1", r"\\{rev}")}\n' \2 \\{rev} $ hg log -r 2 -T '{if("1", "\\\{rev}")} {if("1", r"\\\{rev}")}\n' \{rev} \\\{rev} $ cd .. "string-escape"-ed "\x5c\x786e" becomes r"\x6e" (once) or r"n" (twice) $ hg log -R a -r 0 --template '{if("1", "\x5c\x786e", "NG")}\n' \x6e $ hg log -R a -r 0 --template '{if("1", r"\x5c\x786e", "NG")}\n' \x5c\x786e $ hg log -R a -r 0 --template '{if("", "NG", "\x5c\x786e")}\n' \x6e $ hg log -R a -r 0 --template '{if("", "NG", r"\x5c\x786e")}\n' \x5c\x786e $ hg log -R a -r 2 --template '{ifeq("no perso\x6e", desc, "\x5c\x786e", "NG")}\n' \x6e $ hg log -R a -r 2 --template '{ifeq(r"no perso\x6e", desc, "NG", r"\x5c\x786e")}\n' \x5c\x786e $ hg log -R a -r 2 --template '{ifeq(desc, "no perso\x6e", "\x5c\x786e", "NG")}\n' \x6e $ hg log -R a -r 2 --template '{ifeq(desc, r"no perso\x6e", "NG", r"\x5c\x786e")}\n' \x5c\x786e $ hg log -R a -r 8 --template '{join(files, "\n")}\n' fourth second third $ hg log -R a -r 8 --template '{join(files, r"\n")}\n' fourth\nsecond\nthird $ hg log -R a -r 2 --template '{rstdoc("1st\n\n2nd", "htm\x6c")}'

                        1st

                        2nd

                        $ hg log -R a -r 2 --template '{rstdoc(r"1st\n\n2nd", "html")}'

                        1st\n\n2nd

                        $ hg log -R a -r 2 --template '{rstdoc("1st\n\n2nd", r"htm\x6c")}' 1st 2nd $ hg log -R a -r 2 --template '{strip(desc, "\x6e")}\n' o perso $ hg log -R a -r 2 --template '{strip(desc, r"\x6e")}\n' no person $ hg log -R a -r 2 --template '{strip("no perso\x6e", "\x6e")}\n' o perso $ hg log -R a -r 2 --template '{strip(r"no perso\x6e", r"\x6e")}\n' no perso $ hg log -R a -r 2 --template '{sub("\\x6e", "\x2d", desc)}\n' -o perso- $ hg log -R a -r 2 --template '{sub(r"\\x6e", "-", desc)}\n' no person $ hg log -R a -r 2 --template '{sub("n", r"\x2d", desc)}\n' \x2do perso\x2d $ hg log -R a -r 2 --template '{sub("n", "\x2d", "no perso\x6e")}\n' -o perso- $ hg log -R a -r 2 --template '{sub("n", r"\x2d", r"no perso\x6e")}\n' \x2do perso\x6e $ hg log -R a -r 8 --template '{files % "{file}\n"}' fourth second third Test string escaping in nested expression: $ hg log -R a -r 8 --template '{ifeq(r"\x6e", if("1", "\x5c\x786e"), join(files, "\x5c\x786e"))}\n' fourth\x6esecond\x6ethird $ hg log -R a -r 8 --template '{ifeq(if("1", r"\x6e"), "\x5c\x786e", join(files, "\x5c\x786e"))}\n' fourth\x6esecond\x6ethird $ hg log -R a -r 8 --template '{join(files, ifeq(branch, "default", "\x5c\x786e"))}\n' fourth\x6esecond\x6ethird $ hg log -R a -r 8 --template '{join(files, ifeq(branch, "default", r"\x5c\x786e"))}\n' fourth\x5c\x786esecond\x5c\x786ethird $ hg log -R a -r 3:4 --template '{rev}:{sub(if("1", "\x6e"), ifeq(branch, "foo", r"\x5c\x786e", "\x5c\x786e"), desc)}\n' 3:\x6eo user, \x6eo domai\x6e 4:\x5c\x786eew bra\x5c\x786ech Test quotes in nested expression are evaluated just like a $(command) substitution in POSIX shells: $ hg log -R a -r 8 -T '{"{"{rev}:{node|short}"}"}\n' 8:95c24699272e $ hg log -R a -r 8 -T '{"{"\{{rev}} \"{node|short}\""}"}\n' {8} "95c24699272e" Test recursive evaluation: $ hg init r $ cd r $ echo a > a $ hg ci -Am '{rev}' adding a $ hg log -r 0 --template '{if(rev, desc)}\n' {rev} $ hg log -r 0 --template '{if(rev, "{author} {rev}")}\n' 
test 0 $ hg branch -q 'text.{rev}' $ echo aa >> aa $ hg ci -u '{node|short}' -m 'desc to be wrapped desc to be wrapped' $ hg log -l1 --template '{fill(desc, "20", author, branch)}' {node|short}desc to text.{rev}be wrapped text.{rev}desc to be text.{rev}wrapped (no-eol) $ hg log -l1 --template '{fill(desc, "20", "{node|short}:", "text.{rev}:")}' bcc7ff960b8e:desc to text.1:be wrapped text.1:desc to be text.1:wrapped (no-eol) $ hg log -l 1 --template '{sub(r"[0-9]", "-", author)}' {node|short} (no-eol) $ hg log -l 1 --template '{sub(r"[0-9]", "-", "{node|short}")}' bcc-ff---b-e (no-eol) $ cat >> .hg/hgrc < [extensions] > color= > [color] > mode=ansi > text.{rev} = red > text.1 = green > EOF $ hg log --color=always -l 1 --template '{label(branch, "text\n")}' \x1b[0;31mtext\x1b[0m (esc) $ hg log --color=always -l 1 --template '{label("text.{rev}", "text\n")}' \x1b[0;32mtext\x1b[0m (esc) Test branches inside if statement: $ hg log -r 0 --template '{if(branches, "yes", "no")}\n' no Test get function: $ hg log -r 0 --template '{get(extras, "branch")}\n' default $ hg log -r 0 --template '{get(files, "should_fail")}\n' hg: parse error: get() expects a dict as first argument [255] Test localdate(date, tz) function: $ TZ=JST-09 hg log -r0 -T '{date|localdate|isodate}\n' 1970-01-01 09:00 +0900 $ TZ=JST-09 hg log -r0 -T '{localdate(date, "UTC")|isodate}\n' 1970-01-01 00:00 +0000 $ TZ=JST-09 hg log -r0 -T '{localdate(date, "+0200")|isodate}\n' 1970-01-01 02:00 +0200 $ TZ=JST-09 hg log -r0 -T '{localdate(date, "0")|isodate}\n' 1970-01-01 00:00 +0000 $ TZ=JST-09 hg log -r0 -T '{localdate(date, 0)|isodate}\n' 1970-01-01 00:00 +0000 $ hg log -r0 -T '{localdate(date, "invalid")|isodate}\n' hg: parse error: localdate expects a timezone [255] $ hg log -r0 -T '{localdate(date, date)|isodate}\n' hg: parse error: localdate expects a timezone [255] Test shortest(node) function: $ echo b > b $ hg ci -qAm b $ hg log --template '{shortest(node)}\n' e777 bcc7 f776 $ hg log --template 
'{shortest(node, 10)}\n' e777603221 bcc7ff960b f7769ec2ab $ hg log --template '{node|shortest}\n' -l1 e777 Test pad function $ hg log --template '{pad(rev, 20)} {author|user}\n' 2 test 1 {node|short} 0 test $ hg log --template '{pad(rev, 20, " ", True)} {author|user}\n' 2 test 1 {node|short} 0 test $ hg log --template '{pad(rev, 20, "-", False)} {author|user}\n' 2------------------- test 1------------------- {node|short} 0------------------- test Test template string in pad function $ hg log -r 0 -T '{pad("\{{rev}}", 10)} {author|user}\n' {0} test $ hg log -r 0 -T '{pad(r"\{rev}", 10)} {author|user}\n' \{rev} test Test ifcontains function $ hg log --template '{rev} {ifcontains(rev, "2 two 0", "is in the string", "is not")}\n' 2 is in the string 1 is not 0 is in the string $ hg log --template '{rev} {ifcontains("a", file_adds, "added a", "did not add a")}\n' 2 did not add a 1 did not add a 0 added a Test revset function $ hg log --template '{rev} {ifcontains(rev, revset("."), "current rev", "not current rev")}\n' 2 current rev 1 not current rev 0 not current rev $ hg log --template '{rev} {ifcontains(rev, revset(". 
+ .^"), "match rev", "not match rev")}\n' 2 match rev 1 match rev 0 not match rev $ hg log --template '{rev} Parents: {revset("parents(%s)", rev)}\n' 2 Parents: 1 1 Parents: 0 0 Parents: $ cat >> .hg/hgrc < [revsetalias] > myparents(\$1) = parents(\$1) > EOF $ hg log --template '{rev} Parents: {revset("myparents(%s)", rev)}\n' 2 Parents: 1 1 Parents: 0 0 Parents: $ hg log --template 'Rev: {rev}\n{revset("::%s", rev) % "Ancestor: {revision}\n"}\n' Rev: 2 Ancestor: 0 Ancestor: 1 Ancestor: 2 Rev: 1 Ancestor: 0 Ancestor: 1 Rev: 0 Ancestor: 0 $ hg log --template '{revset("TIP"|lower)}\n' -l1 2 a list template is evaluated for each item of revset $ hg log -T '{rev} p: {revset("p1(%s)", rev) % "{rev}:{node|short}"}\n' 2 p: 1:bcc7ff960b8e 1 p: 0:f7769ec2ab97 0 p: therefore, 'revcache' should be recreated for each rev $ hg log -T '{rev} {file_adds}\np {revset("p1(%s)", rev) % "{file_adds}"}\n' 2 aa b p 1 p a 0 a p Test active bookmark templating $ hg book foo $ hg book bar $ hg log --template "{rev} {bookmarks % '{bookmark}{ifeq(bookmark, active, \"*\")} '}\n" 2 bar* foo 1 0 $ hg log --template "{rev} {activebookmark}\n" 2 bar 1 0 $ hg bookmarks --inactive bar $ hg log --template "{rev} {activebookmark}\n" 2 1 0 $ hg book -r1 baz $ hg log --template "{rev} {join(bookmarks, ' ')}\n" 2 bar foo 1 baz 0 $ hg log --template "{rev} {ifcontains('foo', bookmarks, 't', 'f')}\n" 2 t 1 f 0 f Test namespaces dict $ hg log -T '{rev}{namespaces % " {namespace}={join(names, ",")}"}\n' 2 bookmarks=bar,foo tags=tip branches=text.{rev} 1 bookmarks=baz tags= branches=text.{rev} 0 bookmarks= tags= branches=default $ hg log -r2 -T '{namespaces % "{namespace}: {names}\n"}' bookmarks: bar foo tags: tip branches: text.{rev} $ hg log -r2 -T '{namespaces % "{namespace}:\n{names % " {name}\n"}"}' bookmarks: bar foo tags: tip branches: text.{rev} $ hg log -r2 -T '{get(namespaces, "bookmarks") % "{name}\n"}' bar foo Test stringify on sub expressions $ cd .. 
$ hg log -R a -r 8 --template '{join(files, if("1", if("1", ", ")))}\n' fourth, second, third $ hg log -R a -r 8 --template '{strip(if("1", if("1", "-abc-")), if("1", if("1", "-")))}\n' abc Test splitlines $ hg log -Gv -R a --template "{splitlines(desc) % 'foo {line}\n'}" @ foo Modify, add, remove, rename | o foo future | o foo third | o foo second o foo merge |\ | o foo new head | | o | foo new branch |/ o foo no user, no domain | o foo no person | o foo other 1 | foo other 2 | foo | foo other 3 o foo line 1 foo line 2 Test startswith $ hg log -Gv -R a --template "{startswith(desc)}" hg: parse error: startswith expects two arguments [255] $ hg log -Gv -R a --template "{startswith('line', desc)}" @ | o | o | o o |\ | o | | o | |/ o | o | o | o line 1 line 2 Test bad template with better error message $ hg log -Gv -R a --template '{desc|user()}' hg: parse error: expected a symbol, got 'func' [255] Test word function (including index out of bounds graceful failure) $ hg log -Gv -R a --template "{word('1', desc)}" @ add, | o | o | o o |\ | o head | | o | branch |/ o user, | o person | o 1 | o 1 Test word third parameter used as splitter $ hg log -Gv -R a --template "{word('0', desc, 'o')}" @ M | o future | o third | o sec o merge |\ | o new head | | o | new branch |/ o n | o n | o | o line 1 line 2 Test word error messages for not enough and too many arguments $ hg log -Gv -R a --template "{word('0')}" hg: parse error: word expects two or three arguments, got 1 [255] $ hg log -Gv -R a --template "{word('0', desc, 'o', 'h', 'b', 'o', 'y')}" hg: parse error: word expects two or three arguments, got 7 [255] Test word for integer literal $ hg log -R a --template "{word(2, desc)}\n" -r0 line Test word for invalid numbers $ hg log -Gv -R a --template "{word('a', desc)}" hg: parse error: word expects an integer index [255] Test word for out of range $ hg log -R a --template "{word(10000, desc)}" $ hg log -R a --template "{word(-10000, desc)}" Test indent and not adding to 
empty lines $ hg log -T "-----\n{indent(desc, '>> ', ' > ')}\n" -r 0:1 -R a ----- > line 1 >> line 2 ----- > other 1 >> other 2 >> other 3 Test with non-strings like dates $ hg log -T "{indent(date, ' ')}\n" -r 2:3 -R a 1200000.00 1300000.00 Test broken string escapes: $ hg log -T "bogus\\" -R a hg: parse error: trailing \ in string [255] $ hg log -T "\\xy" -R a hg: parse error: invalid \x escape [255] Set up repository for non-ascii encoding tests: $ hg init nonascii $ cd nonascii $ python < open('utf-8', 'w').write('\xc3\xa9') > EOF $ HGENCODING=utf-8 hg branch -q `cat utf-8` $ HGENCODING=utf-8 hg ci -qAm 'non-ascii branch' utf-8 json filter should try round-trip conversion to utf-8: $ HGENCODING=ascii hg log -T "{branch|json}\n" -r0 "\u00e9" json filter should not abort if it can't decode bytes: (not sure the current behavior is right; we might want to use utf-8b encoding?) $ HGENCODING=ascii hg log -T "{'`cat utf-8`'|json}\n" -l1 "\ufffd\ufffd" $ cd .. mercurial-3.7.3/tests/test-debugrename.t0000644000175000017500000000056312676531525017715 0ustar mpmmpm00000000000000 $ hg init $ echo a > a $ hg ci -Am t adding a $ hg mv a b $ hg ci -Am t1 $ hg debugrename b b renamed from a:b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 $ hg mv b a $ hg ci -Am t2 $ hg debugrename a a renamed from b:37d9b5d994eab34eda9c16b195ace52c7b129980 $ hg debugrename --rev 1 b b renamed from a:b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 mercurial-3.7.3/tests/test-convert-darcs.t0000644000175000017500000000520712676531525020211 0ustar mpmmpm00000000000000#require darcs $ echo "[extensions]" >> $HGRCPATH $ echo "convert=" >> $HGRCPATH $ DARCS_EMAIL='test@example.org'; export DARCS_EMAIL initialize darcs repo $ mkdir darcs-repo $ cd darcs-repo $ darcs init $ echo a > a $ darcs record -a -l -m p0 Finished recording patch 'p0' $ cd .. 
branch and update $ darcs get -q darcs-repo darcs-clone >/dev/null $ cd darcs-clone $ echo c >> a $ echo c > c $ darcs record -a -l -m p1.1 Finished recording patch 'p1.1' $ cd .. skip if we can't import elementtree $ if hg convert darcs-repo darcs-dummy 2>&1 | grep ElementTree > /dev/null; then > echo 'skipped: missing feature: elementtree module' > exit 80 > fi update source $ cd darcs-repo $ echo b >> a $ echo b > b $ darcs record -a -l -m p1.2 Finished recording patch 'p1.2' $ darcs pull -q -a --no-set-default ../darcs-clone Backing up ./a(*) (glob) We have conflicts in the following files: ./a $ sleep 1 $ echo e > a $ echo f > f $ mkdir dir $ echo d > dir/d $ echo d > dir/d2 $ darcs record -a -l -m p2 Finished recording patch 'p2' test file and directory move $ darcs mv f ff Test remove + move $ darcs remove dir/d2 $ rm dir/d2 $ darcs mv dir dir2 $ darcs record -a -l -m p3 Finished recording patch 'p3' The converter does not currently handle patch conflicts very well. When they occur, it reverts *all* changes and moves forward, letting the conflict resolving patch fix collisions. Unfortunately, non-conflicting changes, like the addition of the "c" file in p1.1 patch are reverted too. Just to say that manifest not listing "c" here is a bug. $ cd .. $ hg convert darcs-repo darcs-repo-hg initializing destination darcs-repo-hg repository scanning source... sorting... converting... 
4 p0 3 p1.2 2 p1.1 1 p2 0 p3 $ hg log -R darcs-repo-hg -g --template '{rev} "{desc|firstline}" ({author}) files: {files}\n' "$@" 4 "p3" (test@example.org) files: dir/d dir/d2 dir2/d f ff 3 "p2" (test@example.org) files: a dir/d dir/d2 f 2 "p1.1" (test@example.org) files: 1 "p1.2" (test@example.org) files: a b 0 "p0" (test@example.org) files: a $ hg up -q -R darcs-repo-hg $ hg -R darcs-repo-hg manifest --debug 7225b30cdf38257d5cc7780772c051b6f33e6d6b 644 a 1e88685f5ddec574a34c70af492f95b6debc8741 644 b 37406831adc447ec2385014019599dfec953c806 644 dir2/d b783a337463792a5c7d548ad85a7d3253c16ba8c 644 ff #if no-outer-repo try converting darcs1 repository $ hg clone -q "$TESTDIR/bundles/darcs1.hg" darcs $ hg convert -s darcs darcs/darcs1 2>&1 | grep darcs-1.0 darcs-1.0 repository format is unsupported, please upgrade #endif mercurial-3.7.3/tests/test-subrepo-svn.t0000644000175000017500000004012312676531525017716 0ustar mpmmpm00000000000000#require svn15 $ SVNREPOPATH=`pwd`/svn-repo #if windows $ SVNREPOURL=file:///`$PYTHON -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` #else $ SVNREPOURL=file://`$PYTHON -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` #endif $ filter_svn_output () { > egrep -v 'Committing|Transmitting|Updating' || true > } create subversion repo $ WCROOT="`pwd`/svn-wc" $ svnadmin create svn-repo $ svn co "$SVNREPOURL" svn-wc Checked out revision 0. $ cd svn-wc $ mkdir src $ echo alpha > src/alpha $ svn add src A src A src/alpha (glob) $ mkdir externals $ echo other > externals/other $ svn add externals A externals A externals/other (glob) $ svn ci -qm 'Add alpha' $ svn up -q $ echo "externals -r1 $SVNREPOURL/externals" > extdef $ svn propset -F extdef svn:externals src property 'svn:externals' set on 'src' $ svn ci -qm 'Setting externals' $ cd .. 
create hg repo $ mkdir sub $ cd sub $ hg init t $ cd t first revision, no sub $ echo a > a $ hg ci -Am0 adding a add first svn sub with leading whitespaces $ echo "s = [svn] $SVNREPOURL/src" >> .hgsub $ echo "subdir/s = [svn] $SVNREPOURL/src" >> .hgsub $ svn co --quiet "$SVNREPOURL"/src s $ mkdir subdir $ svn co --quiet "$SVNREPOURL"/src subdir/s $ hg add .hgsub $ hg ci -m1 make sure we avoid empty commits (issue2445) $ hg sum parent: 1:* tip (glob) 1 branch: default commit: (clean) update: (current) phases: 2 draft $ hg ci -moops nothing changed [1] debugsub $ hg debugsub path s source file://*/svn-repo/src (glob) revision 2 path subdir/s source file://*/svn-repo/src (glob) revision 2 change file in svn and hg, commit $ echo a >> a $ echo alpha >> s/alpha $ hg sum parent: 1:* tip (glob) 1 branch: default commit: 1 modified, 1 subrepos update: (current) phases: 2 draft $ hg commit --subrepos -m 'Message!' | filter_svn_output committing subrepository s Sending*s/alpha (glob) Committed revision 3. Fetching external item into '*s/externals'* (glob) External at revision 1. At revision 3. $ hg debugsub path s source file://*/svn-repo/src (glob) revision 3 path subdir/s source file://*/svn-repo/src (glob) revision 2 missing svn file, commit should fail $ rm s/alpha $ hg commit --subrepos -m 'abort on missing file' committing subrepository s abort: cannot commit missing svn entries (in subrepo s) [255] $ svn revert s/alpha > /dev/null add an unrelated revision in svn and update the subrepo to without bringing any changes. $ svn mkdir "$SVNREPOURL/unrelated" -qm 'create unrelated' $ svn up -q s $ hg sum parent: 2:* tip (glob) Message! 
branch: default commit: (clean) update: (current) phases: 3 draft $ echo a > s/a should be empty despite change to s/a $ hg st add a commit from svn $ cd "$WCROOT/src" $ svn up -q $ echo xyz >> alpha $ svn propset svn:mime-type 'text/xml' alpha property 'svn:mime-type' set on 'alpha' $ svn ci -qm 'amend a from svn' $ cd ../../sub/t this commit from hg will fail $ echo zzz >> s/alpha $ (hg ci --subrepos -m 'amend alpha from hg' 2>&1; echo "[$?]") | grep -vi 'out of date' committing subrepository s abort: svn:*Commit failed (details follow): (glob) [255] $ svn revert -q s/alpha this commit fails because of meta changes $ svn propset svn:mime-type 'text/html' s/alpha property 'svn:mime-type' set on 's/alpha' (glob) $ (hg ci --subrepos -m 'amend alpha from hg' 2>&1; echo "[$?]") | grep -vi 'out of date' committing subrepository s abort: svn:*Commit failed (details follow): (glob) [255] $ svn revert -q s/alpha this commit fails because of externals changes $ echo zzz > s/externals/other $ hg ci --subrepos -m 'amend externals from hg' committing subrepository s abort: cannot commit svn externals (in subrepo s) [255] $ hg diff --subrepos -r 1:2 | grep -v diff --- a/.hgsubstate Thu Jan 01 00:00:00 1970 +0000 +++ b/.hgsubstate Thu Jan 01 00:00:00 1970 +0000 @@ -1,2 +1,2 @@ -2 s +3 s 2 subdir/s --- a/a Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,2 @@ a +a $ svn revert -q s/externals/other this commit fails because of externals meta changes $ svn propset svn:mime-type 'text/html' s/externals/other property 'svn:mime-type' set on 's/externals/other' (glob) $ hg ci --subrepos -m 'amend externals from hg' committing subrepository s abort: cannot commit svn externals (in subrepo s) [255] $ svn revert -q s/externals/other clone $ cd .. $ hg clone t tc updating to branch default A tc/s/alpha (glob) U tc/s (glob) Fetching external item into 'tc/s/externals'* (glob) A tc/s/externals/other (glob) Checked out external at revision 1. 
Checked out revision 3. A tc/subdir/s/alpha (glob) U tc/subdir/s (glob) Fetching external item into 'tc/subdir/s/externals'* (glob) A tc/subdir/s/externals/other (glob) Checked out external at revision 1. Checked out revision 2. 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd tc debugsub in clone $ hg debugsub path s source file://*/svn-repo/src (glob) revision 3 path subdir/s source file://*/svn-repo/src (glob) revision 2 verify subrepo is contained within the repo directory $ $PYTHON -c "import os.path; print os.path.exists('s')" True update to nullrev (must delete the subrepo) $ hg up null 0 files updated, 0 files merged, 3 files removed, 0 files unresolved $ ls Check hg update --clean $ cd "$TESTTMP/sub/t" $ cd s $ echo c0 > alpha $ echo c1 > f1 $ echo c1 > f2 $ svn add f1 -q $ svn status | sort ? * a (glob) ? * f2 (glob) A * f1 (glob) M * alpha (glob) Performing status on external item at 'externals'* (glob) X * externals (glob) $ cd ../.. $ hg -R t update -C Fetching external item into 't/s/externals'* (glob) Checked out external at revision 1. Checked out revision 3. 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd t/s $ svn status | sort ? * a (glob) ? * f1 (glob) ? * f2 (glob) Performing status on external item at 'externals'* (glob) X * externals (glob) Sticky subrepositories, no changes $ cd "$TESTTMP/sub/t" $ hg id -n 2 $ cd s $ svnversion 3 $ cd .. $ hg update 1 U *s/alpha (glob) Fetching external item into '*s/externals'* (glob) Checked out external at revision 1. Checked out revision 2. 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg id -n 1 $ cd s $ svnversion 2 $ cd .. Sticky subrepositories, file changes $ touch s/f1 $ cd s $ svn add f1 A f1 $ cd .. $ hg id -n 1+ $ cd s $ svnversion 2M $ cd .. $ hg update tip subrepository s diverged (local revision: 2, remote revision: 3) (M)erge, keep (l)ocal or keep (r)emote? 
m subrepository sources for s differ use (l)ocal source (2) or (r)emote source (3)? l 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg id -n 2+ $ cd s $ svnversion 2M $ cd .. $ hg update --clean tip U *s/alpha (glob) Fetching external item into '*s/externals'* (glob) Checked out external at revision 1. Checked out revision 3. 1 files updated, 0 files merged, 0 files removed, 0 files unresolved Sticky subrepository, revision updates $ hg id -n 2 $ cd s $ svnversion 3 $ cd .. $ cd s $ svn update -qr 1 $ cd .. $ hg update 1 subrepository s diverged (local revision: 3, remote revision: 2) (M)erge, keep (l)ocal or keep (r)emote? m subrepository sources for s differ (in checked out version) use (l)ocal source (1) or (r)emote source (2)? l 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg id -n 1+ $ cd s $ svnversion 1 $ cd .. Sticky subrepository, file changes and revision updates $ touch s/f1 $ cd s $ svn add f1 A f1 $ svnversion 1M $ cd .. $ hg id -n 1+ $ hg update tip subrepository s diverged (local revision: 3, remote revision: 3) (M)erge, keep (l)ocal or keep (r)emote? m subrepository sources for s differ use (l)ocal source (1) or (r)emote source (3)? l 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg id -n 2+ $ cd s $ svnversion 1M $ cd .. Sticky repository, update --clean $ hg update --clean tip | grep -v 's[/\]externals[/\]other' U *s/alpha (glob) U *s (glob) Fetching external item into '*s/externals'* (glob) Checked out external at revision 1. Checked out revision 3. 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg id -n 2 $ cd s $ svnversion 3 $ cd .. Test subrepo already at intended revision: $ cd s $ svn update -qr 2 $ cd .. $ hg update 1 subrepository s diverged (local revision: 3, remote revision: 2) (M)erge, keep (l)ocal or keep (r)emote? m 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg id -n 1+ $ cd s $ svnversion 2 $ cd .. 
Test case where subversion would fail to update the subrepo because there are unknown directories being replaced by tracked ones (happens with rebase). $ cd "$WCROOT/src" $ mkdir dir $ echo epsilon.py > dir/epsilon.py $ svn add dir A dir A dir/epsilon.py (glob) $ svn ci -qm 'Add dir/epsilon.py' $ cd ../.. $ hg init rebaserepo $ cd rebaserepo $ svn co -r5 --quiet "$SVNREPOURL"/src s $ echo "s = [svn] $SVNREPOURL/src" >> .hgsub $ hg add .hgsub $ hg ci -m addsub $ echo a > a $ hg add . adding a $ hg ci -m adda $ hg up 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ svn up -qr6 s $ hg ci -m updatesub created new head $ echo pyc > s/dir/epsilon.pyc $ hg up 1 D *s/dir (glob) Fetching external item into '*s/externals'* (glob) Checked out external at revision 1. Checked out revision 5. 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg up -q 2 Modify one of the externals to point to a different path so we can test having obstructions when switching branches on checkout: $ hg checkout tip 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo "obstruct = [svn] $SVNREPOURL/externals" >> .hgsub $ svn co -r5 --quiet "$SVNREPOURL"/externals obstruct $ hg commit -m 'Start making obstructed working copy' $ hg book other $ hg co -r 'p1(tip)' 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (leaving bookmark other) $ echo "obstruct = [svn] $SVNREPOURL/src" >> .hgsub $ svn co -r5 --quiet "$SVNREPOURL"/src obstruct $ hg commit -m 'Other branch which will be obstructed' created new head Switching back to the head where we have another path mapped to the same subrepo should work if the subrepo is clean. $ hg co other A *obstruct/other (glob) Checked out revision 1. 
2 files updated, 0 files merged, 0 files removed, 0 files unresolved (activating bookmark other) This is surprising, but is also correct based on the current code: $ echo "updating should (maybe) fail" > obstruct/other $ hg co tip abort: uncommitted changes (commit or update --clean to discard changes) [255] Point to a Subversion branch which has since been deleted and recreated First, create that condition in the repository. $ hg ci --subrepos -m cleanup | filter_svn_output committing subrepository obstruct Sending obstruct/other (glob) Committed revision 7. At revision 7. $ svn mkdir -qm "baseline" $SVNREPOURL/trunk $ svn copy -qm "initial branch" $SVNREPOURL/trunk $SVNREPOURL/branch $ svn co --quiet "$SVNREPOURL"/branch tempwc $ cd tempwc $ echo "something old" > somethingold $ svn add somethingold A somethingold $ svn ci -qm 'Something old' $ svn rm -qm "remove branch" $SVNREPOURL/branch $ svn copy -qm "recreate branch" $SVNREPOURL/trunk $SVNREPOURL/branch $ svn up -q $ echo "something new" > somethingnew $ svn add somethingnew A somethingnew $ svn ci -qm 'Something new' $ cd .. $ rm -rf tempwc $ svn co "$SVNREPOURL/branch"@10 recreated A recreated/somethingold (glob) Checked out revision 10. $ echo "recreated = [svn] $SVNREPOURL/branch" >> .hgsub $ hg ci -m addsub $ cd recreated $ svn up -q $ cd .. $ hg ci -m updatesub $ hg up -r-2 D *recreated/somethingnew (glob) A *recreated/somethingold (glob) Checked out revision 10. 
1 files updated, 0 files merged, 0 files removed, 0 files unresolved (leaving bookmark other) $ test -f recreated/somethingold Test archive $ hg archive -S ../archive-all --debug --config progress.debug=true archiving: 0/2 files (0.00%) archiving: .hgsub 1/2 files (50.00%) archiving: .hgsubstate 2/2 files (100.00%) archiving (obstruct): 0/1 files (0.00%) archiving (obstruct): 1/1 files (100.00%) archiving (recreated): 0/1 files (0.00%) archiving (recreated): 1/1 files (100.00%) archiving (s): 0/2 files (0.00%) archiving (s): 1/2 files (50.00%) archiving (s): 2/2 files (100.00%) $ hg archive -S ../archive-exclude --debug --config progress.debug=true -X **old archiving: 0/2 files (0.00%) archiving: .hgsub 1/2 files (50.00%) archiving: .hgsubstate 2/2 files (100.00%) archiving (obstruct): 0/1 files (0.00%) archiving (obstruct): 1/1 files (100.00%) archiving (recreated): 0 files archiving (s): 0/2 files (0.00%) archiving (s): 1/2 files (50.00%) archiving (s): 2/2 files (100.00%) $ find ../archive-exclude | sort ../archive-exclude ../archive-exclude/.hg_archival.txt ../archive-exclude/.hgsub ../archive-exclude/.hgsubstate ../archive-exclude/obstruct ../archive-exclude/obstruct/other ../archive-exclude/s ../archive-exclude/s/alpha ../archive-exclude/s/dir ../archive-exclude/s/dir/epsilon.py Test forgetting files, not implemented in svn subrepo, used to traceback #if no-windows $ hg forget 'notafile*' notafile*: No such file or directory [1] #else $ hg forget 'notafile' notafile: * (glob) [1] #endif Test a subrepo referencing a just moved svn path. Last commit rev will be different from the revision, and the path will be different as well. $ cd "$WCROOT" $ svn up > /dev/null $ mkdir trunk/subdir branches $ echo a > trunk/subdir/a $ svn add trunk/subdir branches A trunk/subdir (glob) A trunk/subdir/a (glob) A branches $ svn ci -qm addsubdir $ svn cp -qm branchtrunk $SVNREPOURL/trunk $SVNREPOURL/branches/somebranch $ cd .. 
$ hg init repo2 $ cd repo2 $ svn co $SVNREPOURL/branches/somebranch/subdir A subdir/a (glob) Checked out revision 15. $ echo "subdir = [svn] $SVNREPOURL/branches/somebranch/subdir" > .hgsub $ hg add .hgsub $ hg ci -m addsub $ hg up null 0 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg up A *subdir/a (glob) Checked out revision 15. 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd .. Test sanitizing ".hg/hgrc" in subrepo $ cd sub/t $ hg update -q -C tip $ cd s $ mkdir .hg $ echo '.hg/hgrc in svn repo' > .hg/hgrc $ mkdir -p sub/.hg $ echo 'sub/.hg/hgrc in svn repo' > sub/.hg/hgrc $ svn add .hg sub A .hg A .hg/hgrc (glob) A sub A sub/.hg (glob) A sub/.hg/hgrc (glob) $ svn ci -qm 'add .hg/hgrc to be sanitized at hg update' $ svn up -q $ cd .. $ hg commit -S -m 'commit with svn revision including .hg/hgrc' $ grep ' s$' .hgsubstate 16 s $ cd .. $ hg -R tc pull -u -q 2>&1 | sort warning: removing potentially hostile 'hgrc' in '$TESTTMP/sub/tc/s/.hg' (glob) warning: removing potentially hostile 'hgrc' in '$TESTTMP/sub/tc/s/sub/.hg' (glob) $ cd tc $ grep ' s$' .hgsubstate 16 s $ test -f s/.hg/hgrc [1] $ test -f s/sub/.hg/hgrc [1] Test that sanitizing is omitted in meta data area: $ mkdir s/.svn/.hg $ echo '.hg/hgrc in svn metadata area' > s/.svn/.hg/hgrc $ hg update -q -C '.^1' $ cd ../.. mercurial-3.7.3/tests/test-context.py.out0000644000175000017500000000665312676531525020124 0ustar mpmmpm00000000000000workingfilectx.date = (1000, 0) ASCII : Gr?ezi! Latin-1 : Grüezi! UTF-8 : Grüezi! 
diff --git a/foo b/foo --- a/foo +++ b/foo @@ -1,1 +1,2 @@ foo +bar = checking context.status(): == checking workingctx.status: wctx._status= === with "pattern match": wctx._status= wctx._status= === with "always match" and "listclean=True": wctx._status= wctx._status= == checking workingcommitctx.status: wcctx._status= === with "always match": wcctx._status= wcctx._status= === with "always match" and "listclean=True": wcctx._status= wcctx._status= === with "pattern match": wcctx._status= wcctx._status= === with "pattern match" and "listclean=True": wcctx._status= wcctx._status= mercurial-3.7.3/tests/test-diff-newlines.t0000644000175000017500000000065112676531525020167 0ustar mpmmpm00000000000000 $ hg init $ $PYTHON -c 'file("a", "wb").write("confuse str.splitlines\nembedded\rnewline\n")' $ hg ci -Ama -d '1 0' adding a $ echo clean diff >> a $ hg ci -mb -d '2 0' $ hg diff -r0 -r1 diff -r 107ba6f817b5 -r 310ce7989cdc a --- a/a Thu Jan 01 00:00:01 1970 +0000 +++ b/a Thu Jan 01 00:00:02 1970 +0000 @@ -1,2 +1,3 @@ confuse str.splitlines embedded\r (no-eol) (esc) newline +clean diff mercurial-3.7.3/tests/test-convert-bzr-ghosts.t0000644000175000017500000000170312676531525021214 0ustar mpmmpm00000000000000#require bzr $ . "$TESTDIR/bzr-definitions" $ cat > ghostcreator.py < import sys > from bzrlib import workingtree > wt = workingtree.WorkingTree.open('.') > > message, ghostrev = sys.argv[1:] > wt.set_parent_ids(wt.get_parent_ids() + [ghostrev]) > wt.commit(message) > EOF ghost revisions $ mkdir test-ghost-revisions $ cd test-ghost-revisions $ bzr init -q source $ cd source $ echo content > somefile $ bzr add -q somefile $ bzr commit -q -m 'Initial layout setup' $ echo morecontent >> somefile $ python ../../ghostcreator.py 'Commit with ghost revision' ghostrev $ cd .. $ hg convert source source-hg initializing destination source-hg repository scanning source... sorting... converting... 
1 Initial layout setup 0 Commit with ghost revision $ glog -R source-hg o 1@source "Commit with ghost revision" files: somefile | o 0@source "Initial layout setup" files: somefile $ cd .. mercurial-3.7.3/tests/test-bookmarks-current.t0000644000175000017500000001054212676531525021105 0ustar mpmmpm00000000000000 $ hg init no bookmarks $ hg bookmarks no bookmarks set set bookmark X $ hg bookmark X list bookmarks $ hg bookmark * X -1:000000000000 list bookmarks with color $ hg --config extensions.color= --config color.mode=ansi \ > bookmark --color=always \x1b[0;32m * \x1b[0m\x1b[0;32mX\x1b[0m\x1b[0;32m -1:000000000000\x1b[0m (esc) update to bookmark X $ hg update X 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (activating bookmark X) list bookmarks $ hg bookmarks * X -1:000000000000 rename $ hg bookmark -m X Z list bookmarks $ cat .hg/bookmarks.current Z (no-eol) $ cat .hg/bookmarks 0000000000000000000000000000000000000000 Z $ hg bookmarks * Z -1:000000000000 new bookmarks X and Y, first one made active $ hg bookmark Y X list bookmarks $ hg bookmark X -1:000000000000 * Y -1:000000000000 Z -1:000000000000 $ hg bookmark -d X commit $ echo 'b' > b $ hg add b $ hg commit -m'test' list bookmarks $ hg bookmark * Y 0:719295282060 Z -1:000000000000 Verify that switching to Z updates the active bookmark: $ hg update Z 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (activating bookmark Z) $ hg bookmark Y 0:719295282060 * Z -1:000000000000 Switch back to Y for the remaining tests in this file: $ hg update Y 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (activating bookmark Y) delete bookmarks $ hg bookmark -d Y $ hg bookmark -d Z list bookmarks $ hg bookmark no bookmarks set update to tip $ hg update tip 0 files updated, 0 files merged, 0 files removed, 0 files unresolved set bookmark Y using -r . but make sure that the active bookmark is not activated $ hg bookmark -r . 
Y list bookmarks, Y should not be active $ hg bookmark Y 0:719295282060 now, activate Y $ hg up -q Y set bookmark Z using -i $ hg bookmark -r . -i Z $ hg bookmarks * Y 0:719295282060 Z 0:719295282060 deactivate active bookmark using -i $ hg bookmark -i Y $ hg bookmarks Y 0:719295282060 Z 0:719295282060 $ hg up -q Y $ hg bookmark -i $ hg bookmarks Y 0:719295282060 Z 0:719295282060 $ hg bookmark -i no active bookmark $ hg up -q Y $ hg bookmarks * Y 0:719295282060 Z 0:719295282060 deactivate active bookmark while renaming $ hg bookmark -i -m Y X $ hg bookmarks X 0:719295282060 Z 0:719295282060 bare update moves the active bookmark forward and clear the divergent bookmarks $ echo a > a $ hg ci -Am1 adding a $ echo b >> a $ hg ci -Am2 $ hg bookmark X@1 -r 1 $ hg bookmark X@2 -r 2 $ hg update X 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (activating bookmark X) $ hg bookmarks * X 0:719295282060 X@1 1:cc586d725fbe X@2 2:49e1c4e84c58 Z 0:719295282060 $ hg update 1 files updated, 0 files merged, 0 files removed, 0 files unresolved updating bookmark X $ hg bookmarks * X 2:49e1c4e84c58 Z 0:719295282060 test deleting .hg/bookmarks.current when explicitly updating to a revision $ echo a >> b $ hg ci -m. $ hg up -q X $ test -f .hg/bookmarks.current try to update to it again to make sure we don't set and then unset it $ hg up -q X $ test -f .hg/bookmarks.current $ hg up -q 1 $ test -f .hg/bookmarks.current [1] when a bookmark is active, hg up -r . is analogous to hg book -i $ hg up -q X $ hg up -q . 
$ test -f .hg/bookmarks.current [1] issue 4552 -- simulate a pull moving the active bookmark $ hg up -q X $ printf "Z" > .hg/bookmarks.current $ hg log -T '{activebookmark}\n' -r Z Z $ hg log -T '{bookmarks % "{active}\n"}' -r Z Z mercurial-3.7.3/tests/test-branch-option.t0000644000175000017500000000600512676531525020177 0ustar mpmmpm00000000000000test branch selection options $ hg init branch $ cd branch $ hg branch a marked working directory as branch a (branches are permanent and global, did you want a bookmark?) $ echo a > foo $ hg ci -d '0 0' -Ama adding foo $ echo a2 > foo $ hg ci -d '0 0' -ma2 $ hg up 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg branch c marked working directory as branch c $ echo c > foo $ hg ci -d '0 0' -mc $ hg tag -l z $ cd .. $ hg clone -r 0 branch branch2 adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch a 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd branch2 $ hg up 0 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg branch b marked working directory as branch b $ echo b > foo $ hg ci -d '0 0' -mb $ hg up 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg --encoding utf-8 branch æ marked working directory as branch \xc3\xa6 (esc) $ echo ae1 > foo $ hg ci -d '0 0' -mae1 $ hg up 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg --encoding utf-8 branch -f æ marked working directory as branch \xc3\xa6 (esc) $ echo ae2 > foo $ hg ci -d '0 0' -mae2 created new head $ hg up 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg branch -f b marked working directory as branch b $ echo b2 > foo $ hg ci -d '0 0' -mb2 created new head unknown branch and fallback $ hg in -qbz abort: unknown branch 'z'! [255] $ hg in -q ../branch#z 2:f25d57ab0566 $ hg out -qbz abort: unknown branch 'z'! 
[255] in rev c branch a $ hg in -qr c ../branch#a 1:dd6e60a716c6 2:f25d57ab0566 $ hg in -qr c -b a 1:dd6e60a716c6 2:f25d57ab0566 out branch . $ hg out -q ../branch#. 1:b84708d77ab7 4:65511d0e2b55 $ hg out -q -b . 1:b84708d77ab7 4:65511d0e2b55 out branch . non-ascii $ hg --encoding utf-8 up æ 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg --encoding latin1 out -q ../branch#. 2:df5a44224d4e 3:4f4a5125ca10 $ hg --encoding latin1 out -q -b . 2:df5a44224d4e 3:4f4a5125ca10 clone branch b $ cd .. $ hg clone branch2#b branch3 adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 1 files (+1 heads) updating to branch b 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -q -R branch3 heads b 2:65511d0e2b55 1:b84708d77ab7 $ hg -q -R branch3 parents 2:65511d0e2b55 $ rm -rf branch3 clone rev a branch b $ hg clone -r a branch2#b branch3 adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 1 files (+1 heads) updating to branch a 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -q -R branch3 heads b 2:65511d0e2b55 1:b84708d77ab7 $ hg -q -R branch3 parents 0:5b65ba7c951d $ rm -rf branch3 mercurial-3.7.3/tests/test-merge-revert.t0000644000175000017500000000251012676531525020035 0ustar mpmmpm00000000000000 $ hg init $ echo "added file1" > file1 $ echo "added file2" > file2 $ hg add file1 file2 $ hg commit -m "added file1 and file2" $ echo "changed file1" >> file1 $ hg commit -m "changed file1" $ hg -q log 1:08a16e8e4408 0:d29c767a4b52 $ hg id 08a16e8e4408 tip $ hg update -C 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg id d29c767a4b52 $ echo "changed file1" >> file1 $ hg id d29c767a4b52+ $ hg revert --all reverting file1 $ hg diff $ hg status ? file1.orig $ hg id d29c767a4b52 $ hg update 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg diff $ hg status ? 
file1.orig $ hg id 08a16e8e4408 tip $ hg update -C 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo "changed file1" >> file1 $ hg update 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg diff $ hg status ? file1.orig $ hg id 08a16e8e4408 tip $ hg revert --all $ hg diff $ hg status ? file1.orig $ hg id 08a16e8e4408 tip $ hg revert -r tip --all $ hg diff $ hg status ? file1.orig $ hg id 08a16e8e4408 tip $ hg update -C 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg diff $ hg status ? file1.orig $ hg id 08a16e8e4408 tip mercurial-3.7.3/tests/test-backwards-remove.t0000644000175000017500000000043012676531525020664 0ustar mpmmpm00000000000000 $ hg init $ echo This is file a1 > a $ hg add a $ hg commit -m "commit #0" $ ls a $ echo This is file b1 > b $ hg add b $ hg commit -m "commit #1" $ hg co 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved B should disappear $ ls a mercurial-3.7.3/tests/test-merge-criss-cross.t0000644000175000017500000002352312676531525021007 0ustar mpmmpm00000000000000Criss cross merging $ hg init criss-cross $ cd criss-cross $ echo '0 base' > f1 $ echo '0 base' > f2 $ hg ci -Aqm '0 base' $ echo '1 first change' > f1 $ hg ci -m '1 first change f1' $ hg up -qr0 $ echo '2 first change' > f2 $ hg ci -qm '2 first change f2' $ hg merge -qr 1 $ hg ci -m '3 merge' $ hg up -qr2 $ hg merge -qr1 $ hg ci -qm '4 merge' $ echo '5 second change' > f1 $ hg ci -m '5 second change f1' $ hg up -r3 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo '6 second change' > f2 $ hg ci -m '6 second change f2' $ hg log -G @ changeset: 6:3b08d01b0ab5 | tag: tip | parent: 3:cf89f02107e5 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: 6 second change f2 | | o changeset: 5:adfe50279922 | | user: test | | date: Thu Jan 01 00:00:00 1970 +0000 | | summary: 5 second change f1 | | | o changeset: 4:7d3e55501ae6 | |\ parent: 2:40663881a6dd | | | 
parent: 1:0f6b37dbe527 | | | user: test | | | date: Thu Jan 01 00:00:00 1970 +0000 | | | summary: 4 merge | | | o---+ changeset: 3:cf89f02107e5 | | | parent: 2:40663881a6dd |/ / parent: 1:0f6b37dbe527 | | user: test | | date: Thu Jan 01 00:00:00 1970 +0000 | | summary: 3 merge | | | o changeset: 2:40663881a6dd | | parent: 0:40494bf2444c | | user: test | | date: Thu Jan 01 00:00:00 1970 +0000 | | summary: 2 first change f2 | | o | changeset: 1:0f6b37dbe527 |/ user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: 1 first change f1 | o changeset: 0:40494bf2444c user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 0 base $ hg merge -v --debug --tool internal:dump 5 --config merge.preferancestor='!' note: using 0f6b37dbe527 as ancestor of 3b08d01b0ab5 and adfe50279922 alternatively, use --config merge.preferancestor=40663881a6dd searching for copies back to rev 3 resolving manifests branchmerge: True, force: False, partial: False ancestor: 0f6b37dbe527, local: 3b08d01b0ab5+, remote: adfe50279922 preserving f2 for resolve of f2 f1: remote is newer -> g getting f1 f2: versions differ -> m (premerge) picked tool ':dump' for f2 (binary False symlink False changedelete False) merging f2 my f2@3b08d01b0ab5+ other f2@adfe50279922 ancestor f2@40494bf2444c f2: versions differ -> m (merge) picked tool ':dump' for f2 (binary False symlink False changedelete False) my f2@3b08d01b0ab5+ other f2@adfe50279922 ancestor f2@40494bf2444c 1 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ head * ==> f1 <== 5 second change ==> f2 <== 6 second change ==> f2.base <== 0 base ==> f2.local <== 6 second change ==> f2.orig <== 6 second change ==> f2.other <== 2 first change $ hg up -qC . 
$ hg merge -v --tool internal:dump 5 --config merge.preferancestor="null 40663881 3b08d" note: using 40663881a6dd as ancestor of 3b08d01b0ab5 and adfe50279922 alternatively, use --config merge.preferancestor=0f6b37dbe527 resolving manifests merging f1 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] Redo merge with merge.preferancestor="*" to enable bid merge $ rm f* $ hg up -qC . $ hg merge -v --debug --tool internal:dump 5 --config merge.preferancestor="*" note: merging 3b08d01b0ab5+ and adfe50279922 using bids from ancestors 0f6b37dbe527 and 40663881a6dd calculating bids for ancestor 0f6b37dbe527 searching for copies back to rev 3 resolving manifests branchmerge: True, force: False, partial: False ancestor: 0f6b37dbe527, local: 3b08d01b0ab5+, remote: adfe50279922 f1: remote is newer -> g f2: versions differ -> m calculating bids for ancestor 40663881a6dd searching for copies back to rev 3 resolving manifests branchmerge: True, force: False, partial: False ancestor: 40663881a6dd, local: 3b08d01b0ab5+, remote: adfe50279922 f1: versions differ -> m f2: remote unchanged -> k auction for merging merge bids f1: picking 'get' action f2: picking 'keep' action end of auction f1: remote is newer -> g getting f1 f2: remote unchanged -> k 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ head * ==> f1 <== 5 second change ==> f2 <== 6 second change The other way around: $ hg up -C -r5 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge -v --debug --config merge.preferancestor="*" note: merging adfe50279922+ and 3b08d01b0ab5 using bids from ancestors 0f6b37dbe527 and 40663881a6dd calculating bids for ancestor 0f6b37dbe527 searching for copies back to rev 3 resolving manifests branchmerge: True, force: False, partial: False ancestor: 0f6b37dbe527, local: adfe50279922+, remote: 
3b08d01b0ab5 f1: remote unchanged -> k f2: versions differ -> m calculating bids for ancestor 40663881a6dd searching for copies back to rev 3 resolving manifests branchmerge: True, force: False, partial: False ancestor: 40663881a6dd, local: adfe50279922+, remote: 3b08d01b0ab5 f1: versions differ -> m f2: remote is newer -> g auction for merging merge bids f1: picking 'keep' action f2: picking 'get' action end of auction f2: remote is newer -> g getting f2 f1: remote unchanged -> k 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ head * ==> f1 <== 5 second change ==> f2 <== 6 second change Verify how the output looks and and how verbose it is: $ hg up -qC $ hg merge 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg up -qC $ hg merge -v note: merging 3b08d01b0ab5+ and adfe50279922 using bids from ancestors 0f6b37dbe527 and 40663881a6dd calculating bids for ancestor 0f6b37dbe527 resolving manifests calculating bids for ancestor 40663881a6dd resolving manifests auction for merging merge bids f1: picking 'get' action f2: picking 'keep' action end of auction getting f1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg up -qC $ hg merge -v --debug --config merge.preferancestor="*" note: merging 3b08d01b0ab5+ and adfe50279922 using bids from ancestors 0f6b37dbe527 and 40663881a6dd calculating bids for ancestor 0f6b37dbe527 searching for copies back to rev 3 resolving manifests branchmerge: True, force: False, partial: False ancestor: 0f6b37dbe527, local: 3b08d01b0ab5+, remote: adfe50279922 f1: remote is newer -> g f2: versions differ -> m calculating bids for ancestor 40663881a6dd searching for copies back to rev 3 resolving manifests branchmerge: True, force: False, partial: False ancestor: 40663881a6dd, local: 3b08d01b0ab5+, remote: adfe50279922 f1: versions differ -> m f2: remote 
unchanged -> k auction for merging merge bids f1: picking 'get' action f2: picking 'keep' action end of auction f1: remote is newer -> g getting f1 f2: remote unchanged -> k 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cd .. http://stackoverflow.com/questions/9350005/how-do-i-specify-a-merge-base-to-use-in-a-hg-merge/9430810 $ hg init ancestor-merging $ cd ancestor-merging $ echo a > x $ hg commit -A -m a x $ hg update -q 0 $ echo b >> x $ hg commit -m b $ hg update -q 0 $ echo c >> x $ hg commit -qm c $ hg update -q 1 $ hg merge -q --tool internal:local 2 $ echo c >> x $ hg commit -m bc $ hg update -q 2 $ hg merge -q --tool internal:local 1 $ echo b >> x $ hg commit -qm cb $ hg merge --config merge.preferancestor='!' note: using 70008a2163f6 as ancestor of 0d355fdef312 and 4b8b546a3eef alternatively, use --config merge.preferancestor=b211bbc6eb3c merging x 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat x a c b c $ hg up -qC . $ hg merge --config merge.preferancestor=b211bbc6eb3c note: using b211bbc6eb3c as ancestor of 0d355fdef312 and 4b8b546a3eef alternatively, use --config merge.preferancestor=70008a2163f6 merging x 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat x a b c b $ hg up -qC . 
$ hg merge -v --config merge.preferancestor="*" note: merging 0d355fdef312+ and 4b8b546a3eef using bids from ancestors 70008a2163f6 and b211bbc6eb3c calculating bids for ancestor 70008a2163f6 resolving manifests calculating bids for ancestor b211bbc6eb3c resolving manifests auction for merging merge bids x: multiple bids for merge action: versions differ -> m versions differ -> m x: ambiguous merge - picked m action end of auction merging x 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat x a c b c Verify that the old context ancestor works with / despite preferancestor: $ hg log -r 'ancestor(head())' --config merge.preferancestor=1 -T '{rev}\n' 1 $ hg log -r 'ancestor(head())' --config merge.preferancestor=2 -T '{rev}\n' 2 $ hg log -r 'ancestor(head())' --config merge.preferancestor=3 -T '{rev}\n' 1 $ hg log -r 'ancestor(head())' --config merge.preferancestor='1337 * - 2' -T '{rev}\n' 2 $ cd .. mercurial-3.7.3/tests/test-unbundlehash.t0000644000175000017500000000261312676531525020115 0ustar mpmmpm00000000000000#require killdaemons Test wire protocol unbundle with hashed heads (capability: unbundlehash) $ cat << EOF >> $HGRCPATH > [experimental] > # This tests is intended for bundle1 only. > # bundle2 carries the head information inside the bundle itself and > # always uses 'force' as the heads value. > bundle2-exp = False > EOF Create a remote repository. $ hg init remote $ hg serve -R remote --config web.push_ssl=False --config web.allow_push=* -p $HGPORT -d --pid-file=hg1.pid -E error.log -A access.log $ cat hg1.pid >> $DAEMON_PIDS Clone the repository and push a change. 
$ hg clone http://localhost:$HGPORT/ local no changes found updating to branch default 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ touch local/README $ hg ci -R local -A -m hoge adding README $ hg push -R local pushing to http://localhost:$HGPORT/ searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files Ensure hashed heads format is used. The hash here is always the same since the remote repository only has the null head. $ cat access.log | grep unbundle * - - [*] "POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=686173686564+6768033e216468247bd031a0a2d9876d79818f8f (glob) Explicitly kill daemons to let the test exit on Windows $ killdaemons.py mercurial-3.7.3/tests/test-check-commit.t0000644000175000017500000000115612676531525020001 0ustar mpmmpm00000000000000#require test-repo Enable obsolescence to avoid the warning issue when obsmarker are found $ cat >> $HGRCPATH << EOF > [experimental] > evolution=createmarkers > EOF Go back in the hg repo $ cd $TESTDIR/.. $ for node in `hg log --rev 'not public() and ::.' --template '{node|short}\n'`; do > hg export $node | contrib/check-commit > ${TESTTMP}/check-commit.out > if [ $? 
-ne 0 ]; then > echo "Revision $node does not comply to rules" > echo '------------------------------------------------------' > cat ${TESTTMP}/check-commit.out > echo > fi > done mercurial-3.7.3/tests/test-merge-internal-tools-pattern.t0000644000175000017500000000553512676531525023165 0ustar mpmmpm00000000000000Make sure that the internal merge tools (internal:fail, internal:local, internal:union and internal:other) are used when matched by a merge-pattern in hgrc Make sure HGMERGE doesn't interfere with the test: $ unset HGMERGE $ hg init Initial file contents: $ echo "line 1" > f $ echo "line 2" >> f $ echo "line 3" >> f $ hg ci -Am "revision 0" adding f $ cat f line 1 line 2 line 3 Branch 1: editing line 1: $ sed 's/line 1/first line/' f > f.new $ mv f.new f $ hg ci -Am "edited first line" Branch 2: editing line 3: $ hg update 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ sed 's/line 3/third line/' f > f.new $ mv f.new f $ hg ci -Am "edited third line" created new head Merge using internal:fail tool: $ echo "[merge-patterns]" > .hg/hgrc $ echo "* = internal:fail" >> .hg/hgrc $ hg merge 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' 
to abandon [1] $ cat f line 1 line 2 third line $ hg stat M f Merge using internal:local tool: $ hg update -C 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ sed 's/internal:fail/internal:local/' .hg/hgrc > .hg/hgrc.new $ mv .hg/hgrc.new .hg/hgrc $ hg merge 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat f line 1 line 2 third line $ hg stat M f Merge using internal:other tool: $ hg update -C 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ sed 's/internal:local/internal:other/' .hg/hgrc > .hg/hgrc.new $ mv .hg/hgrc.new .hg/hgrc $ hg merge 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat f first line line 2 line 3 $ hg stat M f Merge using default tool: $ hg update -C 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm .hg/hgrc $ hg merge merging f 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat f first line line 2 third line $ hg stat M f Merge using internal:union tool: $ hg update -C 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo "line 4a" >>f $ hg ci -Am "Adding fourth line (commit 4)" $ hg update 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo "line 4b" >>f $ hg ci -Am "Adding fourth line v2 (commit 5)" created new head $ echo "[merge-patterns]" > .hg/hgrc $ echo "* = internal:union" >> .hg/hgrc $ hg merge 3 merging f 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat f line 1 line 2 third line line 4b line 4a mercurial-3.7.3/tests/test-notify-changegroup.t0000644000175000017500000001225412676531525021247 0ustar mpmmpm00000000000000 $ cat <> $HGRCPATH > [extensions] > notify= > > [hooks] > changegroup.notify = python:hgext.notify.hook > > [notify] > sources = push > diffstat 
= False > maxsubject = 10 > > [usersubs] > foo@bar = * > > [reposubs] > * = baz > EOF $ hg init a clone $ hg --traceback clone a b updating to branch default 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo a > b/a commit $ hg --traceback --cwd b commit -Ama adding a $ echo a >> b/a commit $ hg --traceback --cwd b commit -Amb push $ hg --traceback --cwd b push ../a 2>&1 | > $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),' pushing to ../a searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Date: * (glob) Subject: * (glob) From: test X-Hg-Notification: changeset cb9a9f314b8b Message-Id: <*> (glob) To: baz, foo@bar changeset cb9a9f314b8b in $TESTTMP/a (glob) details: $TESTTMP/a?cmd=changeset;node=cb9a9f314b8b summary: a changeset ba677d0156c1 in $TESTTMP/a (glob) details: $TESTTMP/a?cmd=changeset;node=ba677d0156c1 summary: b diffs (6 lines): diff -r 000000000000 -r ba677d0156c1 a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,2 @@ +a +a $ hg --cwd a rollback repository tip rolled back to revision -1 (undo push) unbundle with unrelated source $ hg --cwd b bundle ../test.hg ../a searching for changes 2 changesets found $ hg --cwd a unbundle ../test.hg adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files (run 'hg update' to get a working copy) $ hg --cwd a rollback repository tip rolled back to revision -1 (undo unbundle) unbundle with correct source $ hg --config notify.sources=unbundle --cwd a unbundle ../test.hg 2>&1 | > $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),' adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 
Content-Transfer-Encoding: 7bit Date: * (glob) Subject: * (glob) From: test X-Hg-Notification: changeset cb9a9f314b8b Message-Id: <*> (glob) To: baz, foo@bar changeset cb9a9f314b8b in $TESTTMP/a (glob) details: $TESTTMP/a?cmd=changeset;node=cb9a9f314b8b summary: a changeset ba677d0156c1 in $TESTTMP/a (glob) details: $TESTTMP/a?cmd=changeset;node=ba677d0156c1 summary: b diffs (6 lines): diff -r 000000000000 -r ba677d0156c1 a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,2 @@ +a +a (run 'hg update' to get a working copy) Check that using the first committer as the author of a changeset works: Check that the config option works. Check that the first committer is indeed used for "From:". Check that the merge user is NOT used for "From:" Create new file $ echo a > b/b $ echo b >> b/b $ echo c >> b/b $ hg --traceback --cwd b commit -Amnewfile -u committer_1 adding b commit as one user $ echo x > b/b $ echo b >> b/b $ echo c >> b/b $ hg --traceback --cwd b commit -Amx -u committer_2 commit as other user, change file so we can do an (automatic) merge $ hg --cwd b up 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo a > b/b $ echo b >> b/b $ echo y >> b/b $ hg --traceback --cwd b commit -Amy -u committer_3 created new head merge as a different user $ hg --cwd b merge --config notify.fromauthor=True merging b 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg --traceback --cwd b commit -Am "merged" push $ hg --traceback --cwd b --config notify.fromauthor=True push ../a 2>&1 | > $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),' pushing to ../a searching for changes adding changesets adding manifests adding file changes added 4 changesets with 4 changes to 1 files Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Date: * (glob) Subject: * (glob) From: committer_1 
X-Hg-Notification: changeset 84e487dddc58 Message-Id: <*> (glob) To: baz, foo@bar changeset 84e487dddc58 in $TESTTMP/a (glob) details: $TESTTMP/a?cmd=changeset;node=84e487dddc58 summary: newfile changeset b29c7a2b6b0c in $TESTTMP/a (glob) details: $TESTTMP/a?cmd=changeset;node=b29c7a2b6b0c summary: x changeset 0957c7d64886 in $TESTTMP/a (glob) details: $TESTTMP/a?cmd=changeset;node=0957c7d64886 summary: y changeset 485b4e6b0249 in $TESTTMP/a (glob) details: $TESTTMP/a?cmd=changeset;node=485b4e6b0249 summary: merged diffs (7 lines): diff -r ba677d0156c1 -r 485b4e6b0249 b --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/b Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,3 @@ +x +b +y $ hg --cwd a rollback repository tip rolled back to revision 1 (undo push) mercurial-3.7.3/tests/test-globalopts.t0000644000175000017500000003355112676531525017610 0ustar mpmmpm00000000000000 $ hg init a $ cd a $ echo a > a $ hg ci -A -d'1 0' -m a adding a $ cd .. $ hg init b $ cd b $ echo b > b $ hg ci -A -d'1 0' -m b adding b $ cd .. $ hg clone a c updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd c $ cat >> .hg/hgrc < [paths] > relative = ../a > EOF $ hg pull -f ../b pulling from ../b searching for changes warning: repository is unrelated requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg merge 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cd .. 
Testing -R/--repository: $ hg -R a tip changeset: 0:8580ff50825a tag: tip user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: a $ hg --repository b tip changeset: 0:b6c483daf290 tag: tip user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: b -R with a URL: $ hg -R file:a identify 8580ff50825a tip $ hg -R file://localhost/`pwd`/a/ identify 8580ff50825a tip -R with path aliases: $ cd c $ hg -R default identify 8580ff50825a tip $ hg -R relative identify 8580ff50825a tip $ echo '[paths]' >> $HGRCPATH $ echo 'relativetohome = a' >> $HGRCPATH $ HOME=`pwd`/../ hg -R relativetohome identify 8580ff50825a tip $ cd .. #if no-outer-repo Implicit -R: $ hg ann a/a 0: a $ hg ann a/a a/a 0: a $ hg ann a/a b/b abort: no repository found in '$TESTTMP' (.hg not found)! [255] $ hg -R b ann a/a abort: a/a not under root '$TESTTMP/b' (glob) (consider using '--cwd b') [255] $ hg log abort: no repository found in '$TESTTMP' (.hg not found)! [255] #endif Abbreviation of long option: $ hg --repo c tip changeset: 1:b6c483daf290 tag: tip parent: -1:000000000000 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: b earlygetopt with duplicate options (36d23de02da1): $ hg --cwd a --cwd b --cwd c tip changeset: 1:b6c483daf290 tag: tip parent: -1:000000000000 user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: b $ hg --repo c --repository b -R a tip changeset: 0:8580ff50825a tag: tip user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: a earlygetopt short option without following space: $ hg -q -Rb tip 0:b6c483daf290 earlygetopt with illegal abbreviations: $ hg --confi "foo.bar=baz" abort: option --config may not be abbreviated! [255] $ hg --cw a tip abort: option --cwd may not be abbreviated! [255] $ hg --rep a tip abort: option -R has to be separated from other options (e.g. not -qR) and --repository may only be abbreviated as --repo! [255] $ hg --repositor a tip abort: option -R has to be separated from other options (e.g. 
not -qR) and --repository may only be abbreviated as --repo! [255] $ hg -qR a tip abort: option -R has to be separated from other options (e.g. not -qR) and --repository may only be abbreviated as --repo! [255] $ hg -qRa tip abort: option -R has to be separated from other options (e.g. not -qR) and --repository may only be abbreviated as --repo! [255] Testing --cwd: $ hg --cwd a parents changeset: 0:8580ff50825a tag: tip user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: a Testing -y/--noninteractive - just be sure it is parsed: $ hg --cwd a tip -q --noninteractive 0:8580ff50825a $ hg --cwd a tip -q -y 0:8580ff50825a Testing -q/--quiet: $ hg -R a -q tip 0:8580ff50825a $ hg -R b -q tip 0:b6c483daf290 $ hg -R c --quiet parents 0:8580ff50825a 1:b6c483daf290 Testing -v/--verbose: $ hg --cwd c head -v changeset: 1:b6c483daf290 tag: tip parent: -1:000000000000 user: test date: Thu Jan 01 00:00:01 1970 +0000 files: b description: b changeset: 0:8580ff50825a user: test date: Thu Jan 01 00:00:01 1970 +0000 files: a description: a $ hg --cwd b tip --verbose changeset: 0:b6c483daf290 tag: tip user: test date: Thu Jan 01 00:00:01 1970 +0000 files: b description: b Testing --config: $ hg --cwd c --config paths.quuxfoo=bar paths | grep quuxfoo > /dev/null && echo quuxfoo quuxfoo $ hg --cwd c --config '' tip -q abort: malformed --config option: '' (use --config section.name=value) [255] $ hg --cwd c --config a.b tip -q abort: malformed --config option: 'a.b' (use --config section.name=value) [255] $ hg --cwd c --config a tip -q abort: malformed --config option: 'a' (use --config section.name=value) [255] $ hg --cwd c --config a.= tip -q abort: malformed --config option: 'a.=' (use --config section.name=value) [255] $ hg --cwd c --config .b= tip -q abort: malformed --config option: '.b=' (use --config section.name=value) [255] Testing --debug: $ hg --cwd c log --debug changeset: 1:b6c483daf2907ce5825c0bb50f5716226281cc1a tag: tip phase: public parent: 
-1:0000000000000000000000000000000000000000 parent: -1:0000000000000000000000000000000000000000 manifest: 1:23226e7a252cacdc2d99e4fbdc3653441056de49 user: test date: Thu Jan 01 00:00:01 1970 +0000 files+: b extra: branch=default description: b changeset: 0:8580ff50825a50c8f716709acdf8de0deddcd6ab phase: public parent: -1:0000000000000000000000000000000000000000 parent: -1:0000000000000000000000000000000000000000 manifest: 0:a0c8bcbbb45c63b90b70ad007bf38961f64f2af0 user: test date: Thu Jan 01 00:00:01 1970 +0000 files+: a extra: branch=default description: a Testing --traceback: $ hg --cwd c --config x --traceback id 2>&1 | grep -i 'traceback' Traceback (most recent call last): Testing --time: $ hg --cwd a --time id 8580ff50825a tip time: real * (glob) Testing --version: $ hg --version -q Mercurial Distributed SCM * (glob) hide outer repo $ hg init Testing -h/--help: $ hg -h Mercurial Distributed SCM list of commands: add add the specified files on the next commit addremove add all new files, delete all missing files annotate show changeset information by line for each file archive create an unversioned archive of a repository revision backout reverse effect of earlier changeset bisect subdivision search of changesets bookmarks create a new bookmark or list existing bookmarks branch set or show the current branch name branches list repository named branches bundle create a changegroup file cat output the current or given revision of files clone make a copy of an existing repository commit commit the specified files or all outstanding changes config show combined config settings from all hgrc files copy mark files as copied for the next commit diff diff repository (or selected files) export dump the header and diffs for one or more changesets files list tracked files forget forget the specified files on the next commit graft copy changes from other branches onto the current branch grep search for a pattern in specified files and revisions heads show branch heads help 
show help for a given topic or a help overview identify identify the working directory or specified revision import import an ordered set of patches incoming show new changesets found in source init create a new repository in the given directory log show revision history of entire repository or files manifest output the current or given revision of the project manifest merge merge another revision into working directory outgoing show changesets not found in the destination paths show aliases for remote repositories phase set or show the current phase name pull pull changes from the specified source push push changes to the specified destination recover roll back an interrupted transaction remove remove the specified files on the next commit rename rename files; equivalent of copy + remove resolve redo merges or set/view the merge status of files revert restore files to their checkout state root print the root (top) of the current working directory serve start stand-alone webserver status show changed files in the working directory summary summarize working directory state tag add one or more tags for the current or given revision tags list repository tags unbundle apply one or more changegroup files update update working directory (or switch revisions) verify verify the integrity of the repository version output version and copyright information additional help topics: config Configuration Files dates Date Formats diffs Diff Formats environment Environment Variables extensions Using Additional Features filesets Specifying File Sets glossary Glossary hgignore Syntax for Mercurial Ignore Files hgweb Configuring hgweb internals Technical implementation topics merge-tools Merge Tools multirevs Specifying Multiple Revisions patterns File Name Patterns phases Working with Phases revisions Specifying Single Revisions revsets Specifying Revision Sets scripting Using Mercurial from scripts and automation subrepos Subrepositories templating Template Usage urls URL Paths (use 
"hg help -v" to show built-in aliases and global options) $ hg --help Mercurial Distributed SCM list of commands: add add the specified files on the next commit addremove add all new files, delete all missing files annotate show changeset information by line for each file archive create an unversioned archive of a repository revision backout reverse effect of earlier changeset bisect subdivision search of changesets bookmarks create a new bookmark or list existing bookmarks branch set or show the current branch name branches list repository named branches bundle create a changegroup file cat output the current or given revision of files clone make a copy of an existing repository commit commit the specified files or all outstanding changes config show combined config settings from all hgrc files copy mark files as copied for the next commit diff diff repository (or selected files) export dump the header and diffs for one or more changesets files list tracked files forget forget the specified files on the next commit graft copy changes from other branches onto the current branch grep search for a pattern in specified files and revisions heads show branch heads help show help for a given topic or a help overview identify identify the working directory or specified revision import import an ordered set of patches incoming show new changesets found in source init create a new repository in the given directory log show revision history of entire repository or files manifest output the current or given revision of the project manifest merge merge another revision into working directory outgoing show changesets not found in the destination paths show aliases for remote repositories phase set or show the current phase name pull pull changes from the specified source push push changes to the specified destination recover roll back an interrupted transaction remove remove the specified files on the next commit rename rename files; equivalent of copy + remove resolve redo 
merges or set/view the merge status of files revert restore files to their checkout state root print the root (top) of the current working directory serve start stand-alone webserver status show changed files in the working directory summary summarize working directory state tag add one or more tags for the current or given revision tags list repository tags unbundle apply one or more changegroup files update update working directory (or switch revisions) verify verify the integrity of the repository version output version and copyright information additional help topics: config Configuration Files dates Date Formats diffs Diff Formats environment Environment Variables extensions Using Additional Features filesets Specifying File Sets glossary Glossary hgignore Syntax for Mercurial Ignore Files hgweb Configuring hgweb internals Technical implementation topics merge-tools Merge Tools multirevs Specifying Multiple Revisions patterns File Name Patterns phases Working with Phases revisions Specifying Single Revisions revsets Specifying Revision Sets scripting Using Mercurial from scripts and automation subrepos Subrepositories templating Template Usage urls URL Paths (use "hg help -v" to show built-in aliases and global options) Not tested: --debugger mercurial-3.7.3/tests/test-issue660.t0000644000175000017500000000407712676531525017027 0ustar mpmmpm00000000000000https://bz.mercurial-scm.org/660 and: https://bz.mercurial-scm.org/322 $ hg init $ echo a > a $ mkdir b $ echo b > b/b $ hg commit -A -m "a is file, b is dir" adding a adding b/b File replaced with directory: $ rm a $ mkdir a $ echo a > a/a Should fail - would corrupt dirstate: $ hg add a/a abort: file 'a' in dirstate clashes with 'a/a' [255] Removing shadow: $ hg rm --after a Should succeed - shadow removed: $ hg add a/a Directory replaced with file: $ rm -r b $ echo b > b Should fail - would corrupt dirstate: $ hg add b abort: directory 'b' already in dirstate [255] Removing shadow: $ hg rm --after b/b 
Should succeed - shadow removed: $ hg add b Look what we got: $ hg st A a/a A b R a R b/b Revert reintroducing shadow - should fail: $ rm -r a b $ hg revert b/b abort: file 'b' in dirstate clashes with 'b/b' [255] Revert all - should succeed: $ hg revert --all undeleting a forgetting a/a (glob) forgetting b undeleting b/b (glob) $ hg st Issue3423: $ hg forget a $ echo zed > a $ hg revert a $ hg st ? a.orig $ rm a.orig addremove: $ rm -r a b $ mkdir a $ echo a > a/a $ echo b > b $ hg addremove -s 0 removing a adding a/a adding b removing b/b $ hg st A a/a A b R a R b/b commit: $ hg ci -A -m "a is dir, b is file" $ hg st --all C a/a C b Long directory replaced with file: $ mkdir d $ mkdir d/d $ echo d > d/d/d $ hg commit -A -m "d is long directory" adding d/d/d $ rm -r d $ echo d > d Should fail - would corrupt dirstate: $ hg add d abort: directory 'd' already in dirstate [255] Removing shadow: $ hg rm --after d/d/d Should succeed - shadow removed: $ hg add d $ hg ci -md Update should work at least with clean working directory: $ rm -r a b d $ hg up -r 0 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg st --all C a C b/b $ rm -r a b $ hg up -r 1 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg st --all C a/a C b mercurial-3.7.3/tests/test-issue1089.t0000644000175000017500000000047012676531525017106 0ustar mpmmpm00000000000000https://bz.mercurial-scm.org/1089 $ hg init $ mkdir a $ echo a > a/b $ hg ci -Am m adding a/b $ hg rm a removing a/b (glob) $ hg ci -m m a $ mkdir a b $ echo a > a/b $ hg ci -Am m adding a/b $ hg rm a removing a/b (glob) $ cd b Relative delete: $ hg ci -m m ../a $ cd .. 
mercurial-3.7.3/tests/test-debugcommands.t0000644000175000017500000001073112676531525020245 0ustar mpmmpm00000000000000 $ cat << EOF >> $HGRCPATH > [format] > usegeneraldelta=yes > EOF $ hg init debugrevlog $ cd debugrevlog $ echo a > a $ hg ci -Am adda adding a $ hg debugrevlog -m format : 1 flags : inline, generaldelta revisions : 1 merges : 0 ( 0.00%) normal : 1 (100.00%) revisions : 1 full : 1 (100.00%) deltas : 0 ( 0.00%) revision size : 44 full : 44 (100.00%) deltas : 0 ( 0.00%) avg chain length : 0 max chain length : 0 compression ratio : 0 uncompressed data size (min/max/avg) : 43 / 43 / 43 full revision size (min/max/avg) : 44 / 44 / 44 delta size (min/max/avg) : 0 / 0 / 0 Test debugindex, with and without the --debug flag $ hg debugindex a rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 3 .... 0 b789fdd96dc2 000000000000 000000000000 (re) $ hg --debug debugindex a rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 3 .... 0 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 (re) $ hg debugindex -f 1 a rev flag offset length size ..... link p1 p2 nodeid (re) 0 0000 0 3 2 .... 0 -1 -1 b789fdd96dc2 (re) $ hg --debug debugindex -f 1 a rev flag offset length size ..... link p1 p2 nodeid (re) 0 0000 0 3 2 .... 
0 -1 -1 b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 (re) debugdelta chain basic output $ hg debugdeltachain -m rev chain# chainlen prev delta size rawsize chainsize ratio lindist extradist extraratio 0 1 1 -1 base 44 43 44 1.02326 44 0 0.00000 $ hg debugdeltachain -m -T '{rev} {chainid} {chainlen}\n' 0 1 1 $ hg debugdeltachain -m -Tjson [ { "chainid": 1, "chainlen": 1, "chainratio": 1.02325581395, "chainsize": 44, "compsize": 44, "deltatype": "base", "extradist": 0, "extraratio": 0.0, "lindist": 44, "prevrev": -1, "rev": 0, "uncompsize": 43 } ] Test max chain len $ cat >> $HGRCPATH << EOF > [format] > maxchainlen=4 > EOF $ printf "This test checks if maxchainlen config value is respected also it can serve as basic test for debugrevlog -d .\n" >> a $ hg ci -m a $ printf "b\n" >> a $ hg ci -m a $ printf "c\n" >> a $ hg ci -m a $ printf "d\n" >> a $ hg ci -m a $ printf "e\n" >> a $ hg ci -m a $ printf "f\n" >> a $ hg ci -m a $ printf 'g\n' >> a $ hg ci -m a $ printf 'h\n' >> a $ hg ci -m a $ hg debugrevlog -d a # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen 0 -1 -1 0 ??? 0 0 0 0 ??? ???? ? 1 0 (glob) 1 0 -1 ??? ??? 0 0 0 0 ??? ???? ? 1 1 (glob) 2 1 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob) 3 2 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob) 4 3 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 4 (glob) 5 4 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 0 (glob) 6 5 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 1 (glob) 7 6 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 2 (glob) 8 7 -1 ??? ??? ??? ??? ??? 0 ??? ???? ? 1 3 (glob) $ cd .. 
Test internal debugstacktrace command $ cat > debugstacktrace.py << EOF > from mercurial.util import debugstacktrace, dst, sys > def f(): > dst('hello world') > def g(): > f() > debugstacktrace(skip=-5, f=sys.stdout) > g() > EOF $ python debugstacktrace.py hello world at: debugstacktrace.py:7 in * (glob) debugstacktrace.py:5 in g debugstacktrace.py:3 in f stacktrace at: debugstacktrace.py:7 *in * (glob) debugstacktrace.py:6 *in g (glob) */util.py:* in debugstacktrace (glob) mercurial-3.7.3/tests/blacklists/0000755000175000017500000000000012676531544016425 5ustar mpmmpm00000000000000mercurial-3.7.3/tests/blacklists/linux-vfat0000644000175000017500000000067312676531525020452 0ustar mpmmpm00000000000000# invalid filenames test-add.t test-init.t test-clone.t test-contrib.t test-hgweb-raw.t test-walk.t # no sockets or fifos test-hup.t # no hardlinks test-hardlinks.t test-relink.t # exec bit problems test-convert-bzr-114.t test-convert-bzr-directories.t test-convert-bzr-merges.t test-convert-bzr-treeroot.t test-convert-darcs.t test-merge-tools.t # debugstate exec bit false positives test-dirstate.t test-filebranch.t test-merge-remove.t mercurial-3.7.3/tests/blacklists/README0000644000175000017500000000100412676531525017277 0ustar mpmmpm00000000000000Put here definitions of blacklists for run-tests.py Create a file per blacklist. Each file should list the names of tests that you want to be skipped. File names are meant to be used as targets for run-tests.py --blacklist option. Lines starting with # are ignored. White spaces are stripped. e.g. if you create a blacklist/example file containing: test-hgrc # some comment test-help then calling "run-tests.py --blacklist blacklists/example" will exclude test-hgrc and test-help from the list of tests to run. 
mercurial-3.7.3/tests/test-subrepo.t0000644000175000017500000013163412676531525017122 0ustar mpmmpm00000000000000Let commit recurse into subrepos by default to match pre-2.0 behavior: $ echo "[ui]" >> $HGRCPATH $ echo "commitsubrepos = Yes" >> $HGRCPATH $ hg init t $ cd t first revision, no sub $ echo a > a $ hg ci -Am0 adding a add first sub $ echo s = s > .hgsub $ hg add .hgsub $ hg init s $ echo a > s/a Issue2232: committing a subrepo without .hgsub $ hg ci -mbad s abort: can't commit subrepos without .hgsub [255] $ hg -R s add s/a $ hg files -S .hgsub a s/a (glob) $ hg -R s ci -Ams0 $ hg sum parent: 0:f7b1eb17ad24 tip 0 branch: default commit: 1 added, 1 subrepos update: (current) phases: 1 draft $ hg ci -m1 test handling .hgsubstate "added" explicitly. $ hg parents --template '{node}\n{files}\n' 7cf8cfea66e410e8e3336508dfeec07b3192de51 .hgsub .hgsubstate $ hg rollback -q $ hg add .hgsubstate $ hg ci -m1 $ hg parents --template '{node}\n{files}\n' 7cf8cfea66e410e8e3336508dfeec07b3192de51 .hgsub .hgsubstate Revert subrepo and test subrepo fileset keyword: $ echo b > s/a $ hg revert --dry-run "set:subrepo('glob:s*')" reverting subrepo s reverting s/a (glob) $ cat s/a b $ hg revert "set:subrepo('glob:s*')" reverting subrepo s reverting s/a (glob) $ cat s/a a $ rm s/a.orig Revert subrepo with no backup. 
The "reverting s/a" line is gone since we're really running 'hg update' in the subrepo: $ echo b > s/a $ hg revert --no-backup s reverting subrepo s Issue2022: update -C $ echo b > s/a $ hg sum parent: 1:7cf8cfea66e4 tip 1 branch: default commit: 1 subrepos update: (current) phases: 2 draft $ hg co -C 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg sum parent: 1:7cf8cfea66e4 tip 1 branch: default commit: (clean) update: (current) phases: 2 draft commands that require a clean repo should respect subrepos $ echo b >> s/a $ hg backout tip abort: uncommitted changes in subrepository 's' [255] $ hg revert -C -R s s/a add sub sub $ echo ss = ss > s/.hgsub $ hg init s/ss $ echo a > s/ss/a $ hg -R s add s/.hgsub $ hg -R s/ss add s/ss/a $ hg sum parent: 1:7cf8cfea66e4 tip 1 branch: default commit: 1 subrepos update: (current) phases: 2 draft $ hg ci -m2 committing subrepository s committing subrepository s/ss (glob) $ hg sum parent: 2:df30734270ae tip 2 branch: default commit: (clean) update: (current) phases: 3 draft test handling .hgsubstate "modified" explicitly. 
$ hg parents --template '{node}\n{files}\n' df30734270ae757feb35e643b7018e818e78a9aa .hgsubstate $ hg rollback -q $ hg status -A .hgsubstate M .hgsubstate $ hg ci -m2 $ hg parents --template '{node}\n{files}\n' df30734270ae757feb35e643b7018e818e78a9aa .hgsubstate bump sub rev (and check it is ignored by ui.commitsubrepos) $ echo b > s/a $ hg -R s ci -ms1 $ hg --config ui.commitsubrepos=no ci -m3 leave sub dirty (and check ui.commitsubrepos=no aborts the commit) $ echo c > s/a $ hg --config ui.commitsubrepos=no ci -m4 abort: uncommitted changes in subrepository 's' (use --subrepos for recursive commit) [255] $ hg id f6affe3fbfaa+ tip $ hg -R s ci -mc $ hg id f6affe3fbfaa+ tip $ echo d > s/a $ hg ci -m4 committing subrepository s $ hg tip -R s changeset: 4:02dcf1d70411 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 4 check caching $ hg co 0 0 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg debugsub restore $ hg co 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg debugsub path s source s revision 02dcf1d704118aee3ee306ccfa1910850d5b05ef new branch for merge tests $ hg co 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo t = t >> .hgsub $ hg init t $ echo t > t/t $ hg -R t add t adding t/t (glob) 5 $ hg ci -m5 # add sub committing subrepository t created new head $ echo t2 > t/t 6 $ hg st -R s $ hg ci -m6 # change sub committing subrepository t $ hg debugsub path s source s revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 path t source t revision 6747d179aa9a688023c4b0cad32e4c92bb7f34ad $ echo t3 > t/t 7 $ hg ci -m7 # change sub again for conflict test committing subrepository t $ hg rm .hgsub 8 $ hg ci -m8 # remove sub test handling .hgsubstate "removed" explicitly. 
$ hg parents --template '{node}\n{files}\n' 96615c1dad2dc8e3796d7332c77ce69156f7b78e .hgsub .hgsubstate $ hg rollback -q $ hg remove .hgsubstate $ hg ci -m8 $ hg parents --template '{node}\n{files}\n' 96615c1dad2dc8e3796d7332c77ce69156f7b78e .hgsub .hgsubstate merge tests $ hg co -C 3 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge 5 # test adding 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg debugsub path s source s revision fc627a69481fcbe5f1135069e8a3881c023e4cf5 path t source t revision 60ca1237c19474e7a3978b0dc1ca4e6f36d51382 $ hg ci -m9 created new head $ hg merge 6 --debug # test change searching for copies back to rev 2 resolving manifests branchmerge: True, force: False, partial: False ancestor: 1f14a2e2d3ec, local: f0d2028bf86d+, remote: 1831e14459c4 .hgsubstate: versions differ -> m (premerge) subrepo merge f0d2028bf86d+ 1831e14459c4 1f14a2e2d3ec subrepo t: other changed, get t:6747d179aa9a688023c4b0cad32e4c92bb7f34ad:hg getting subrepo t resolving manifests branchmerge: False, force: False, partial: False ancestor: 60ca1237c194, local: 60ca1237c194+, remote: 6747d179aa9a t: remote is newer -> g getting t 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg debugsub path s source s revision fc627a69481fcbe5f1135069e8a3881c023e4cf5 path t source t revision 6747d179aa9a688023c4b0cad32e4c92bb7f34ad $ echo conflict > t/t $ hg ci -m10 committing subrepository t $ HGMERGE=internal:merge hg merge --debug 7 # test conflict searching for copies back to rev 2 resolving manifests branchmerge: True, force: False, partial: False ancestor: 1831e14459c4, local: e45c8b14af55+, remote: f94576341bcf .hgsubstate: versions differ -> m (premerge) subrepo merge e45c8b14af55+ f94576341bcf 1831e14459c4 subrepo t: both sides changed subrepository t diverged (local revision: 20a0db6fbf6c, remote revision: 7af322bc1198) 
(M)erge, keep (l)ocal or keep (r)emote? m merging subrepo t searching for copies back to rev 2 resolving manifests branchmerge: True, force: False, partial: False ancestor: 6747d179aa9a, local: 20a0db6fbf6c+, remote: 7af322bc1198 preserving t for resolve of t t: versions differ -> m (premerge) picked tool ':merge' for t (binary False symlink False changedelete False) merging t my t@20a0db6fbf6c+ other t@7af322bc1198 ancestor t@6747d179aa9a t: versions differ -> m (merge) picked tool ':merge' for t (binary False symlink False changedelete False) my t@20a0db6fbf6c+ other t@7af322bc1198 ancestor t@6747d179aa9a warning: conflicts while merging t! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon subrepo t: merge with t:7af322bc1198a32402fe903e0b7ebcfc5c9bf8f4:hg 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) should conflict $ cat t/t <<<<<<< local: 20a0db6fbf6c - test: 10 conflict ======= t3 >>>>>>> other: 7af322bc1198 - test: 7 11: remove subrepo t $ hg co -C 5 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg revert -r 4 .hgsub # remove t $ hg ci -m11 created new head $ hg debugsub path s source s revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 local removed, remote changed, keep changed $ hg merge 6 remote changed subrepository t which local removed use (c)hanged version or (d)elete? 
c 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) BROKEN: should include subrepo t $ hg debugsub path s source s revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 $ cat .hgsubstate e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s 6747d179aa9a688023c4b0cad32e4c92bb7f34ad t $ hg ci -m 'local removed, remote changed, keep changed' BROKEN: should include subrepo t $ hg debugsub path s source s revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 BROKEN: should include subrepo t $ cat .hgsubstate e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s $ cat t/t t2 local removed, remote changed, keep removed $ hg co -C 11 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge --config ui.interactive=true 6 < d > EOF remote changed subrepository t which local removed use (c)hanged version or (d)elete? d 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg debugsub path s source s revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 $ cat .hgsubstate e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s $ hg ci -m 'local removed, remote changed, keep removed' created new head $ hg debugsub path s source s revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 $ cat .hgsubstate e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s local changed, remote removed, keep changed $ hg co -C 6 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge 11 local changed subrepository t which remote removed use (c)hanged version or (d)elete? 
c 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) BROKEN: should include subrepo t $ hg debugsub path s source s revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 BROKEN: should include subrepo t $ cat .hgsubstate e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s $ hg ci -m 'local changed, remote removed, keep changed' created new head BROKEN: should include subrepo t $ hg debugsub path s source s revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 BROKEN: should include subrepo t $ cat .hgsubstate e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s $ cat t/t t2 local changed, remote removed, keep removed $ hg co -C 6 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge --config ui.interactive=true 11 < d > EOF local changed subrepository t which remote removed use (c)hanged version or (d)elete? d 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg debugsub path s source s revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 $ cat .hgsubstate e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s $ hg ci -m 'local changed, remote removed, keep removed' created new head $ hg debugsub path s source s revision e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 $ cat .hgsubstate e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s clean up to avoid having to fix up the tests below $ hg co -C 10 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat >> $HGRCPATH < [extensions] > strip= > EOF $ hg strip -r 11:15 saved backup bundle to $TESTTMP/t/.hg/strip-backup/*-backup.hg (glob) clone $ cd .. 
$ hg clone t tc updating to branch default cloning subrepo s from $TESTTMP/t/s cloning subrepo s/ss from $TESTTMP/t/s/ss (glob) cloning subrepo t from $TESTTMP/t/t 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd tc $ hg debugsub path s source s revision fc627a69481fcbe5f1135069e8a3881c023e4cf5 path t source t revision 20a0db6fbf6c3d2836e6519a642ae929bfc67c0e push $ echo bah > t/t $ hg ci -m11 committing subrepository t $ hg push pushing to $TESTTMP/t (glob) no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss (glob) no changes made to subrepo s since last push to $TESTTMP/t/s pushing subrepo t to $TESTTMP/t/t searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files push -f $ echo bah > s/a $ hg ci -m12 committing subrepository s $ hg push pushing to $TESTTMP/t (glob) no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss (glob) pushing subrepo s to $TESTTMP/t/s searching for changes abort: push creates new remote head 12a213df6fa9! (in subrepo s) (merge or see "hg help push" for details about pushing new heads) [255] $ hg push -f pushing to $TESTTMP/t (glob) pushing subrepo s/ss to $TESTTMP/t/s/ss (glob) searching for changes no changes found pushing subrepo s to $TESTTMP/t/s searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) pushing subrepo t to $TESTTMP/t/t searching for changes no changes found searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files check that unmodified subrepos are not pushed $ hg clone . 
../tcc updating to branch default cloning subrepo s from $TESTTMP/tc/s cloning subrepo s/ss from $TESTTMP/tc/s/ss (glob) cloning subrepo t from $TESTTMP/tc/t 3 files updated, 0 files merged, 0 files removed, 0 files unresolved the subrepos on the new clone have nothing to push to its source $ hg push -R ../tcc . pushing to . no changes made to subrepo s/ss since last push to s/ss (glob) no changes made to subrepo s since last push to s no changes made to subrepo t since last push to t searching for changes no changes found [1] the subrepos on the source do not have a clean store versus the clone target because they were never explicitly pushed to the source $ hg push ../tcc pushing to ../tcc pushing subrepo s/ss to ../tcc/s/ss (glob) searching for changes no changes found pushing subrepo s to ../tcc/s searching for changes no changes found pushing subrepo t to ../tcc/t searching for changes no changes found searching for changes no changes found [1] after push their stores become clean $ hg push ../tcc pushing to ../tcc no changes made to subrepo s/ss since last push to ../tcc/s/ss (glob) no changes made to subrepo s since last push to ../tcc/s no changes made to subrepo t since last push to ../tcc/t searching for changes no changes found [1] updating a subrepo to a different revision or changing its working directory does not make its store dirty $ hg -R s update '.^' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg push pushing to $TESTTMP/t (glob) no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss (glob) no changes made to subrepo s since last push to $TESTTMP/t/s no changes made to subrepo t since last push to $TESTTMP/t/t searching for changes no changes found [1] $ echo foo >> s/a $ hg push pushing to $TESTTMP/t (glob) no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss (glob) no changes made to subrepo s since last push to $TESTTMP/t/s no changes made to subrepo t since last push to $TESTTMP/t/t 
searching for changes no changes found [1] $ hg -R s update -C tip 1 files updated, 0 files merged, 0 files removed, 0 files unresolved committing into a subrepo makes its store (but not its parent's store) dirty $ echo foo >> s/ss/a $ hg -R s/ss commit -m 'test dirty store detection' $ hg out -S -r `hg log -r tip -T "{node|short}"` comparing with $TESTTMP/t (glob) searching for changes no changes found comparing with $TESTTMP/t/s searching for changes no changes found comparing with $TESTTMP/t/s/ss searching for changes changeset: 1:79ea5566a333 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: test dirty store detection comparing with $TESTTMP/t/t searching for changes no changes found $ hg push pushing to $TESTTMP/t (glob) pushing subrepo s/ss to $TESTTMP/t/s/ss (glob) searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files no changes made to subrepo s since last push to $TESTTMP/t/s no changes made to subrepo t since last push to $TESTTMP/t/t searching for changes no changes found [1] a subrepo store may be clean versus one repo but not versus another $ hg push pushing to $TESTTMP/t (glob) no changes made to subrepo s/ss since last push to $TESTTMP/t/s/ss (glob) no changes made to subrepo s since last push to $TESTTMP/t/s no changes made to subrepo t since last push to $TESTTMP/t/t searching for changes no changes found [1] $ hg push ../tcc pushing to ../tcc pushing subrepo s/ss to ../tcc/s/ss (glob) searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files no changes made to subrepo s since last push to ../tcc/s no changes made to subrepo t since last push to ../tcc/t searching for changes no changes found [1] update $ cd ../t $ hg up -C # discard our earlier merge 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo blah > t/t $ hg ci -m13 committing subrepository t backout calls revert 
internally with minimal opts, which should not raise KeyError $ hg backout ".^" --no-commit 0 files updated, 0 files merged, 0 files removed, 0 files unresolved changeset c373c8102e68 backed out, don't forget to commit. $ hg up -C # discard changes 1 files updated, 0 files merged, 0 files removed, 0 files unresolved pull $ cd ../tc $ hg pull pulling from $TESTTMP/t (glob) searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (run 'hg update' to get a working copy) should pull t $ hg incoming -S -r `hg log -r tip -T "{node|short}"` comparing with $TESTTMP/t (glob) no changes found comparing with $TESTTMP/t/s searching for changes no changes found comparing with $TESTTMP/t/s/ss searching for changes no changes found comparing with $TESTTMP/t/t searching for changes changeset: 5:52c0adc0515a tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 13 $ hg up pulling subrepo t from $TESTTMP/t/t searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat t/t blah bogus subrepo path aborts $ echo 'bogus=[boguspath' >> .hgsub $ hg ci -m 'bogus subrepo path' abort: missing ] in subrepo source [255] Issue1986: merge aborts when trying to merge a subrepo that shouldn't need merging # subrepo layout # # o 5 br # /| # o | 4 default # | | # | o 3 br # |/| # o | 2 default # | | # | o 1 br # |/ # o 0 default $ cd .. $ rm -rf sub $ hg init main $ cd main $ hg init s $ cd s $ echo a > a $ hg ci -Am1 adding a $ hg branch br marked working directory as branch br (branches are permanent and global, did you want a bookmark?) 
$ echo a >> a $ hg ci -m1 $ hg up default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo b > b $ hg ci -Am1 adding b $ hg up br 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge tip 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m1 $ hg up 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo c > c $ hg ci -Am1 adding c $ hg up 3 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge 4 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m1 # main repo layout: # # * <-- try to merge default into br again # .`| # . o 5 br --> substate = 5 # . | # o | 4 default --> substate = 4 # | | # | o 3 br --> substate = 2 # |/| # o | 2 default --> substate = 2 # | | # | o 1 br --> substate = 3 # |/ # o 0 default --> substate = 2 $ cd .. $ echo 's = s' > .hgsub $ hg -R s up 2 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg ci -Am1 adding .hgsub $ hg branch br marked working directory as branch br (branches are permanent and global, did you want a bookmark?) 
$ echo b > b $ hg -R s up 3 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg ci -Am1 adding b $ hg up default 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo c > c $ hg ci -Am1 adding c $ hg up 1 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m1 $ hg up 2 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg -R s up 4 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo d > d $ hg ci -Am1 adding d $ hg up 3 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg -R s up 5 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo e > e $ hg ci -Am1 adding e $ hg up 5 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge 4 # try to merge default into br again subrepository s diverged (local revision: f8f13b33206e, remote revision: a3f9062a4f88) (M)erge, keep (l)ocal or keep (r)emote? m 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cd .. 
test subrepo delete from .hgsubstate $ hg init testdelete $ mkdir testdelete/nested testdelete/nested2 $ hg init testdelete/nested $ hg init testdelete/nested2 $ echo test > testdelete/nested/foo $ echo test > testdelete/nested2/foo $ hg -R testdelete/nested add adding testdelete/nested/foo (glob) $ hg -R testdelete/nested2 add adding testdelete/nested2/foo (glob) $ hg -R testdelete/nested ci -m test $ hg -R testdelete/nested2 ci -m test $ echo nested = nested > testdelete/.hgsub $ echo nested2 = nested2 >> testdelete/.hgsub $ hg -R testdelete add adding testdelete/.hgsub (glob) $ hg -R testdelete ci -m "nested 1 & 2 added" $ echo nested = nested > testdelete/.hgsub $ hg -R testdelete ci -m "nested 2 deleted" $ cat testdelete/.hgsubstate bdf5c9a3103743d900b12ae0db3ffdcfd7b0d878 nested $ hg -R testdelete remove testdelete/.hgsub $ hg -R testdelete ci -m ".hgsub deleted" $ cat testdelete/.hgsubstate bdf5c9a3103743d900b12ae0db3ffdcfd7b0d878 nested test repository cloning $ mkdir mercurial mercurial2 $ hg init nested_absolute $ echo test > nested_absolute/foo $ hg -R nested_absolute add adding nested_absolute/foo (glob) $ hg -R nested_absolute ci -mtest $ cd mercurial $ hg init nested_relative $ echo test2 > nested_relative/foo2 $ hg -R nested_relative add adding nested_relative/foo2 (glob) $ hg -R nested_relative ci -mtest2 $ hg init main $ echo "nested_relative = ../nested_relative" > main/.hgsub $ echo "nested_absolute = `pwd`/nested_absolute" >> main/.hgsub $ hg -R main add adding main/.hgsub (glob) $ hg -R main ci -m "add subrepos" $ cd .. 
$ hg clone mercurial/main mercurial2/main updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat mercurial2/main/nested_absolute/.hg/hgrc \ > mercurial2/main/nested_relative/.hg/hgrc [paths] default = $TESTTMP/mercurial/nested_absolute [paths] default = $TESTTMP/mercurial/nested_relative $ rm -rf mercurial mercurial2 Issue1977: multirepo push should fail if subrepo push fails $ hg init repo $ hg init repo/s $ echo a > repo/s/a $ hg -R repo/s ci -Am0 adding a $ echo s = s > repo/.hgsub $ hg -R repo ci -Am1 adding .hgsub $ hg clone repo repo2 updating to branch default cloning subrepo s from $TESTTMP/repo/s 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -q -R repo2 pull -u $ echo 1 > repo2/s/a $ hg -R repo2/s ci -m2 $ hg -q -R repo2/s push $ hg -R repo2/s up -C 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo 2 > repo2/s/b $ hg -R repo2/s ci -m3 -A adding b created new head $ hg -R repo2 ci -m3 $ hg -q -R repo2 push abort: push creates new remote head cc505f09a8b2! 
(in subrepo s) (merge or see "hg help push" for details about pushing new heads) [255] $ hg -R repo update 0 files updated, 0 files merged, 0 files removed, 0 files unresolved test if untracked file is not overwritten (this also tests that updated .hgsubstate is treated as "modified", when 'merge.update()' is aborted before 'merge.recordupdates()', even if none of mode, size and timestamp of it isn't changed on the filesystem (see also issue4583)) $ echo issue3276_ok > repo/s/b $ hg -R repo2 push -f -q $ touch -t 200001010000 repo/.hgsubstate $ cat >> repo/.hg/hgrc < [fakedirstatewritetime] > # emulate invoking dirstate.write() via repo.status() > # at 2000-01-01 00:00 > fakenow = 200001010000 > > [extensions] > fakedirstatewritetime = $TESTDIR/fakedirstatewritetime.py > EOF $ hg -R repo update b: untracked file differs abort: untracked files in working directory differ from files in requested revision (in subrepo s) [255] $ cat >> repo/.hg/hgrc < [extensions] > fakedirstatewritetime = ! > EOF $ cat repo/s/b issue3276_ok $ rm repo/s/b $ touch -t 200001010000 repo/.hgsubstate $ hg -R repo revert --all reverting repo/.hgsubstate (glob) reverting subrepo s $ hg -R repo update 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat repo/s/b 2 $ rm -rf repo2 repo Issue1852 subrepos with relative paths always push/pull relative to default Prepare a repo with subrepo $ hg init issue1852a $ cd issue1852a $ hg init sub/repo $ echo test > sub/repo/foo $ hg -R sub/repo add sub/repo/foo $ echo sub/repo = sub/repo > .hgsub $ hg add .hgsub $ hg ci -mtest committing subrepository sub/repo (glob) $ echo test >> sub/repo/foo $ hg ci -mtest committing subrepository sub/repo (glob) $ hg cat sub/repo/foo test test $ mkdir -p tmp/sub/repo $ hg cat -r 0 --output tmp/%p_p sub/repo/foo $ cat tmp/sub/repo/foo_p test $ mv sub/repo sub_ $ hg cat sub/repo/baz skipping missing subrepository: sub/repo [1] $ rm -rf sub/repo $ mv sub_ sub/repo $ cd .. 
Create repo without default path, pull top repo, and see what happens on update $ hg init issue1852b $ hg -R issue1852b pull issue1852a pulling from issue1852a requesting all changes adding changesets adding manifests adding file changes added 2 changesets with 3 changes to 2 files (run 'hg update' to get a working copy) $ hg -R issue1852b update abort: default path for subrepository not found (in subrepo sub/repo) (glob) [255] Ensure a full traceback, not just the SubrepoAbort part $ hg -R issue1852b update --traceback 2>&1 | grep 'raise error\.Abort' raise error.Abort(_("default path for subrepository not found")) Pull -u now doesn't help $ hg -R issue1852b pull -u issue1852a pulling from issue1852a searching for changes no changes found Try the same, but with pull -u $ hg init issue1852c $ hg -R issue1852c pull -r0 -u issue1852a pulling from issue1852a adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files cloning subrepo sub/repo from issue1852a/sub/repo (glob) 2 files updated, 0 files merged, 0 files removed, 0 files unresolved Try to push from the other side $ hg -R issue1852a push `pwd`/issue1852c pushing to $TESTTMP/issue1852c (glob) pushing subrepo sub/repo to $TESTTMP/issue1852c/sub/repo (glob) searching for changes no changes found searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files Incoming and outgoing should not use the default path: $ hg clone -q issue1852a issue1852d $ hg -R issue1852d outgoing --subrepos issue1852c comparing with issue1852c searching for changes no changes found comparing with issue1852c/sub/repo searching for changes no changes found [1] $ hg -R issue1852d incoming --subrepos issue1852c comparing with issue1852c searching for changes no changes found comparing with issue1852c/sub/repo searching for changes no changes found [1] Check that merge of a new subrepo doesn't write the uncommitted state to .hgsubstate 
(issue4622) $ hg init issue1852a/addedsub $ echo zzz > issue1852a/addedsub/zz.txt $ hg -R issue1852a/addedsub ci -Aqm "initial ZZ" $ hg clone issue1852a/addedsub issue1852d/addedsub updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo def > issue1852a/sub/repo/foo $ hg -R issue1852a ci -SAm 'tweaked subrepo' adding tmp/sub/repo/foo_p committing subrepository sub/repo (glob) $ echo 'addedsub = addedsub' >> issue1852d/.hgsub $ echo xyz > issue1852d/sub/repo/foo $ hg -R issue1852d pull -u pulling from $TESTTMP/issue1852a (glob) searching for changes adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files subrepository sub/repo diverged (local revision: f42d5c7504a8, remote revision: 46cd4aac504c) (M)erge, keep (l)ocal or keep (r)emote? m pulling subrepo sub/repo from $TESTTMP/issue1852a/sub/repo (glob) searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files subrepository sources for sub/repo differ (glob) use (l)ocal source (f42d5c7504a8) or (r)emote source (46cd4aac504c)? l 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat issue1852d/.hgsubstate f42d5c7504a811dda50f5cf3e5e16c3330b87172 sub/repo Check status of files when none of them belong to the first subrepository: $ hg init subrepo-status $ cd subrepo-status $ hg init subrepo-1 $ hg init subrepo-2 $ cd subrepo-2 $ touch file $ hg add file $ cd .. $ echo subrepo-1 = subrepo-1 > .hgsub $ echo subrepo-2 = subrepo-2 >> .hgsub $ hg add .hgsub $ hg ci -m 'Added subrepos' committing subrepository subrepo-2 $ hg st subrepo-2/file Check that share works with subrepo $ hg --config extensions.share= share . 
../shared updating working directory cloning subrepo subrepo-2 from $TESTTMP/subrepo-status/subrepo-2 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ test -f ../shared/subrepo-1/.hg/sharedpath [1] $ hg -R ../shared in abort: repository default not found! [255] $ hg -R ../shared/subrepo-2 showconfig paths paths.default=$TESTTMP/subrepo-status/subrepo-2 $ hg -R ../shared/subrepo-1 sum --remote parent: -1:000000000000 tip (empty repository) branch: default commit: (clean) update: (current) remote: (synced) Check hg update --clean $ cd $TESTTMP/t $ rm -r t/t.orig $ hg status -S --all C .hgsub C .hgsubstate C a C s/.hgsub C s/.hgsubstate C s/a C s/ss/a C t/t $ echo c1 > s/a $ cd s $ echo c1 > b $ echo c1 > c $ hg add b $ cd .. $ hg status -S M s/a A s/b ? s/c $ hg update -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg status -S ? s/b ? s/c Sticky subrepositories, no changes $ cd $TESTTMP/t $ hg id 925c17564ef8 tip $ hg -R s id 12a213df6fa9 tip $ hg -R t id 52c0adc0515a tip $ hg update 11 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg id 365661e5936a $ hg -R s id fc627a69481f $ hg -R t id e95bcfa18a35 Sticky subrepositories, file changes $ touch s/f1 $ touch t/f1 $ hg add -S s/f1 $ hg add -S t/f1 $ hg id 365661e5936a+ $ hg -R s id fc627a69481f+ $ hg -R t id e95bcfa18a35+ $ hg update tip subrepository s diverged (local revision: fc627a69481f, remote revision: 12a213df6fa9) (M)erge, keep (l)ocal or keep (r)emote? m subrepository sources for s differ use (l)ocal source (fc627a69481f) or (r)emote source (12a213df6fa9)? l subrepository t diverged (local revision: e95bcfa18a35, remote revision: 52c0adc0515a) (M)erge, keep (l)ocal or keep (r)emote? m subrepository sources for t differ use (l)ocal source (e95bcfa18a35) or (r)emote source (52c0adc0515a)? 
l 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg id 925c17564ef8+ tip $ hg -R s id fc627a69481f+ $ hg -R t id e95bcfa18a35+ $ hg update --clean tip 1 files updated, 0 files merged, 0 files removed, 0 files unresolved Sticky subrepository, revision updates $ hg id 925c17564ef8 tip $ hg -R s id 12a213df6fa9 tip $ hg -R t id 52c0adc0515a tip $ cd s $ hg update -r -2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd ../t $ hg update -r 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd .. $ hg update 10 subrepository s diverged (local revision: 12a213df6fa9, remote revision: fc627a69481f) (M)erge, keep (l)ocal or keep (r)emote? m subrepository t diverged (local revision: 52c0adc0515a, remote revision: 20a0db6fbf6c) (M)erge, keep (l)ocal or keep (r)emote? m subrepository sources for t differ (in checked out version) use (l)ocal source (7af322bc1198) or (r)emote source (20a0db6fbf6c)? l 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg id e45c8b14af55+ $ hg -R s id 02dcf1d70411 $ hg -R t id 7af322bc1198 Sticky subrepository, file changes and revision updates $ touch s/f1 $ touch t/f1 $ hg add -S s/f1 $ hg add -S t/f1 $ hg id e45c8b14af55+ $ hg -R s id 02dcf1d70411+ $ hg -R t id 7af322bc1198+ $ hg update tip subrepository s diverged (local revision: 12a213df6fa9, remote revision: 12a213df6fa9) (M)erge, keep (l)ocal or keep (r)emote? m subrepository sources for s differ use (l)ocal source (02dcf1d70411) or (r)emote source (12a213df6fa9)? l subrepository t diverged (local revision: 52c0adc0515a, remote revision: 52c0adc0515a) (M)erge, keep (l)ocal or keep (r)emote? m subrepository sources for t differ use (l)ocal source (7af322bc1198) or (r)emote source (52c0adc0515a)? 
l 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg id 925c17564ef8+ tip $ hg -R s id 02dcf1d70411+ $ hg -R t id 7af322bc1198+ Sticky repository, update --clean $ hg update --clean tip 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg id 925c17564ef8 tip $ hg -R s id 12a213df6fa9 tip $ hg -R t id 52c0adc0515a tip Test subrepo already at intended revision: $ cd s $ hg update fc627a69481f 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd .. $ hg update 11 subrepository s diverged (local revision: 12a213df6fa9, remote revision: fc627a69481f) (M)erge, keep (l)ocal or keep (r)emote? m 0 files updated, 0 files merged, 0 files removed, 0 files unresolved 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg id -n 11+ $ hg -R s id fc627a69481f $ hg -R t id e95bcfa18a35 Test that removing .hgsubstate doesn't break anything: $ hg rm -f .hgsubstate $ hg ci -mrm nothing changed [1] $ hg log -vr tip changeset: 13:925c17564ef8 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 files: .hgsubstate description: 13 Test that removing .hgsub removes .hgsubstate: $ hg rm .hgsub $ hg ci -mrm2 created new head $ hg log -vr tip changeset: 14:2400bccd50af tag: tip parent: 11:365661e5936a user: test date: Thu Jan 01 00:00:00 1970 +0000 files: .hgsub .hgsubstate description: rm2 Test issue3153: diff -S with deleted subrepos $ hg diff --nodates -S -c . diff -r 365661e5936a -r 2400bccd50af .hgsub --- a/.hgsub +++ /dev/null @@ -1,2 +0,0 @@ -s = s -t = t diff -r 365661e5936a -r 2400bccd50af .hgsubstate --- a/.hgsubstate +++ /dev/null @@ -1,2 +0,0 @@ -fc627a69481fcbe5f1135069e8a3881c023e4cf5 s -e95bcfa18a358dc4936da981ebf4147b4cad1362 t Test behavior of add for explicit path in subrepo: $ cd .. $ hg init explicit $ cd explicit $ echo s = s > .hgsub $ hg add .hgsub $ hg init s $ hg ci -m0 Adding with an explicit path in a subrepo adds the file $ echo c1 > f1 $ echo c2 > s/f2 $ hg st -S ? f1 ? 
s/f2 $ hg add s/f2 $ hg st -S A s/f2 ? f1 $ hg ci -R s -m0 $ hg ci -Am1 adding f1 Adding with an explicit path in a subrepo with -S has the same behavior $ echo c3 > f3 $ echo c4 > s/f4 $ hg st -S ? f3 ? s/f4 $ hg add -S s/f4 $ hg st -S A s/f4 ? f3 $ hg ci -R s -m1 $ hg ci -Ama2 adding f3 Adding without a path or pattern silently ignores subrepos $ echo c5 > f5 $ echo c6 > s/f6 $ echo c7 > s/f7 $ hg st -S ? f5 ? s/f6 ? s/f7 $ hg add adding f5 $ hg st -S A f5 ? s/f6 ? s/f7 $ hg ci -R s -Am2 adding f6 adding f7 $ hg ci -m3 Adding without a path or pattern with -S also adds files in subrepos $ echo c8 > f8 $ echo c9 > s/f9 $ echo c10 > s/f10 $ hg st -S ? f8 ? s/f10 ? s/f9 $ hg add -S adding f8 adding s/f10 (glob) adding s/f9 (glob) $ hg st -S A f8 A s/f10 A s/f9 $ hg ci -R s -m3 $ hg ci -m4 Adding with a pattern silently ignores subrepos $ echo c11 > fm11 $ echo c12 > fn12 $ echo c13 > s/fm13 $ echo c14 > s/fn14 $ hg st -S ? fm11 ? fn12 ? s/fm13 ? s/fn14 $ hg add 'glob:**fm*' adding fm11 $ hg st -S A fm11 ? fn12 ? s/fm13 ? s/fn14 $ hg ci -R s -Am4 adding fm13 adding fn14 $ hg ci -Am5 adding fn12 Adding with a pattern with -S also adds matches in subrepos $ echo c15 > fm15 $ echo c16 > fn16 $ echo c17 > s/fm17 $ echo c18 > s/fn18 $ hg st -S ? fm15 ? fn16 ? s/fm17 ? s/fn18 $ hg add -S 'glob:**fm*' adding fm15 adding s/fm17 (glob) $ hg st -S A fm15 A s/fm17 ? fn16 ? s/fn18 $ hg ci -R s -Am5 adding fn18 $ hg ci -Am6 adding fn16 Test behavior of forget for explicit path in subrepo: Forgetting an explicit path in a subrepo untracks the file $ echo c19 > s/f19 $ hg add s/f19 $ hg st -S A s/f19 $ hg forget s/f19 $ hg st -S ? s/f19 $ rm s/f19 $ cd .. 
Courtesy phases synchronisation to publishing server does not block the push (issue3781) $ cp -r main issue3781 $ cp -r main issue3781-dest $ cd issue3781-dest/s $ hg phase tip # show we have draft changeset 5: draft $ chmod a-w .hg/store/phaseroots # prevent phase push $ cd ../../issue3781 $ cat >> .hg/hgrc << EOF > [paths] > default=../issue3781-dest/ > EOF $ hg push --config experimental.bundle2-exp=False pushing to $TESTTMP/issue3781-dest (glob) pushing subrepo s to $TESTTMP/issue3781-dest/s searching for changes no changes found searching for changes no changes found [1] # clean the push cache $ rm s/.hg/cache/storehash/* $ hg push --config experimental.bundle2-exp=True pushing to $TESTTMP/issue3781-dest (glob) pushing subrepo s to $TESTTMP/issue3781-dest/s searching for changes no changes found searching for changes no changes found [1] $ cd .. Test phase choice for newly created commit with "phases.subrepochecks" configuration $ cd t $ hg update -q -r 12 $ cat >> s/ss/.hg/hgrc < [phases] > new-commit = secret > EOF $ cat >> s/.hg/hgrc < [phases] > new-commit = draft > EOF $ echo phasecheck1 >> s/ss/a $ hg -R s commit -S --config phases.checksubrepos=abort -m phasecheck1 committing subrepository ss transaction abort! 
rollback completed abort: can't commit in draft phase conflicting secret from subrepository ss [255] $ echo phasecheck2 >> s/ss/a $ hg -R s commit -S --config phases.checksubrepos=ignore -m phasecheck2 committing subrepository ss $ hg -R s/ss phase tip 3: secret $ hg -R s phase tip 6: draft $ echo phasecheck3 >> s/ss/a $ hg -R s commit -S -m phasecheck3 committing subrepository ss warning: changes are committed in secret phase from subrepository ss $ hg -R s/ss phase tip 4: secret $ hg -R s phase tip 7: secret $ cat >> t/.hg/hgrc < [phases] > new-commit = draft > EOF $ cat >> .hg/hgrc < [phases] > new-commit = public > EOF $ echo phasecheck4 >> s/ss/a $ echo phasecheck4 >> t/t $ hg commit -S -m phasecheck4 committing subrepository s committing subrepository s/ss (glob) warning: changes are committed in secret phase from subrepository ss committing subrepository t warning: changes are committed in secret phase from subrepository s created new head $ hg -R s/ss phase tip 5: secret $ hg -R s phase tip 8: secret $ hg -R t phase tip 6: draft $ hg phase tip 15: secret $ cd .. Test that commit --secret works on both repo and subrepo (issue4182) $ cd main $ echo secret >> b $ echo secret >> s/b $ hg commit --secret --subrepo -m "secret" committing subrepository s $ hg phase -r . 6: secret $ cd s $ hg phase -r . 
6: secret $ cd ../../ Test "subrepos" template keyword $ cd t $ hg update -q 15 $ cat > .hgsub < s = s > EOF $ hg commit -m "16" warning: changes are committed in secret phase from subrepository s (addition of ".hgsub" itself) $ hg diff --nodates -c 1 .hgsubstate diff -r f7b1eb17ad24 -r 7cf8cfea66e4 .hgsubstate --- /dev/null +++ b/.hgsubstate @@ -0,0 +1,1 @@ +e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s $ hg log -r 1 --template "{p1node|short} {p2node|short}\n{subrepos % '{subrepo}\n'}" f7b1eb17ad24 000000000000 s (modification of existing entry) $ hg diff --nodates -c 2 .hgsubstate diff -r 7cf8cfea66e4 -r df30734270ae .hgsubstate --- a/.hgsubstate +++ b/.hgsubstate @@ -1,1 +1,1 @@ -e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s +dc73e2e6d2675eb2e41e33c205f4bdab4ea5111d s $ hg log -r 2 --template "{p1node|short} {p2node|short}\n{subrepos % '{subrepo}\n'}" 7cf8cfea66e4 000000000000 s (addition of entry) $ hg diff --nodates -c 5 .hgsubstate diff -r 7cf8cfea66e4 -r 1f14a2e2d3ec .hgsubstate --- a/.hgsubstate +++ b/.hgsubstate @@ -1,1 +1,2 @@ e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s +60ca1237c19474e7a3978b0dc1ca4e6f36d51382 t $ hg log -r 5 --template "{p1node|short} {p2node|short}\n{subrepos % '{subrepo}\n'}" 7cf8cfea66e4 000000000000 t (removal of existing entry) $ hg diff --nodates -c 16 .hgsubstate diff -r 8bec38d2bd0b -r f2f70bc3d3c9 .hgsubstate --- a/.hgsubstate +++ b/.hgsubstate @@ -1,2 +1,1 @@ 0731af8ca9423976d3743119d0865097c07bdc1b s -e202dc79b04c88a636ea8913d9182a1346d9b3dc t $ hg log -r 16 --template "{p1node|short} {p2node|short}\n{subrepos % '{subrepo}\n'}" 8bec38d2bd0b 000000000000 t (merging) $ hg diff --nodates -c 9 .hgsubstate diff -r f6affe3fbfaa -r f0d2028bf86d .hgsubstate --- a/.hgsubstate +++ b/.hgsubstate @@ -1,1 +1,2 @@ fc627a69481fcbe5f1135069e8a3881c023e4cf5 s +60ca1237c19474e7a3978b0dc1ca4e6f36d51382 t $ hg log -r 9 --template "{p1node|short} {p2node|short}\n{subrepos % '{subrepo}\n'}" f6affe3fbfaa 1f14a2e2d3ec t (removal of ".hgsub" itself) $ hg 
diff --nodates -c 8 .hgsubstate diff -r f94576341bcf -r 96615c1dad2d .hgsubstate --- a/.hgsubstate +++ /dev/null @@ -1,2 +0,0 @@ -e4ece1bf43360ddc8f6a96432201a37b7cd27ae4 s -7af322bc1198a32402fe903e0b7ebcfc5c9bf8f4 t $ hg log -r 8 --template "{p1node|short} {p2node|short}\n{subrepos % '{subrepo}\n'}" f94576341bcf 000000000000 Test that '[paths]' is configured correctly at subrepo creation $ cd $TESTTMP/tc $ cat > .hgsub < # to clear bogus subrepo path 'bogus=[boguspath' > s = s > t = t > EOF $ hg update -q --clean null $ rm -rf s t $ cat >> .hg/hgrc < [paths] > default-push = /foo/bar > EOF $ hg update -q $ cat s/.hg/hgrc [paths] default = $TESTTMP/t/s default-push = /foo/bar/s $ cat s/ss/.hg/hgrc [paths] default = $TESTTMP/t/s/ss default-push = /foo/bar/s/ss $ cat t/.hg/hgrc [paths] default = $TESTTMP/t/t default-push = /foo/bar/t $ cd $TESTTMP/t $ hg up -qC 0 $ echo 'bar' > bar.txt $ hg ci -Am 'branch before subrepo add' adding bar.txt created new head $ hg merge -r "first(subrepo('s'))" 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg status -S -X '.hgsub*' A s/a ? s/b ? s/c ? s/f1 $ hg status -S --rev 'p2()' A bar.txt ? s/b ? s/c ? s/f1 $ hg diff -S -X '.hgsub*' --nodates diff -r 000000000000 s/a --- /dev/null +++ b/s/a @@ -0,0 +1,1 @@ +a $ hg diff -S --rev 'p2()' --nodates diff -r 7cf8cfea66e4 bar.txt --- /dev/null +++ b/bar.txt @@ -0,0 +1,1 @@ +bar $ cd .. mercurial-3.7.3/tests/sitecustomize.py0000644000175000017500000000077612676531525017564 0ustar mpmmpm00000000000000import os if os.environ.get('COVERAGE_PROCESS_START'): try: import coverage import random # uuid is better, but not available in Python 2.4. 
covpath = os.path.join(os.environ['COVERAGE_DIR'], 'cov.%s' % random.randrange(0, 1000000000000)) cov = coverage.coverage(data_file=covpath, auto_data=True) cov._warn_no_data = False cov._warn_unimported_source = False cov.start() except ImportError: pass mercurial-3.7.3/tests/test-dirstate-nonnormalset.t0000644000175000017500000000076212676531525021774 0ustar mpmmpm00000000000000 $ cat >> $HGRCPATH << EOF > [ui] > logtemplate="{rev}:{node|short} ({phase}) [{tags} {bookmarks}] {desc|firstline}\n" > [extensions] > dirstateparanoidcheck = $TESTDIR/../contrib/dirstatenonnormalcheck.py > [experimental] > nonnormalparanoidcheck = True > [devel] > all-warnings=True > EOF $ mkcommit() { > echo "$1" > "$1" > hg add "$1" > hg ci -m "add $1" > } $ hg init testrepo $ cd testrepo $ mkcommit a $ mkcommit b $ mkcommit c $ hg status mercurial-3.7.3/tests/test-manifest.t0000644000175000017500000000213212676531525017237 0ustar mpmmpm00000000000000Source bundle was generated with the following script: # hg init # echo a > a # ln -s a l # hg ci -Ama -d'0 0' # mkdir b # echo a > b/a # chmod +x b/a # hg ci -Amb -d'1 0' $ hg init $ hg -q pull "$TESTDIR/bundles/test-manifest.hg" The next call is expected to return nothing: $ hg manifest $ hg co 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg manifest a b/a l $ hg files -vr . 2 a 2 x b/a (glob) 1 l l $ hg files -r . -X b a l $ hg manifest -v 644 a 755 * b/a 644 @ l $ hg manifest --debug b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 644 a b789fdd96dc2f3bd229c1dd8eedf0fc60e2b68e3 755 * b/a 047b75c6d7a3ef6a2243bd0e99f94f6ea6683597 644 @ l $ hg manifest -r 0 a l $ hg manifest -r 1 a b/a l $ hg manifest -r tip a b/a l $ hg manifest tip a b/a l $ hg manifest --all a b/a l The next two calls are expected to abort: $ hg manifest -r 2 abort: unknown revision '2'! 
[255] $ hg manifest -r tip tip abort: please specify just one revision [255] mercurial-3.7.3/tests/test-parseindex.t0000644000175000017500000001275712676531525017611 0ustar mpmmpm00000000000000revlog.parseindex must be able to parse the index file even if an index entry is split between two 64k blocks. The ideal test would be to create an index file with inline data where 64k < size < 64k + 64 (64k is the size of the read buffer, 64 is the size of an index entry) and with an index entry starting right before the 64k block boundary, and try to read it. We approximate that by reducing the read buffer to 1 byte. $ hg init a $ cd a $ echo abc > foo $ hg add foo $ hg commit -m 'add foo' $ echo >> foo $ hg commit -m 'change foo' $ hg log -r 0: changeset: 0:7c31755bf9b5 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add foo changeset: 1:26333235a41c tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: change foo $ cat >> test.py << EOF > from mercurial import changelog, scmutil > from mercurial.node import * > > class singlebyteread(object): > def __init__(self, real): > self.real = real > > def read(self, size=-1): > if size == 65536: > size = 1 > return self.real.read(size) > > def __getattr__(self, key): > return getattr(self.real, key) > > def opener(*args): > o = scmutil.opener(*args) > def wrapper(*a): > f = o(*a) > return singlebyteread(f) > return wrapper > > cl = changelog.changelog(opener('.hg/store')) > print len(cl), 'revisions:' > for r in cl: > print short(cl.node(r)) > EOF $ python test.py 2 revisions: 7c31755bf9b5 26333235a41c $ cd .. 
#if no-pure Test SEGV caused by bad revision passed to reachableroots() (issue4775): $ cd a $ python < from mercurial import changelog, scmutil > cl = changelog.changelog(scmutil.vfs('.hg/store')) > print 'good heads:' > for head in [0, len(cl) - 1, -1]: > print'%s: %r' % (head, cl.reachableroots(0, [head], [0])) > print 'bad heads:' > for head in [len(cl), 10000, -2, -10000, None]: > print '%s:' % head, > try: > cl.reachableroots(0, [head], [0]) > print 'uncaught buffer overflow?' > except (IndexError, TypeError) as inst: > print inst > print 'good roots:' > for root in [0, len(cl) - 1, -1]: > print '%s: %r' % (root, cl.reachableroots(root, [len(cl) - 1], [root])) > print 'out-of-range roots are ignored:' > for root in [len(cl), 10000, -2, -10000]: > print '%s: %r' % (root, cl.reachableroots(root, [len(cl) - 1], [root])) > print 'bad roots:' > for root in [None]: > print '%s:' % root, > try: > cl.reachableroots(root, [len(cl) - 1], [root]) > print 'uncaught error?' > except TypeError as inst: > print inst > EOF good heads: 0: [0] 1: [0] -1: [] bad heads: 2: head out of range 10000: head out of range -2: head out of range -10000: head out of range None: an integer is required good roots: 0: [0] 1: [1] -1: [-1] out-of-range roots are ignored: 2: [] 10000: [] -2: [] -10000: [] bad roots: None: an integer is required $ cd .. 
Test corrupted p1/p2 fields that could cause SEGV at parsers.c: $ mkdir invalidparent $ cd invalidparent $ hg clone --pull -q --config phases.publish=False ../a limit $ hg clone --pull -q --config phases.publish=False ../a segv $ rm -R limit/.hg/cache segv/.hg/cache $ python < data = open("limit/.hg/store/00changelog.i", "rb").read() > for n, p in [('limit', '\0\0\0\x02'), ('segv', '\0\x01\0\0')]: > # corrupt p1 at rev0 and p2 at rev1 > d = data[:24] + p + data[28:127 + 28] + p + data[127 + 32:] > open(n + "/.hg/store/00changelog.i", "wb").write(d) > EOF $ hg debugindex -f1 limit/.hg/store/00changelog.i rev flag offset length size base link p1 p2 nodeid 0 0000 0 63 62 0 0 2 -1 7c31755bf9b5 1 0000 63 66 65 1 1 0 2 26333235a41c $ hg debugindex -f1 segv/.hg/store/00changelog.i rev flag offset length size base link p1 p2 nodeid 0 0000 0 63 62 0 0 65536 -1 7c31755bf9b5 1 0000 63 66 65 1 1 0 65536 26333235a41c $ cat < test.py > import sys > from mercurial import changelog, scmutil > cl = changelog.changelog(scmutil.vfs(sys.argv[1])) > n0, n1 = cl.node(0), cl.node(1) > ops = [ > ('reachableroots', > lambda: cl.index.reachableroots2(0, [1], [0], False)), > ('compute_phases_map_sets', lambda: cl.computephases([[0], []])), > ('index_headrevs', lambda: cl.headrevs()), > ('find_gca_candidates', lambda: cl.commonancestorsheads(n0, n1)), > ('find_deepest', lambda: cl.ancestor(n0, n1)), > ] > for l, f in ops: > print l + ':', > try: > f() > print 'uncaught buffer overflow?' > except ValueError, inst: > print inst > EOF $ python test.py limit/.hg/store reachableroots: parent out of range compute_phases_map_sets: parent out of range index_headrevs: parent out of range find_gca_candidates: parent out of range find_deepest: parent out of range $ python test.py segv/.hg/store reachableroots: parent out of range compute_phases_map_sets: parent out of range index_headrevs: parent out of range find_gca_candidates: parent out of range find_deepest: parent out of range $ cd .. 
#endif mercurial-3.7.3/tests/test-ancestor.py0000644000175000017500000002015512676531525017441 0ustar mpmmpm00000000000000from __future__ import absolute_import import binascii import getopt import math import os import random import sys import time from mercurial.node import nullrev from mercurial import ( ancestor, commands, hg, ui, util, ) def buildgraph(rng, nodes=100, rootprob=0.05, mergeprob=0.2, prevprob=0.7): '''nodes: total number of nodes in the graph rootprob: probability that a new node (not 0) will be a root mergeprob: probability that, excluding a root a node will be a merge prevprob: probability that p1 will be the previous node return value is a graph represented as an adjacency list. ''' graph = [None] * nodes for i in xrange(nodes): if i == 0 or rng.random() < rootprob: graph[i] = [nullrev] elif i == 1: graph[i] = [0] elif rng.random() < mergeprob: if i == 2 or rng.random() < prevprob: # p1 is prev p1 = i - 1 else: p1 = rng.randrange(i - 1) p2 = rng.choice(range(0, p1) + range(p1 + 1, i)) graph[i] = [p1, p2] elif rng.random() < prevprob: graph[i] = [i - 1] else: graph[i] = [rng.randrange(i - 1)] return graph def buildancestorsets(graph): ancs = [None] * len(graph) for i in xrange(len(graph)): ancs[i] = set([i]) if graph[i] == [nullrev]: continue for p in graph[i]: ancs[i].update(ancs[p]) return ancs class naiveincrementalmissingancestors(object): def __init__(self, ancs, bases): self.ancs = ancs self.bases = set(bases) def addbases(self, newbases): self.bases.update(newbases) def removeancestorsfrom(self, revs): for base in self.bases: if base != nullrev: revs.difference_update(self.ancs[base]) revs.discard(nullrev) def missingancestors(self, revs): res = set() for rev in revs: if rev != nullrev: res.update(self.ancs[rev]) for base in self.bases: if base != nullrev: res.difference_update(self.ancs[base]) return sorted(res) def test_missingancestors(seed, rng): # empirically observed to take around 1 second graphcount = 100 testcount = 10 inccount 
= 10 nerrs = [0] # the default mu and sigma give us a nice distribution of mostly # single-digit counts (including 0) with some higher ones def lognormrandom(mu, sigma): return int(math.floor(rng.lognormvariate(mu, sigma))) def samplerevs(nodes, mu=1.1, sigma=0.8): count = min(lognormrandom(mu, sigma), len(nodes)) return rng.sample(nodes, count) def err(seed, graph, bases, seq, output, expected): if nerrs[0] == 0: print >> sys.stderr, 'seed:', hex(seed)[:-1] if gerrs[0] == 0: print >> sys.stderr, 'graph:', graph print >> sys.stderr, '* bases:', bases print >> sys.stderr, '* seq: ', seq print >> sys.stderr, '* output: ', output print >> sys.stderr, '* expected:', expected nerrs[0] += 1 gerrs[0] += 1 for g in xrange(graphcount): graph = buildgraph(rng) ancs = buildancestorsets(graph) gerrs = [0] for _ in xrange(testcount): # start from nullrev to include it as a possibility graphnodes = range(nullrev, len(graph)) bases = samplerevs(graphnodes) # fast algorithm inc = ancestor.incrementalmissingancestors(graph.__getitem__, bases) # reference slow algorithm naiveinc = naiveincrementalmissingancestors(ancs, bases) seq = [] revs = [] for _ in xrange(inccount): if rng.random() < 0.2: newbases = samplerevs(graphnodes) seq.append(('addbases', newbases)) inc.addbases(newbases) naiveinc.addbases(newbases) if rng.random() < 0.4: # larger set so that there are more revs to remove from revs = samplerevs(graphnodes, mu=1.5) seq.append(('removeancestorsfrom', revs)) hrevs = set(revs) rrevs = set(revs) inc.removeancestorsfrom(hrevs) naiveinc.removeancestorsfrom(rrevs) if hrevs != rrevs: err(seed, graph, bases, seq, sorted(hrevs), sorted(rrevs)) else: revs = samplerevs(graphnodes) seq.append(('missingancestors', revs)) h = inc.missingancestors(revs) r = naiveinc.missingancestors(revs) if h != r: err(seed, graph, bases, seq, h, r) # graph is a dict of child->parent adjacency lists for this graph: # o 13 # | # | o 12 # | | # | | o 11 # | | |\ # | | | | o 10 # | | | | | # | o---+ | 9 # 
| | | | | # o | | | | 8 # / / / / # | | o | 7 # | | | | # o---+ | 6 # / / / # | | o 5 # | |/ # | o 4 # | | # o | 3 # | | # | o 2 # |/ # o 1 # | # o 0 graph = {0: [-1], 1: [0], 2: [1], 3: [1], 4: [2], 5: [4], 6: [4], 7: [4], 8: [-1], 9: [6, 7], 10: [5], 11: [3, 7], 12: [9], 13: [8]} def genlazyancestors(revs, stoprev=0, inclusive=False): print ("%% lazy ancestor set for %s, stoprev = %s, inclusive = %s" % (revs, stoprev, inclusive)) return ancestor.lazyancestors(graph.get, revs, stoprev=stoprev, inclusive=inclusive) def printlazyancestors(s, l): print 'membership: %r' % [n for n in l if n in s] print 'iteration: %r' % list(s) def test_lazyancestors(): # Empty revs s = genlazyancestors([]) printlazyancestors(s, [3, 0, -1]) # Standard example s = genlazyancestors([11, 13]) printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0]) # Standard with ancestry in the initial set (1 is ancestor of 3) s = genlazyancestors([1, 3]) printlazyancestors(s, [1, -1, 0]) # Including revs s = genlazyancestors([11, 13], inclusive=True) printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0]) # Test with stoprev s = genlazyancestors([11, 13], stoprev=6) printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0]) s = genlazyancestors([11, 13], stoprev=6, inclusive=True) printlazyancestors(s, [11, 13, 7, 9, 8, 3, 6, 4, 1, -1, 0]) # The C gca algorithm requires a real repo. These are textual descriptions of # DAGs that have been known to be problematic. dagtests = [ '+2*2*2/*3/2', '+3*3/*2*2/*4*4/*4/2*4/2*2', ] def test_gca(): u = ui.ui() for i, dag in enumerate(dagtests): repo = hg.repository(u, 'gca%d' % i, create=1) cl = repo.changelog if not util.safehasattr(cl.index, 'ancestors'): # C version not available return commands.debugbuilddag(u, repo, dag) # Compare the results of the Python and C versions. This does not # include choosing a winner when more than one gca exists -- we make # sure both return exactly the same set of gcas. 
for a in cl: for b in cl: cgcas = sorted(cl.index.ancestors(a, b)) pygcas = sorted(ancestor.ancestors(cl.parentrevs, a, b)) if cgcas != pygcas: print "test_gca: for dag %s, gcas for %d, %d:" % (dag, a, b) print " C returned: %s" % cgcas print " Python returned: %s" % pygcas def main(): seed = None opts, args = getopt.getopt(sys.argv[1:], 's:', ['seed=']) for o, a in opts: if o in ('-s', '--seed'): seed = long(a, base=0) # accepts base 10 or 16 strings if seed is None: try: seed = long(binascii.hexlify(os.urandom(16)), 16) except AttributeError: seed = long(time.time() * 1000) rng = random.Random(seed) test_missingancestors(seed, rng) test_lazyancestors() test_gca() if __name__ == '__main__': main() mercurial-3.7.3/tests/test-share.t0000644000175000017500000001650312676531525016542 0ustar mpmmpm00000000000000#require killdaemons $ echo "[extensions]" >> $HGRCPATH $ echo "share = " >> $HGRCPATH prepare repo1 $ hg init repo1 $ cd repo1 $ echo a > a $ hg commit -A -m'init' adding a share it $ cd .. 
$ hg share repo1 repo2 updating working directory 1 files updated, 0 files merged, 0 files removed, 0 files unresolved share shouldn't have a store dir $ cd repo2 $ test -d .hg/store [1] Some sed versions appends newline, some don't, and some just fails $ cat .hg/sharedpath; echo $TESTTMP/repo1/.hg (glob) trailing newline on .hg/sharedpath is ok $ hg tip -q 0:d3873e73d99e $ echo '' >> .hg/sharedpath $ cat .hg/sharedpath $TESTTMP/repo1/.hg (glob) $ hg tip -q 0:d3873e73d99e commit in shared clone $ echo a >> a $ hg commit -m'change in shared clone' check original $ cd ../repo1 $ hg log changeset: 1:8af4dc49db9e tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: change in shared clone changeset: 0:d3873e73d99e user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: init $ hg update 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat a # should be two lines of "a" a a commit in original $ echo b > b $ hg commit -A -m'another file' adding b check in shared clone $ cd ../repo2 $ hg log changeset: 2:c2e0ac586386 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: another file changeset: 1:8af4dc49db9e user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: change in shared clone changeset: 0:d3873e73d99e user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: init $ hg update 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat b # should exist with one "b" b hg serve shared clone $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid $ cat hg.pid >> $DAEMON_PIDS $ get-with-headers.py localhost:$HGPORT 'raw-file/' 200 Script output follows -rw-r--r-- 4 a -rw-r--r-- 2 b test unshare command $ hg unshare $ test -d .hg/store $ test -f .hg/sharedpath [1] $ hg unshare abort: this is not a shared repo [255] check that a change does not propagate $ echo b >> b $ hg commit -m'change in unshared' $ cd ../repo1 $ hg id -r tip c2e0ac586386 tip $ cd .. 
test sharing bookmarks $ hg share -B repo1 repo3 updating working directory 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd repo1 $ hg bookmark bm1 $ hg bookmarks * bm1 2:c2e0ac586386 $ cd ../repo2 $ hg book bm2 $ hg bookmarks * bm2 3:0e6e70d1d5f1 $ cd ../repo3 $ hg bookmarks bm1 2:c2e0ac586386 $ hg book bm3 $ hg bookmarks bm1 2:c2e0ac586386 * bm3 2:c2e0ac586386 $ cd ../repo1 $ hg bookmarks * bm1 2:c2e0ac586386 bm3 2:c2e0ac586386 test that commits work $ echo 'shared bookmarks' > a $ hg commit -m 'testing shared bookmarks' $ hg bookmarks * bm1 3:b87954705719 bm3 2:c2e0ac586386 $ cd ../repo3 $ hg bookmarks bm1 3:b87954705719 * bm3 2:c2e0ac586386 $ echo 'more shared bookmarks' > a $ hg commit -m 'testing shared bookmarks' created new head $ hg bookmarks bm1 3:b87954705719 * bm3 4:62f4ded848e4 $ cd ../repo1 $ hg bookmarks * bm1 3:b87954705719 bm3 4:62f4ded848e4 $ cd .. test pushing bookmarks works $ hg clone repo3 repo4 updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd repo4 $ hg boo bm4 $ echo foo > b $ hg commit -m 'foo in b' $ hg boo bm1 3:b87954705719 bm3 4:62f4ded848e4 * bm4 5:92793bfc8cad $ hg push -B bm4 pushing to $TESTTMP/repo3 (glob) searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files exporting bookmark bm4 $ cd ../repo1 $ hg bookmarks * bm1 3:b87954705719 bm3 4:62f4ded848e4 bm4 5:92793bfc8cad $ cd ../repo3 $ hg bookmarks bm1 3:b87954705719 * bm3 4:62f4ded848e4 bm4 5:92793bfc8cad $ cd .. test behavior when sharing a shared repo $ hg share -B repo3 repo5 updating working directory 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd repo5 $ hg book bm1 3:b87954705719 bm3 4:62f4ded848e4 bm4 5:92793bfc8cad $ cd .. 
test what happens when an active bookmark is deleted $ cd repo1 $ hg boo -d bm3 $ hg boo * bm1 3:b87954705719 bm4 5:92793bfc8cad $ cd ../repo3 $ hg boo bm1 3:b87954705719 bm4 5:92793bfc8cad $ cd .. verify that bookmarks are not written on failed transaction $ cat > failpullbookmarks.py << EOF > """A small extension that makes bookmark pulls fail, for testing""" > from mercurial import extensions, exchange, error > def _pullbookmarks(orig, pullop): > orig(pullop) > raise error.HookAbort('forced failure by extension') > def extsetup(ui): > extensions.wrapfunction(exchange, '_pullbookmarks', _pullbookmarks) > EOF $ cd repo4 $ hg boo bm1 3:b87954705719 bm3 4:62f4ded848e4 * bm4 5:92793bfc8cad $ cd ../repo3 $ hg boo bm1 3:b87954705719 bm4 5:92793bfc8cad $ hg --config "extensions.failpullbookmarks=$TESTTMP/failpullbookmarks.py" pull $TESTTMP/repo4 pulling from $TESTTMP/repo4 (glob) searching for changes no changes found adding remote bookmark bm3 abort: forced failure by extension [255] $ hg boo bm1 3:b87954705719 bm4 5:92793bfc8cad $ hg pull $TESTTMP/repo4 pulling from $TESTTMP/repo4 (glob) searching for changes no changes found adding remote bookmark bm3 $ hg boo bm1 3:b87954705719 * bm3 4:62f4ded848e4 bm4 5:92793bfc8cad $ cd .. verify bookmark behavior after unshare $ cd repo3 $ hg unshare $ hg boo bm1 3:b87954705719 * bm3 4:62f4ded848e4 bm4 5:92793bfc8cad $ hg boo -d bm4 $ hg boo bm5 $ hg boo bm1 3:b87954705719 bm3 4:62f4ded848e4 * bm5 4:62f4ded848e4 $ cd ../repo1 $ hg boo * bm1 3:b87954705719 bm3 4:62f4ded848e4 bm4 5:92793bfc8cad $ cd .. Explicitly kill daemons to let the test exit on Windows $ killdaemons.py mercurial-3.7.3/tests/test-histedit-commute.t0000644000175000017500000003040412676531525020720 0ustar mpmmpm00000000000000 $ . 
"$TESTDIR/histedit-helpers.sh" $ cat >> $HGRCPATH < [extensions] > histedit= > EOF $ initrepo () > { > hg init r > cd r > for x in a b c d e f ; do > echo $x > $x > hg add $x > hg ci -m $x > done > } $ initrepo log before edit $ hg log --graph @ changeset: 5:652413bf663e | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: f | o changeset: 4:e860deea161a | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: e | o changeset: 3:055a42cdd887 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: d | o changeset: 2:177f92b77385 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: c | o changeset: 1:d2ae7f538514 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 0:cb9a9f314b8b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a show the edit commands offered $ HGEDITOR=cat hg histedit 177f92b77385 pick 177f92b77385 2 c pick 055a42cdd887 3 d pick e860deea161a 4 e pick 652413bf663e 5 f # Edit history between 177f92b77385 and 652413bf663e # # Commits are listed from least to most recent # # Commands: # # e, edit = use commit, but stop for amending # m, mess = edit commit message without changing commit content # p, pick = use commit # d, drop = remove commit from history # f, fold = use commit, but combine it with the one above # r, roll = like fold, but discard this commit's description # edit the history (use a hacky editor to check histedit-last-edit.txt backup) $ EDITED="$TESTTMP/editedhistory" $ cat > $EDITED < edit 177f92b77385 c > pick e860deea161a e > pick 652413bf663e f > pick 055a42cdd887 d > EOF $ HGEDITOR="cat \"$EDITED\" > " hg histedit 177f92b77385 2>&1 | fixbundle 0 files updated, 0 files merged, 4 files removed, 0 files unresolved Editing (177f92b77385), you may commit or record as needed now. 
(hg histedit --continue to resume) rules should end up in .hg/histedit-last-edit.txt: $ cat .hg/histedit-last-edit.txt edit 177f92b77385 c pick e860deea161a e pick 652413bf663e f pick 055a42cdd887 d $ hg histedit --abort 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat > $EDITED < pick 177f92b77385 c > pick e860deea161a e > pick 652413bf663e f > pick 055a42cdd887 d > EOF $ HGEDITOR="cat \"$EDITED\" > " hg histedit 177f92b77385 2>&1 | fixbundle 0 files updated, 0 files merged, 3 files removed, 0 files unresolved log after edit $ hg log --graph @ changeset: 5:07114f51870f | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: d | o changeset: 4:8ade9693061e | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: f | o changeset: 3:d8249471110a | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: e | o changeset: 2:177f92b77385 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: c | o changeset: 1:d2ae7f538514 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 0:cb9a9f314b8b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a put things back $ hg histedit 177f92b77385 --commands - 2>&1 << EOF | fixbundle > pick 177f92b77385 c > pick 07114f51870f d > pick d8249471110a e > pick 8ade9693061e f > EOF 0 files updated, 0 files merged, 3 files removed, 0 files unresolved $ hg log --graph @ changeset: 5:7eca9b5b1148 | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: f | o changeset: 4:915da888f2de | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: e | o changeset: 3:10517e47bbbb | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: d | o changeset: 2:177f92b77385 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: c | o changeset: 1:d2ae7f538514 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 0:cb9a9f314b8b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a slightly different 
this time $ hg histedit 177f92b77385 --commands - << EOF 2>&1 | fixbundle > pick 10517e47bbbb d > pick 7eca9b5b1148 f > pick 915da888f2de e > pick 177f92b77385 c > EOF 0 files updated, 0 files merged, 4 files removed, 0 files unresolved $ hg log --graph @ changeset: 5:38b92f448761 | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: c | o changeset: 4:de71b079d9ce | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: e | o changeset: 3:be9ae3a309c6 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: f | o changeset: 2:799205341b6b | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: d | o changeset: 1:d2ae7f538514 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 0:cb9a9f314b8b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a keep prevents stripping dead revs $ hg histedit 799205341b6b --keep --commands - 2>&1 << EOF | fixbundle > pick 799205341b6b d > pick be9ae3a309c6 f > pick 38b92f448761 c > pick de71b079d9ce e > EOF 0 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg log --graph @ changeset: 7:803ef1c6fcfd | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: e | o changeset: 6:ece0b8d93dda | parent: 3:be9ae3a309c6 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: c | | o changeset: 5:38b92f448761 | | user: test | | date: Thu Jan 01 00:00:00 1970 +0000 | | summary: c | | | o changeset: 4:de71b079d9ce |/ user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: e | o changeset: 3:be9ae3a309c6 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: f | o changeset: 2:799205341b6b | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: d | o changeset: 1:d2ae7f538514 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 0:cb9a9f314b8b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a try with --rev $ hg histedit --commands - --rev -2 2>&1 < pick de71b079d9ce e > pick 
38b92f448761 c > EOF hg: parse error: pick "de71b079d9ce" changeset was not a candidate (only use listed changesets) $ hg log --graph @ changeset: 7:803ef1c6fcfd | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: e | o changeset: 6:ece0b8d93dda | parent: 3:be9ae3a309c6 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: c | | o changeset: 5:38b92f448761 | | user: test | | date: Thu Jan 01 00:00:00 1970 +0000 | | summary: c | | | o changeset: 4:de71b079d9ce |/ user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: e | o changeset: 3:be9ae3a309c6 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: f | o changeset: 2:799205341b6b | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: d | o changeset: 1:d2ae7f538514 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 0:cb9a9f314b8b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a Verify that revsetalias entries work with histedit: $ cat >> $HGRCPATH < [revsetalias] > grandparent(ARG) = p1(p1(ARG)) > EOF $ echo extra commit >> c $ hg ci -m 'extra commit to c' $ HGEDITOR=cat hg histedit 'grandparent(.)' pick ece0b8d93dda 6 c pick 803ef1c6fcfd 7 e pick 9c863c565126 8 extra commit to c # Edit history between ece0b8d93dda and 9c863c565126 # # Commits are listed from least to most recent # # Commands: # # e, edit = use commit, but stop for amending # m, mess = edit commit message without changing commit content # p, pick = use commit # d, drop = remove commit from history # f, fold = use commit, but combine it with the one above # r, roll = like fold, but discard this commit's description # should also work if a commit message is missing $ BUNDLE="$TESTDIR/missing-comment.hg" $ hg init missing $ cd missing $ hg unbundle $BUNDLE adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 1 files (run 'hg update' to get a working copy) $ hg co tip 1 files updated, 0 files merged, 0 files removed, 0 
files unresolved $ hg log --graph @ changeset: 2:bd22688093b3 | tag: tip | user: Robert Altman | date: Mon Nov 28 16:40:04 2011 +0000 | summary: Update file. | o changeset: 1:3b3e956f9171 | user: Robert Altman | date: Mon Nov 28 16:37:57 2011 +0000 | o changeset: 0:141947992243 user: Robert Altman date: Mon Nov 28 16:35:28 2011 +0000 summary: Checked in text file $ hg histedit 0 $ cd .. $ cd .. Test to make sure folding renames doesn't cause bogus conflicts (issue4251): $ hg init issue4251 $ cd issue4251 $ mkdir initial-dir $ echo foo > initial-dir/initial-file $ hg add initial-dir/initial-file $ hg commit -m "initial commit" Move the file to a new directory, and in the same commit, change its content: $ mkdir another-dir $ hg mv initial-dir/initial-file another-dir/ $ echo changed > another-dir/initial-file $ hg commit -m "moved and changed" Rename the file: $ hg mv another-dir/initial-file another-dir/renamed-file $ hg commit -m "renamed" Now, let's try to fold the second commit into the first: $ cat > editor.sh < #!/bin/sh > cat > \$1 < pick b0f4233702ca 0 initial commit > fold 5e8704a8f2d2 1 moved and changed > pick 40e7299e8fa7 2 renamed > ENDOF > EOF $ HGEDITOR="sh ./editor.sh" hg histedit 0 1 files updated, 0 files merged, 1 files removed, 0 files unresolved adding another-dir/initial-file (glob) removing initial-dir/initial-file (glob) 0 files updated, 0 files merged, 1 files removed, 0 files unresolved 1 files updated, 0 files merged, 0 files removed, 0 files unresolved saved backup bundle to $TESTTMP/issue4251/.hg/strip-backup/*-backup.hg (glob) saved backup bundle to $TESTTMP/issue4251/.hg/strip-backup/*-backup.hg (glob) $ hg --config diff.git=yes export 0 # HG changeset patch # User test # Date 0 0 # Thu Jan 01 00:00:00 1970 +0000 # Node ID fffadc26f8f85623ce60b028a3f1ccc3730f8530 # Parent 0000000000000000000000000000000000000000 pick b0f4233702ca 0 initial commit fold 5e8704a8f2d2 1 moved and changed pick 40e7299e8fa7 2 renamed diff --git 
a/another-dir/initial-file b/another-dir/initial-file new file mode 100644 --- /dev/null +++ b/another-dir/initial-file @@ -0,0 +1,1 @@ +changed $ hg --config diff.git=yes export 1 # HG changeset patch # User test # Date 0 0 # Thu Jan 01 00:00:00 1970 +0000 # Node ID 9b730d82b00af8a2766facebfa47cc124405a118 # Parent fffadc26f8f85623ce60b028a3f1ccc3730f8530 renamed diff --git a/another-dir/initial-file b/another-dir/renamed-file rename from another-dir/initial-file rename to another-dir/renamed-file $ cd .. mercurial-3.7.3/tests/test-hardlinks.t0000644000175000017500000001750612676531525017423 0ustar mpmmpm00000000000000#require hardlink $ cat > nlinks.py < import sys > from mercurial import util > for f in sorted(sys.stdin.readlines()): > f = f[:-1] > print util.nlinks(f), f > EOF $ nlinksdir() > { > find $1 -type f | python $TESTTMP/nlinks.py > } Some implementations of cp can't create hardlinks (replaces 'cp -al' on Linux): $ cat > linkcp.py < from mercurial import util > import sys > util.copyfiles(sys.argv[1], sys.argv[2], hardlink=True) > EOF $ linkcp() > { > python $TESTTMP/linkcp.py $1 $2 > } Prepare repo r1: $ hg init r1 $ cd r1 $ echo c1 > f1 $ hg add f1 $ hg ci -m0 $ mkdir d1 $ cd d1 $ echo c2 > f2 $ hg add f2 $ hg ci -m1 $ cd ../.. 
$ nlinksdir r1/.hg/store 1 r1/.hg/store/00changelog.i 1 r1/.hg/store/00manifest.i 1 r1/.hg/store/data/d1/f2.i 1 r1/.hg/store/data/f1.i 1 r1/.hg/store/fncache 1 r1/.hg/store/phaseroots 1 r1/.hg/store/undo 1 r1/.hg/store/undo.backup.fncache 1 r1/.hg/store/undo.backupfiles 1 r1/.hg/store/undo.phaseroots Create hardlinked clone r2: $ hg clone -U --debug r1 r2 --config progress.debug=true linking: 1 linking: 2 linking: 3 linking: 4 linking: 5 linking: 6 linking: 7 linked 7 files Create non-hardlinked clone r3: $ hg clone --pull r1 r3 requesting all changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved Repos r1 and r2 should now contain hardlinked files: $ nlinksdir r1/.hg/store 2 r1/.hg/store/00changelog.i 2 r1/.hg/store/00manifest.i 2 r1/.hg/store/data/d1/f2.i 2 r1/.hg/store/data/f1.i 2 r1/.hg/store/fncache 1 r1/.hg/store/phaseroots 1 r1/.hg/store/undo 1 r1/.hg/store/undo.backup.fncache 1 r1/.hg/store/undo.backupfiles 1 r1/.hg/store/undo.phaseroots $ nlinksdir r2/.hg/store 2 r2/.hg/store/00changelog.i 2 r2/.hg/store/00manifest.i 2 r2/.hg/store/data/d1/f2.i 2 r2/.hg/store/data/f1.i 2 r2/.hg/store/fncache Repo r3 should not be hardlinked: $ nlinksdir r3/.hg/store 1 r3/.hg/store/00changelog.i 1 r3/.hg/store/00manifest.i 1 r3/.hg/store/data/d1/f2.i 1 r3/.hg/store/data/f1.i 1 r3/.hg/store/fncache 1 r3/.hg/store/phaseroots 1 r3/.hg/store/undo 1 r3/.hg/store/undo.backupfiles 1 r3/.hg/store/undo.phaseroots Create a non-inlined filelog in r3: $ cd r3/d1 >>> f = open('data1', 'wb') >>> for x in range(10000): ... f.write("%s\n" % str(x)) >>> f.close() $ for j in 0 1 2 3 4 5 6 7 8 9; do > cat data1 >> f2 > hg commit -m$j > done $ cd ../.. 
$ nlinksdir r3/.hg/store 1 r3/.hg/store/00changelog.i 1 r3/.hg/store/00manifest.i 1 r3/.hg/store/data/d1/f2.d 1 r3/.hg/store/data/d1/f2.i 1 r3/.hg/store/data/f1.i 1 r3/.hg/store/fncache 1 r3/.hg/store/phaseroots 1 r3/.hg/store/undo 1 r3/.hg/store/undo.backup.fncache 1 r3/.hg/store/undo.backup.phaseroots 1 r3/.hg/store/undo.backupfiles 1 r3/.hg/store/undo.phaseroots Push to repo r1 should break up most hardlinks in r2: $ hg -R r2 verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 2 changesets, 2 total revisions $ cd r3 $ hg push pushing to $TESTTMP/r1 (glob) searching for changes adding changesets adding manifests adding file changes added 10 changesets with 10 changes to 1 files $ cd .. $ nlinksdir r2/.hg/store 1 r2/.hg/store/00changelog.i 1 r2/.hg/store/00manifest.i 1 r2/.hg/store/data/d1/f2.i 2 r2/.hg/store/data/f1.i 1 r2/.hg/store/fncache $ hg -R r2 verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 2 changesets, 2 total revisions $ cd r1 $ hg up 1 files updated, 0 files merged, 0 files removed, 0 files unresolved Committing a change to f1 in r1 must break up hardlink f1.i in r2: $ echo c1c1 >> f1 $ hg ci -m00 $ cd .. $ nlinksdir r2/.hg/store 1 r2/.hg/store/00changelog.i 1 r2/.hg/store/00manifest.i 1 r2/.hg/store/data/d1/f2.i 1 r2/.hg/store/data/f1.i 1 r2/.hg/store/fncache $ cd r3 $ hg tip --template '{rev}:{node|short}\n' 11:a6451b6bc41f $ echo bla > f1 $ hg ci -m1 $ cd .. 
Create hardlinked copy r4 of r3 (on Linux, we would call 'cp -al'): $ linkcp r3 r4 r4 has hardlinks in the working dir (not just inside .hg): $ nlinksdir r4 2 r4/.hg/00changelog.i 2 r4/.hg/branch 2 r4/.hg/cache/branch2-served 2 r4/.hg/cache/rbc-names-v1 2 r4/.hg/cache/rbc-revs-v1 2 r4/.hg/dirstate 2 r4/.hg/hgrc 2 r4/.hg/last-message.txt 2 r4/.hg/requires 2 r4/.hg/store/00changelog.i 2 r4/.hg/store/00manifest.i 2 r4/.hg/store/data/d1/f2.d 2 r4/.hg/store/data/d1/f2.i 2 r4/.hg/store/data/f1.i 2 r4/.hg/store/fncache 2 r4/.hg/store/phaseroots 2 r4/.hg/store/undo 2 r4/.hg/store/undo.backup.fncache 2 r4/.hg/store/undo.backup.phaseroots 2 r4/.hg/store/undo.backupfiles 2 r4/.hg/store/undo.phaseroots 2 r4/.hg/undo.backup.dirstate 2 r4/.hg/undo.bookmarks 2 r4/.hg/undo.branch 2 r4/.hg/undo.desc 2 r4/.hg/undo.dirstate 2 r4/d1/data1 2 r4/d1/f2 2 r4/f1 Update back to revision 11 in r4 should break hardlink of file f1: $ hg -R r4 up 11 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ nlinksdir r4 2 r4/.hg/00changelog.i 1 r4/.hg/branch 2 r4/.hg/cache/branch2-served 2 r4/.hg/cache/rbc-names-v1 2 r4/.hg/cache/rbc-revs-v1 1 r4/.hg/dirstate 2 r4/.hg/hgrc 2 r4/.hg/last-message.txt 2 r4/.hg/requires 2 r4/.hg/store/00changelog.i 2 r4/.hg/store/00manifest.i 2 r4/.hg/store/data/d1/f2.d 2 r4/.hg/store/data/d1/f2.i 2 r4/.hg/store/data/f1.i 2 r4/.hg/store/fncache 2 r4/.hg/store/phaseroots 2 r4/.hg/store/undo 2 r4/.hg/store/undo.backup.fncache 2 r4/.hg/store/undo.backup.phaseroots 2 r4/.hg/store/undo.backupfiles 2 r4/.hg/store/undo.phaseroots 2 r4/.hg/undo.backup.dirstate 2 r4/.hg/undo.bookmarks 2 r4/.hg/undo.branch 2 r4/.hg/undo.desc 2 r4/.hg/undo.dirstate 2 r4/d1/data1 2 r4/d1/f2 1 r4/f1 Test hardlinking outside hg: $ mkdir x $ echo foo > x/a $ linkcp x y $ echo bar >> y/a No diff if hardlink: $ diff x/a y/a Test mq hardlinking: $ echo "[extensions]" >> $HGRCPATH $ echo "mq=" >> $HGRCPATH $ hg init a $ cd a $ hg qimport -n foo - << EOF > # HG changeset patch > # Date 1 0 
> diff -r 2588a8b53d66 a > --- /dev/null Thu Jan 01 00:00:00 1970 +0000 > +++ b/a Wed Jul 23 15:54:29 2008 +0200 > @@ -0,0 +1,1 @@ > +a > EOF adding foo to series file $ hg qpush applying foo now at: foo $ cd .. $ linkcp a b $ cd b $ hg qimport -n bar - << EOF > # HG changeset patch > # Date 2 0 > diff -r 2588a8b53d66 a > --- /dev/null Thu Jan 01 00:00:00 1970 +0000 > +++ b/b Wed Jul 23 15:54:29 2008 +0200 > @@ -0,0 +1,1 @@ > +b > EOF adding bar to series file $ hg qpush applying bar now at: bar $ cat .hg/patches/status 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c:bar $ cat .hg/patches/series foo bar $ cat ../a/.hg/patches/status 430ed4828a74fa4047bc816a25500f7472ab4bfe:foo $ cat ../a/.hg/patches/series foo Test tags hardlinking: $ hg qdel -r qbase:qtip patch foo finalized without changeset message patch bar finalized without changeset message $ hg tag -l lfoo $ hg tag foo $ cd .. $ linkcp b c $ cd c $ hg tag -l -r 0 lbar $ hg tag -r 0 bar $ cat .hgtags 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo 430ed4828a74fa4047bc816a25500f7472ab4bfe bar $ cat .hg/localtags 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo 430ed4828a74fa4047bc816a25500f7472ab4bfe lbar $ cat ../b/.hgtags 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c foo $ cat ../b/.hg/localtags 4e7abb4840c46a910f6d7b4d3c3fc7e5209e684c lfoo $ cd .. 
mercurial-3.7.3/tests/test-profile.t0000644000175000017500000000155612676531525017102 0ustar mpmmpm00000000000000test --time $ hg --time help -q help 2>&1 | grep time > /dev/null $ hg init a $ cd a #if lsprof test --profile $ hg --profile st 2>../out $ grep CallCount ../out > /dev/null || cat ../out $ hg --profile --config profiling.output=../out st $ grep CallCount ../out > /dev/null || cat ../out $ hg --profile --config profiling.output=blackbox --config extensions.blackbox= st $ grep CallCount .hg/blackbox.log > /dev/null || cat .hg/blackbox.log $ hg --profile --config profiling.format=text st 2>../out $ grep CallCount ../out > /dev/null || cat ../out $ echo "[profiling]" >> $HGRCPATH $ echo "format=kcachegrind" >> $HGRCPATH $ hg --profile st 2>../out $ grep 'events: Ticks' ../out > /dev/null || cat ../out $ hg --profile --config profiling.output=../out st $ grep 'events: Ticks' ../out > /dev/null || cat ../out #endif $ cd .. mercurial-3.7.3/tests/test-record.t0000644000175000017500000000501612676531525016713 0ustar mpmmpm00000000000000Set up a repo $ cat <> $HGRCPATH > [ui] > interactive = true > [extensions] > record = > EOF $ hg init a $ cd a Record help $ hg record -h hg record [OPTION]... [FILE]... interactively select changes to commit If a list of files is omitted, all changes reported by 'hg status' will be candidates for recording. See 'hg help dates' for a list of formats valid for -d/--date. You will be prompted for whether to record changes to each modified file, and for files with multiple changes, for each change to use. For each query, the following responses are possible: y - record this change n - skip this change e - edit this change manually s - skip remaining changes to this file f - record remaining changes to this file d - done, skip remaining changes and files a - record all changes to all remaining files q - quit, recording no changes ? - display help This command is not available when committing a merge. 
options ([+] can be repeated): -A --addremove mark new/missing files as added/removed before committing --close-branch mark a branch head as closed --amend amend the parent of the working directory -s --secret use the secret phase for committing -e --edit invoke editor on commit messages -I --include PATTERN [+] include names matching the given patterns -X --exclude PATTERN [+] exclude names matching the given patterns -m --message TEXT use text as commit message -l --logfile FILE read commit message from file -d --date DATE record the specified date as commit date -u --user USER record the specified user as committer -S --subrepos recurse into subrepositories -w --ignore-all-space ignore white space when comparing lines -b --ignore-space-change ignore changes in the amount of white space -B --ignore-blank-lines ignore changes whose lines are all blank (some details hidden, use --verbose to show complete help) Select no files $ touch empty-rw $ hg add empty-rw $ hg record empty-rw< n > EOF diff --git a/empty-rw b/empty-rw new file mode 100644 examine changes to 'empty-rw'? [Ynesfdaq?] 
n no changes to record $ hg tip -p changeset: -1:000000000000 tag: tip user: date: Thu Jan 01 00:00:00 1970 +0000 mercurial-3.7.3/tests/test-encoding-textwrap.t0000644000175000017500000005403612676531525021105 0ustar mpmmpm00000000000000Test text wrapping for multibyte characters $ mkdir t $ cd t define commands to display help text $ cat << EOF > show.py > from mercurial import cmdutil > > cmdtable = {} > command = cmdutil.command(cmdtable) > > # Japanese full-width characters: > @command('show_full_ja', [], '') > def show_full_ja(ui, **opts): > u'''\u3042\u3044\u3046\u3048\u304a\u304b\u304d\u304f\u3051 \u3042\u3044\u3046\u3048\u304a\u304b\u304d\u304f\u3051 \u3042\u3044\u3046\u3048\u304a\u304b\u304d\u304f\u3051 > > \u3042\u3044\u3046\u3048\u304a\u304b\u304d\u304f\u3051 \u3042\u3044\u3046\u3048\u304a\u304b\u304d\u304f\u3051 \u3042\u3044\u3046\u3048\u304a\u304b\u304d\u304f\u3051 \u3042\u3044\u3046\u3048\u304a\u304b\u304d\u304f\u3051 > > \u3042\u3044\u3046\u3048\u304a\u304b\u304d\u304f\u3051\u3042\u3044\u3046\u3048\u304a\u304b\u304d\u304f\u3051\u3042\u3044\u3046\u3048\u304a\u304b\u304d\u304f\u3051\u3042\u3044\u3046\u3048\u304a\u304b\u304d\u304f\u3051 > ''' > > # Japanese half-width characters: > @command('show_half_ja', [], '') > def show_half_ja(ui, *opts): > u'''\uff71\uff72\uff73\uff74\uff75\uff76\uff77\uff78\uff79 \uff71\uff72\uff73\uff74\uff75\uff76\uff77\uff78\uff79 \uff71\uff72\uff73\uff74\uff75\uff76\uff77\uff78\uff79 \uff71\uff72\uff73\uff74\uff75\uff76\uff77\uff78\uff79 > > \uff71\uff72\uff73\uff74\uff75\uff76\uff77\uff78\uff79 \uff71\uff72\uff73\uff74\uff75\uff76\uff77\uff78\uff79 \uff71\uff72\uff73\uff74\uff75\uff76\uff77\uff78\uff79 \uff71\uff72\uff73\uff74\uff75\uff76\uff77\uff78\uff79 \uff71\uff72\uff73\uff74\uff75\uff76\uff77\uff78\uff79 \uff71\uff72\uff73\uff74\uff75\uff76\uff77\uff78\uff79 \uff71\uff72\uff73\uff74\uff75\uff76\uff77\uff78\uff79 \uff71\uff72\uff73\uff74\uff75\uff76\uff77\uff78\uff79 > > 
\uff71\uff72\uff73\uff74\uff75\uff76\uff77\uff78\uff79\uff71\uff72\uff73\uff74\uff75\uff76\uff77\uff78\uff79\uff71\uff72\uff73\uff74\uff75\uff76\uff77\uff78\uff79\uff71\uff72\uff73\uff74\uff75\uff76\uff77\uff78\uff79\uff71\uff72\uff73\uff74\uff75\uff76\uff77\uff78\uff79\uff71\uff72\uff73\uff74\uff75\uff76\uff77\uff78\uff79\uff71\uff72\uff73\uff74\uff75\uff76\uff77\uff78\uff79\uff71\uff72\uff73\uff74\uff75\uff76\uff77\uff78\uff79 > ''' > > # Japanese ambiguous-width characters: > @command('show_ambig_ja', [], '') > def show_ambig_ja(ui, **opts): > u'''\u03b1\u03b2\u03b3\u03b4\u03c5\u03b6\u03b7\u03b8\u25cb \u03b1\u03b2\u03b3\u03b4\u03c5\u03b6\u03b7\u03b8\u25cb \u03b1\u03b2\u03b3\u03b4\u03c5\u03b6\u03b7\u03b8\u25cb > > \u03b1\u03b2\u03b3\u03b4\u03c5\u03b6\u03b7\u03b8\u25cb \u03b1\u03b2\u03b3\u03b4\u03c5\u03b6\u03b7\u03b8\u25cb \u03b1\u03b2\u03b3\u03b4\u03c5\u03b6\u03b7\u03b8\u25cb \u03b1\u03b2\u03b3\u03b4\u03c5\u03b6\u03b7\u03b8\u25cb \u03b1\u03b2\u03b3\u03b4\u03c5\u03b6\u03b7\u03b8\u25cb \u03b1\u03b2\u03b3\u03b4\u03c5\u03b6\u03b7\u03b8\u25cb \u03b1\u03b2\u03b3\u03b4\u03c5\u03b6\u03b7\u03b8\u25cb > > \u03b1\u03b2\u03b3\u03b4\u03c5\u03b6\u03b7\u03b8\u25cb\u03b1\u03b2\u03b3\u03b4\u03c5\u03b6\u03b7\u03b8\u25cb\u03b1\u03b2\u03b3\u03b4\u03c5\u03b6\u03b7\u03b8\u25cb\u03b1\u03b2\u03b3\u03b4\u03c5\u03b6\u03b7\u03b8\u25cb\u03b1\u03b2\u03b3\u03b4\u03c5\u03b6\u03b7\u03b8\u25cb\u03b1\u03b2\u03b3\u03b4\u03c5\u03b6\u03b7\u03b8\u25cb\u03b1\u03b2\u03b3\u03b4\u03c5\u03b6\u03b7\u03b8\u25cb > ''' > > # Russian ambiguous-width characters: > @command('show_ambig_ru', [], '') > def show_ambig_ru(ui, **opts): > u'''\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0438 \u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0438 \u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0438 \u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0438 \u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0438 > > \u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0438 
\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0438 \u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0438 \u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0438 \u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0438 \u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0438 > > \u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0438\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0438\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0438\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0438\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0438\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0438\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0438 > ''' > EOF "COLUMNS=60" means that there is no lines which has grater than 58 width (1) test text wrapping for non-ambiguous-width characters (1-1) display Japanese full-width characters in cp932 $ COLUMNS=60 hg --encoding cp932 --config extensions.show=./show.py help show_full_ja hg show_full_ja \x82\xa0\x82\xa2\x82\xa4\x82\xa6\x82\xa8\x82\xa9\x82\xab\x82\xad\x82\xaf \x82\xa0\x82\xa2\x82\xa4\x82\xa6\x82\xa8\x82\xa9\x82\xab\x82\xad\x82\xaf \x82\xa0\x82\xa2\x82\xa4\x82\xa6\x82\xa8\x82\xa9\x82\xab\x82\xad\x82\xaf (esc) \x82\xa0\x82\xa2\x82\xa4\x82\xa6\x82\xa8\x82\xa9\x82\xab\x82\xad\x82\xaf \x82\xa0\x82\xa2\x82\xa4\x82\xa6\x82\xa8\x82\xa9\x82\xab\x82\xad\x82\xaf (esc) \x82\xa0\x82\xa2\x82\xa4\x82\xa6\x82\xa8\x82\xa9\x82\xab\x82\xad\x82\xaf \x82\xa0\x82\xa2\x82\xa4\x82\xa6\x82\xa8\x82\xa9\x82\xab\x82\xad\x82\xaf (esc) \x82\xa0\x82\xa2\x82\xa4\x82\xa6\x82\xa8\x82\xa9\x82\xab\x82\xad\x82\xaf\x82\xa0\x82\xa2\x82\xa4\x82\xa6\x82\xa8\x82\xa9\x82\xab\x82\xad\x82\xaf\x82\xa0\x82\xa2\x82\xa4\x82\xa6\x82\xa8\x82\xa9\x82\xab\x82\xad\x82\xaf (esc) \x82\xa0\x82\xa2\x82\xa4\x82\xa6\x82\xa8\x82\xa9\x82\xab\x82\xad\x82\xaf (esc) (some details hidden, use --verbose to show complete help) (1-2) display Japanese full-width characters in utf-8 $ COLUMNS=60 hg --encoding utf-8 --config extensions.show=./show.py help show_full_ja hg 
show_full_ja \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a\xe3\x81\x8b\xe3\x81\x8d\xe3\x81\x8f\xe3\x81\x91 \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a\xe3\x81\x8b\xe3\x81\x8d\xe3\x81\x8f\xe3\x81\x91 \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a\xe3\x81\x8b\xe3\x81\x8d\xe3\x81\x8f\xe3\x81\x91 (esc) \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a\xe3\x81\x8b\xe3\x81\x8d\xe3\x81\x8f\xe3\x81\x91 \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a\xe3\x81\x8b\xe3\x81\x8d\xe3\x81\x8f\xe3\x81\x91 (esc) \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a\xe3\x81\x8b\xe3\x81\x8d\xe3\x81\x8f\xe3\x81\x91 \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a\xe3\x81\x8b\xe3\x81\x8d\xe3\x81\x8f\xe3\x81\x91 (esc) \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a\xe3\x81\x8b\xe3\x81\x8d\xe3\x81\x8f\xe3\x81\x91\xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a\xe3\x81\x8b\xe3\x81\x8d\xe3\x81\x8f\xe3\x81\x91\xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a\xe3\x81\x8b\xe3\x81\x8d\xe3\x81\x8f\xe3\x81\x91 (esc) \xe3\x81\x82\xe3\x81\x84\xe3\x81\x86\xe3\x81\x88\xe3\x81\x8a\xe3\x81\x8b\xe3\x81\x8d\xe3\x81\x8f\xe3\x81\x91 (esc) (some details hidden, use --verbose to show complete help) (1-3) display Japanese half-width characters in cp932 $ COLUMNS=60 hg --encoding cp932 --config extensions.show=./show.py help show_half_ja hg show_half_ja \xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9 \xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9 \xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9 \xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9 (esc) \xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9 \xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9 \xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9 \xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9 \xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9 (esc) \xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9 \xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9 \xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9 (esc) 
\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9 (esc) \xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9 (esc) (some details hidden, use --verbose to show complete help) (1-4) display Japanese half-width characters in utf-8 $ COLUMNS=60 hg --encoding utf-8 --config extensions.show=./show.py help show_half_ja hg show_half_ja \xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9 \xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9 \xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9 \xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9 (esc) \xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9 \xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9 \xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9 \xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9 \xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9 (esc) \xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9 \xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9 \xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9 (esc) 
\xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9\xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9\xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9\xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9\xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9\xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9 (esc) \xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9\xef\xbd\xb1\xef\xbd\xb2\xef\xbd\xb3\xef\xbd\xb4\xef\xbd\xb5\xef\xbd\xb6\xef\xbd\xb7\xef\xbd\xb8\xef\xbd\xb9 (esc) (some details hidden, use --verbose to show complete help) (2) test text wrapping for ambiguous-width characters (2-1) treat width of ambiguous characters as narrow (default) (2-1-1) display Japanese ambiguous-width characters in cp932 $ COLUMNS=60 hg --encoding cp932 --config extensions.show=./show.py help show_ambig_ja hg show_ambig_ja \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b (esc) \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b (esc) \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b (esc) 
\x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b\x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b\x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b\x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b\x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b\x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b (esc) \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b (esc) (some details hidden, use --verbose to show complete help) (2-1-2) display Japanese ambiguous-width characters in utf-8 $ COLUMNS=60 hg --encoding utf-8 --config extensions.show=./show.py help show_ambig_ja hg show_ambig_ja \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b (esc) \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b (esc) \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b (esc) \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b\xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b\xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b\xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b\xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b\xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b (esc) 
\xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b (esc) (some details hidden, use --verbose to show complete help) (2-1-3) display Russian ambiguous-width characters in cp1251 $ COLUMNS=60 hg --encoding cp1251 --config extensions.show=./show.py help show_ambig_ru hg show_ambig_ru \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 (esc) \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 (esc) \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 (esc) \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8\xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8\xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8\xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8\xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8\xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 (esc) \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 (esc) (some details hidden, use --verbose to show complete help) (2-1-4) display Russian ambiguous-width characters in utf-8 $ COLUMNS=60 hg --encoding utf-8 --config extensions.show=./show.py help show_ambig_ru hg show_ambig_ru \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 (esc) \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 (esc) 
\xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 (esc) \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8\xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8\xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8\xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8\xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8\xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 (esc) \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 (esc) (some details hidden, use --verbose to show complete help) (2-2) treat width of ambiguous characters as wide (2-2-1) display Japanese ambiguous-width characters in cp932 $ COLUMNS=60 HGENCODINGAMBIGUOUS=wide hg --encoding cp932 --config extensions.show=./show.py help show_ambig_ja hg show_ambig_ja \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b (esc) \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b (esc) \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b (esc) \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b (esc) \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b (esc) \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b\x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b\x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b (esc) 
\x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b\x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b\x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b (esc) \x83\xbf\x83\xc0\x83\xc1\x83\xc2\x83\xd2\x83\xc4\x83\xc5\x83\xc6\x81\x9b (esc) (some details hidden, use --verbose to show complete help) (2-2-2) display Japanese ambiguous-width characters in utf-8 $ COLUMNS=60 HGENCODINGAMBIGUOUS=wide hg --encoding utf-8 --config extensions.show=./show.py help show_ambig_ja hg show_ambig_ja \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b (esc) \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b (esc) \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b (esc) \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b (esc) \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b (esc) \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b\xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b\xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b (esc) \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b\xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b\xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b (esc) \xce\xb1\xce\xb2\xce\xb3\xce\xb4\xcf\x85\xce\xb6\xce\xb7\xce\xb8\xe2\x97\x8b (esc) (some details hidden, use --verbose to show complete help) (2-2-3) display Russian 
ambiguous-width characters in cp1251 $ COLUMNS=60 HGENCODINGAMBIGUOUS=wide hg --encoding cp1251 --config extensions.show=./show.py help show_ambig_ru hg show_ambig_ru \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 (esc) \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 (esc) \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 (esc) \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 (esc) \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 (esc) \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8\xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8\xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 (esc) \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8\xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8\xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 (esc) \xcd\xe0\xf1\xf2\xf0\xee\xe9\xea\xe8 (esc) (some details hidden, use --verbose to show complete help) (2-2-4) display Russian ambiguous-width characters in utf-8 $ COLUMNS=60 HGENCODINGAMBIGUOUS=wide hg --encoding utf-8 --config extensions.show=./show.py help show_ambig_ru hg show_ambig_ru \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 (esc) \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 (esc) \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 (esc) \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 (esc) \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 (esc) 
\xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8\xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8\xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 (esc) \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8\xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8\xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 (esc) \xd0\x9d\xd0\xb0\xd1\x81\xd1\x82\xd1\x80\xd0\xbe\xd0\xb9\xd0\xba\xd0\xb8 (esc) (some details hidden, use --verbose to show complete help) $ cd .. mercurial-3.7.3/tests/test-check-pyflakes.t0000644000175000017500000000055112676531525020325 0ustar mpmmpm00000000000000#require test-repo pyflakes $ cd "`dirname "$TESTDIR"`" run pyflakes on all tracked files ending in .py or without a file ending (skipping binary file random-seed) $ hg locate 'set:**.py or grep("^!#.*python")' 2>/dev/null \ > | xargs pyflakes 2>/dev/null | "$TESTDIR/filterpyflakes.py" tests/filterpyflakes.py:61: undefined name 'undefinedname' mercurial-3.7.3/tests/test-hgwebdir.t0000644000175000017500000012051012676531525017225 0ustar mpmmpm00000000000000#require serve hide outer repo and work in dir without '.hg' $ hg init $ mkdir dir $ cd dir Tests some basic hgwebdir functionality. Tests setting up paths and collection, different forms of 404s and the subdirectory support. $ mkdir webdir $ cd webdir $ hg init a $ echo a > a/a $ hg --cwd a ci -Ama -d'1 0' adding a create a mercurial queue repository $ hg --cwd a qinit --config extensions.hgext.mq= -c $ hg init b $ echo b > b/b $ hg --cwd b ci -Amb -d'2 0' adding b create a nested repository $ cd b $ hg init d $ echo d > d/d $ hg --cwd d ci -Amd -d'3 0' adding d $ cd .. 
$ hg init c $ echo c > c/c $ hg --cwd c ci -Amc -d'3 0' adding c create a subdirectory containing repositories and subrepositories $ mkdir notrepo $ cd notrepo $ hg init e $ echo e > e/e $ hg --cwd e ci -Ame -d'4 0' adding e $ hg init e/e2 $ echo e2 > e/e2/e2 $ hg --cwd e/e2 ci -Ame2 -d '4 0' adding e2 $ hg init f $ echo f > f/f $ hg --cwd f ci -Amf -d'4 0' adding f $ hg init f/f2 $ echo f2 > f/f2/f2 $ hg --cwd f/f2 ci -Amf2 -d '4 0' adding f2 $ echo 'f2 = f2' > f/.hgsub $ hg -R f ci -Am 'add subrepo' -d'4 0' adding .hgsub $ cat >> f/.hg/hgrc << EOF > [web] > name = fancy name for repo f > EOF $ cd .. create repository without .hg/store $ hg init nostore $ rm -R nostore/.hg/store $ root=`pwd` $ cd .. serve $ cat > paths.conf < [paths] > a=$root/a > b=$root/b > EOF $ hg serve -p $HGPORT -d --pid-file=hg.pid --webdir-conf paths.conf \ > -A access-paths.log -E error-paths-1.log $ cat hg.pid >> $DAEMON_PIDS should give a 404 - file does not exist $ get-with-headers.py localhost:$HGPORT 'a/file/tip/bork?style=raw' 404 Not Found error: bork@8580ff50825a: not found in manifest [1] should succeed $ get-with-headers.py localhost:$HGPORT '?style=raw' 200 Script output follows /a/ /b/ $ get-with-headers.py localhost:$HGPORT 'a/file/tip/a?style=raw' 200 Script output follows a $ get-with-headers.py localhost:$HGPORT 'b/file/tip/b?style=raw' 200 Script output follows b should give a 404 - repo is not published $ get-with-headers.py localhost:$HGPORT 'c/file/tip/c?style=raw' 404 Not Found error: repository c/file/tip/c not found [1] atom-log without basedir $ get-with-headers.py localhost:$HGPORT 'a/atom-log' | grep ' (glob) (glob) (glob) rss-log without basedir $ get-with-headers.py localhost:$HGPORT 'a/rss-log' | grep 'http://*:$HGPORT/a/rev/8580ff50825a (glob) $ cat > paths.conf < [paths] > t/a/=$root/a > b=$root/b > coll=$root/* > rcoll=$root/** > star=* > starstar=** > astar=webdir/a/* > EOF $ hg serve -p $HGPORT1 -d --pid-file=hg.pid --webdir-conf paths.conf \ > -A 
access-paths.log -E error-paths-2.log $ cat hg.pid >> $DAEMON_PIDS should succeed, slashy names $ get-with-headers.py localhost:$HGPORT1 '?style=raw' 200 Script output follows /t/a/ /b/ /coll/a/ /coll/a/.hg/patches/ /coll/b/ /coll/c/ /coll/notrepo/e/ /coll/notrepo/f/ /rcoll/a/ /rcoll/a/.hg/patches/ /rcoll/b/ /rcoll/b/d/ /rcoll/c/ /rcoll/notrepo/e/ /rcoll/notrepo/e/e2/ /rcoll/notrepo/f/ /rcoll/notrepo/f/f2/ /star/webdir/a/ /star/webdir/a/.hg/patches/ /star/webdir/b/ /star/webdir/c/ /star/webdir/notrepo/e/ /star/webdir/notrepo/f/ /starstar/webdir/a/ /starstar/webdir/a/.hg/patches/ /starstar/webdir/b/ /starstar/webdir/b/d/ /starstar/webdir/c/ /starstar/webdir/notrepo/e/ /starstar/webdir/notrepo/e/e2/ /starstar/webdir/notrepo/f/ /starstar/webdir/notrepo/f/f2/ /astar/ /astar/.hg/patches/ $ get-with-headers.py localhost:$HGPORT1 '?style=paper' 200 Script output follows Mercurial repositories index
                        (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob) (glob)
                        Name Description Contact Last modified    
                        t/a unknown Foo Bar <foo.bar@example.com> *
                        b unknown Foo Bar <foo.bar@example.com> *
                        coll/a unknown Foo Bar <foo.bar@example.com> *
                        coll/a/.hg/patches unknown Foo Bar <foo.bar@example.com> *
                        coll/b unknown Foo Bar <foo.bar@example.com> *
                        coll/c unknown Foo Bar <foo.bar@example.com> *
                        coll/notrepo/e unknown Foo Bar <foo.bar@example.com> *
                        fancy name for repo f unknown Foo Bar <foo.bar@example.com> *
                        rcoll/a unknown Foo Bar <foo.bar@example.com> *
                        rcoll/a/.hg/patches unknown Foo Bar <foo.bar@example.com> *
                        rcoll/b unknown Foo Bar <foo.bar@example.com> *
                        rcoll/b/d unknown Foo Bar <foo.bar@example.com> *
                        rcoll/c unknown Foo Bar <foo.bar@example.com> *
                        rcoll/notrepo/e unknown Foo Bar <foo.bar@example.com> *
                        rcoll/notrepo/e/e2 unknown Foo Bar <foo.bar@example.com> *
                        fancy name for repo f unknown Foo Bar <foo.bar@example.com> *
                        rcoll/notrepo/f/f2 unknown Foo Bar <foo.bar@example.com> *
                        star/webdir/a unknown Foo Bar <foo.bar@example.com> *
                        star/webdir/a/.hg/patches unknown Foo Bar <foo.bar@example.com> *
                        star/webdir/b unknown Foo Bar <foo.bar@example.com> *
                        star/webdir/c unknown Foo Bar <foo.bar@example.com> *
                        star/webdir/notrepo/e unknown Foo Bar <foo.bar@example.com> *
                        fancy name for repo f unknown Foo Bar <foo.bar@example.com> *
                        starstar/webdir/a unknown Foo Bar <foo.bar@example.com> *
                        starstar/webdir/a/.hg/patches unknown Foo Bar <foo.bar@example.com> *
                        starstar/webdir/b unknown Foo Bar <foo.bar@example.com> *
                        starstar/webdir/b/d unknown Foo Bar <foo.bar@example.com> *
                        starstar/webdir/c unknown Foo Bar <foo.bar@example.com> *
                        starstar/webdir/notrepo/e unknown Foo Bar <foo.bar@example.com> *
                        starstar/webdir/notrepo/e/e2 unknown Foo Bar <foo.bar@example.com> *
                        fancy name for repo f unknown Foo Bar <foo.bar@example.com> *
                        starstar/webdir/notrepo/f/f2 unknown Foo Bar <foo.bar@example.com> *
                        astar unknown Foo Bar <foo.bar@example.com> *
                        astar/.hg/patches unknown Foo Bar <foo.bar@example.com> *
                        $ get-with-headers.py localhost:$HGPORT1 't?style=raw' 200 Script output follows /t/a/ $ get-with-headers.py localhost:$HGPORT1 't/?style=raw' 200 Script output follows /t/a/ $ get-with-headers.py localhost:$HGPORT1 't/?style=paper' 200 Script output follows Mercurial repositories index
                        (glob)
                        Name Description Contact Last modified    
                        a unknown Foo Bar <foo.bar@example.com> *
                        $ get-with-headers.py localhost:$HGPORT1 't/a?style=atom' 200 Script output follows http://*:$HGPORT1/t/a/ (glob) (glob) (glob) t/a Changelog 1970-01-01T00:00:01+00:00 [default] a http://*:$HGPORT1/t/a/#changeset-8580ff50825a50c8f716709acdf8de0deddcd6ab (glob) (glob) test test 1970-01-01T00:00:01+00:00 1970-01-01T00:00:01+00:00
                        changeset 8580ff50825a
                        branch default
                        bookmark
                        tag tip
                        user test
                        description a
                        files a
                        $ get-with-headers.py localhost:$HGPORT1 't/a/?style=atom' 200 Script output follows http://*:$HGPORT1/t/a/ (glob) (glob) (glob) t/a Changelog 1970-01-01T00:00:01+00:00 [default] a http://*:$HGPORT1/t/a/#changeset-8580ff50825a50c8f716709acdf8de0deddcd6ab (glob) (glob) test test 1970-01-01T00:00:01+00:00 1970-01-01T00:00:01+00:00
                        changeset 8580ff50825a
                        branch default
                        bookmark
                        tag tip
                        user test
                        description a
                        files a
                        $ get-with-headers.py localhost:$HGPORT1 't/a/file/tip/a?style=raw' 200 Script output follows a Test [paths] '*' extension $ get-with-headers.py localhost:$HGPORT1 'coll/?style=raw' 200 Script output follows /coll/a/ /coll/a/.hg/patches/ /coll/b/ /coll/c/ /coll/notrepo/e/ /coll/notrepo/f/ $ get-with-headers.py localhost:$HGPORT1 'coll/a/file/tip/a?style=raw' 200 Script output follows a Test [paths] '**' extension $ get-with-headers.py localhost:$HGPORT1 'rcoll/?style=raw' 200 Script output follows /rcoll/a/ /rcoll/a/.hg/patches/ /rcoll/b/ /rcoll/b/d/ /rcoll/c/ /rcoll/notrepo/e/ /rcoll/notrepo/e/e2/ /rcoll/notrepo/f/ /rcoll/notrepo/f/f2/ $ get-with-headers.py localhost:$HGPORT1 'rcoll/b/d/file/tip/d?style=raw' 200 Script output follows d Test collapse = True $ killdaemons.py $ cat >> paths.conf < [web] > collapse=true > descend = true > EOF $ hg serve -p $HGPORT1 -d --pid-file=hg.pid --webdir-conf paths.conf \ > -A access-paths.log -E error-paths-3.log $ cat hg.pid >> $DAEMON_PIDS $ get-with-headers.py localhost:$HGPORT1 'coll/?style=raw' 200 Script output follows /coll/a/ /coll/a/.hg/patches/ /coll/b/ /coll/c/ /coll/notrepo/ $ get-with-headers.py localhost:$HGPORT1 'coll/a/file/tip/a?style=raw' 200 Script output follows a $ get-with-headers.py localhost:$HGPORT1 'rcoll/?style=raw' 200 Script output follows /rcoll/a/ /rcoll/a/.hg/patches/ /rcoll/b/ /rcoll/b/d/ /rcoll/c/ /rcoll/notrepo/ $ get-with-headers.py localhost:$HGPORT1 'rcoll/b/d/file/tip/d?style=raw' 200 Script output follows d Test intermediate directories Hide the subrepo parent $ cp $root/notrepo/f/.hg/hgrc $root/notrepo/f/.hg/hgrc.bak $ cat >> $root/notrepo/f/.hg/hgrc << EOF > [web] > hidden = True > EOF $ get-with-headers.py localhost:$HGPORT1 'rcoll/notrepo/?style=raw' 200 Script output follows /rcoll/notrepo/e/ /rcoll/notrepo/e/e2/ Subrepo parent not hidden $ mv $root/notrepo/f/.hg/hgrc.bak $root/notrepo/f/.hg/hgrc $ get-with-headers.py localhost:$HGPORT1 
'rcoll/notrepo/?style=raw' 200 Script output follows /rcoll/notrepo/e/ /rcoll/notrepo/e/e2/ /rcoll/notrepo/f/ /rcoll/notrepo/f/f2/ Test repositories inside intermediate directories $ get-with-headers.py localhost:$HGPORT1 'rcoll/notrepo/e/file/tip/e?style=raw' 200 Script output follows e Test subrepositories inside intermediate directories $ get-with-headers.py localhost:$HGPORT1 'rcoll/notrepo/f/f2/file/tip/f2?style=raw' 200 Script output follows f2 Test descend = False $ killdaemons.py $ cat >> paths.conf < descend=false > EOF $ hg serve -p $HGPORT1 -d --pid-file=hg.pid --webdir-conf paths.conf \ > -A access-paths.log -E error-paths-4.log $ cat hg.pid >> $DAEMON_PIDS $ get-with-headers.py localhost:$HGPORT1 'coll/?style=raw' 200 Script output follows /coll/a/ /coll/b/ /coll/c/ $ get-with-headers.py localhost:$HGPORT1 'coll/a/file/tip/a?style=raw' 200 Script output follows a $ get-with-headers.py localhost:$HGPORT1 'rcoll/?style=raw' 200 Script output follows /rcoll/a/ /rcoll/b/ /rcoll/c/ $ get-with-headers.py localhost:$HGPORT1 'rcoll/b/d/file/tip/d?style=raw' 200 Script output follows d Test intermediate directories $ get-with-headers.py localhost:$HGPORT1 'rcoll/notrepo/?style=raw' 200 Script output follows /rcoll/notrepo/e/ /rcoll/notrepo/f/ Test repositories inside intermediate directories $ get-with-headers.py localhost:$HGPORT1 'rcoll/notrepo/e/file/tip/e?style=raw' 200 Script output follows e Test subrepositories inside intermediate directories $ get-with-headers.py localhost:$HGPORT1 'rcoll/notrepo/f/f2/file/tip/f2?style=raw' 200 Script output follows f2 Test [paths] '*' in a repo root $ hg id http://localhost:$HGPORT1/astar 8580ff50825a $ killdaemons.py $ cat > paths.conf < [paths] > t/a = $root/a > t/b = $root/b > c = $root/c > EOF $ hg serve -p $HGPORT1 -d --pid-file=hg.pid --webdir-conf paths.conf \ > -A access-paths.log -E error-paths-5.log $ cat hg.pid >> $DAEMON_PIDS $ get-with-headers.py localhost:$HGPORT1 '?style=raw' 200 Script output follows 
/t/a/ /t/b/ /c/ $ get-with-headers.py localhost:$HGPORT1 't/?style=raw' 200 Script output follows /t/a/ /t/b/ Test collapse = True $ killdaemons.py $ cat >> paths.conf < [web] > collapse=true > EOF $ hg serve -p $HGPORT1 -d --pid-file=hg.pid --webdir-conf paths.conf \ > -A access-paths.log -E error-paths-6.log $ cat hg.pid >> $DAEMON_PIDS $ get-with-headers.py localhost:$HGPORT1 '?style=raw' 200 Script output follows /t/ /c/ $ get-with-headers.py localhost:$HGPORT1 't/?style=raw' 200 Script output follows /t/a/ /t/b/ test descend = False $ killdaemons.py $ cat >> paths.conf < descend=false > EOF $ hg serve -p $HGPORT1 -d --pid-file=hg.pid --webdir-conf paths.conf \ > -A access-paths.log -E error-paths-7.log $ cat hg.pid >> $DAEMON_PIDS $ get-with-headers.py localhost:$HGPORT1 '?style=raw' 200 Script output follows /c/ $ get-with-headers.py localhost:$HGPORT1 't/?style=raw' 200 Script output follows /t/a/ /t/b/ $ killdaemons.py $ cat > paths.conf < [paths] > nostore = $root/nostore > inexistent = $root/inexistent > EOF $ hg serve -p $HGPORT1 -d --pid-file=hg.pid --webdir-conf paths.conf \ > -A access-paths.log -E error-paths-8.log $ cat hg.pid >> $DAEMON_PIDS test inexistent and inaccessible repo should be ignored silently $ get-with-headers.py localhost:$HGPORT1 '' 200 Script output follows Mercurial repositories index test listening address/port specified by web-conf (issue4699): $ killdaemons.py $ cat >> paths.conf < [web] > address = localhost > port = $HGPORT1 > EOF $ hg serve -d --pid-file=hg.pid --web-conf paths.conf \ > -A access-paths.log -E error-paths-9.log listening at http://*:$HGPORT1/ (bound to 127.0.0.1:$HGPORT1) (glob) $ cat hg.pid >> $DAEMON_PIDS $ get-with-headers.py localhost:$HGPORT1 '?style=raw' 200 Script output follows test --port option overrides web.port: $ killdaemons.py $ hg serve -p $HGPORT2 -d -v --pid-file=hg.pid --web-conf paths.conf \ > -A access-paths.log -E error-paths-10.log listening at http://*:$HGPORT2/ (bound to 
127.0.0.1:$HGPORT2) (glob) $ cat hg.pid >> $DAEMON_PIDS $ get-with-headers.py localhost:$HGPORT2 '?style=raw' 200 Script output follows $ killdaemons.py $ cat > collections.conf < [collections] > $root=$root > EOF $ hg serve --config web.baseurl=http://hg.example.com:8080/ -p $HGPORT2 -d \ > --pid-file=hg.pid --webdir-conf collections.conf \ > -A access-collections.log -E error-collections.log $ cat hg.pid >> $DAEMON_PIDS collections: should succeed $ get-with-headers.py localhost:$HGPORT2 '?style=raw' 200 Script output follows /a/ /a/.hg/patches/ /b/ /c/ /notrepo/e/ /notrepo/f/ $ get-with-headers.py localhost:$HGPORT2 'a/file/tip/a?style=raw' 200 Script output follows a $ get-with-headers.py localhost:$HGPORT2 'b/file/tip/b?style=raw' 200 Script output follows b $ get-with-headers.py localhost:$HGPORT2 'c/file/tip/c?style=raw' 200 Script output follows c atom-log with basedir / $ get-with-headers.py localhost:$HGPORT2 'a/atom-log' | grep ' rss-log with basedir / $ get-with-headers.py localhost:$HGPORT2 'a/rss-log' | grep 'http://hg.example.com:8080/a/rev/8580ff50825a $ killdaemons.py $ hg serve --config web.baseurl=http://hg.example.com:8080/foo/ -p $HGPORT2 -d \ > --pid-file=hg.pid --webdir-conf collections.conf \ > -A access-collections-2.log -E error-collections-2.log $ cat hg.pid >> $DAEMON_PIDS atom-log with basedir /foo/ $ get-with-headers.py localhost:$HGPORT2 'a/atom-log' | grep ' rss-log with basedir /foo/ $ get-with-headers.py localhost:$HGPORT2 'a/rss-log' | grep 'http://hg.example.com:8080/foo/a/rev/8580ff50825a Path refreshing works as expected $ killdaemons.py $ mkdir $root/refreshtest $ hg init $root/refreshtest/a $ cat > paths.conf << EOF > [paths] > / = $root/refreshtest/* > EOF $ hg serve -p $HGPORT1 -d --pid-file hg.pid --webdir-conf paths.conf $ cat hg.pid >> $DAEMON_PIDS $ get-with-headers.py localhost:$HGPORT1 '?style=raw' 200 Script output follows /a/ By default refreshing occurs every 20s and a new repo won't be listed immediately. 
$ hg init $root/refreshtest/b $ get-with-headers.py localhost:$HGPORT1 '?style=raw' 200 Script output follows /a/ Restart the server with no refresh interval. New repo should appear immediately. $ killdaemons.py $ cat > paths.conf << EOF > [web] > refreshinterval = -1 > [paths] > / = $root/refreshtest/* > EOF $ hg serve -p $HGPORT1 -d --pid-file hg.pid --webdir-conf paths.conf $ cat hg.pid >> $DAEMON_PIDS $ get-with-headers.py localhost:$HGPORT1 '?style=raw' 200 Script output follows /a/ /b/ $ hg init $root/refreshtest/c $ get-with-headers.py localhost:$HGPORT1 '?style=raw' 200 Script output follows /a/ /b/ /c/ paths errors 1 $ cat error-paths-1.log paths errors 2 $ cat error-paths-2.log paths errors 3 $ cat error-paths-3.log paths errors 4 $ cat error-paths-4.log paths errors 5 $ cat error-paths-5.log paths errors 6 $ cat error-paths-6.log paths errors 7 $ cat error-paths-7.log paths errors 8 $ cat error-paths-8.log paths errors 9 $ cat error-paths-9.log paths errors 10 $ cat error-paths-10.log collections errors $ cat error-collections.log collections errors 2 $ cat error-collections-2.log mercurial-3.7.3/tests/test-mq-subrepo.t0000644000175000017500000003637212676531525017540 0ustar mpmmpm00000000000000 $ cat <> $HGRCPATH > [ui] > commitsubrepos = Yes > [extensions] > mq = > record = > [diff] > nodates = 1 > EOF $ stdin=`pwd`/stdin.tmp fn to create new repository w/dirty subrepo, and cd into it $ mkrepo() { > hg init $1 > cd $1 > hg qinit > } fn to create dirty subrepo $ mksubrepo() { > hg init $1 > cd $1 > echo a > a > hg add > cd .. > } $ testadd() { > cat - > "$stdin" > mksubrepo sub > echo sub = sub >> .hgsub > hg add .hgsub > echo % abort when adding .hgsub w/dirty subrepo > hg status -S > echo '%' $* > cat "$stdin" | hg $* > echo [$?] 
> hg -R sub ci -m0sub > echo % update substate when adding .hgsub w/clean updated subrepo > hg status -S > echo '%' $* > cat "$stdin" | hg $* > hg debugsub > } $ testmod() { > cat - > "$stdin" > mksubrepo sub2 > echo sub2 = sub2 >> .hgsub > echo % abort when modifying .hgsub w/dirty subrepo > hg status -S > echo '%' $* > cat "$stdin" | hg $* > echo [$?] > hg -R sub2 ci -m0sub2 > echo % update substate when modifying .hgsub w/clean updated subrepo > hg status -S > echo '%' $* > cat "$stdin" | hg $* > hg debugsub > } $ testrm1() { > cat - > "$stdin" > mksubrepo sub3 > echo sub3 = sub3 >> .hgsub > hg ci -Aqmsub3 > $EXTRA > echo b >> sub3/a > hg rm .hgsub > echo % update substate when removing .hgsub w/dirty subrepo > hg status -S > echo '%' $* > cat "$stdin" | hg $* > echo % debugsub should be empty > hg debugsub > } $ testrm2() { > cat - > "$stdin" > mksubrepo sub4 > echo sub4 = sub4 >> .hgsub > hg ci -Aqmsub4 > $EXTRA > hg rm .hgsub > echo % update substate when removing .hgsub w/clean updated subrepo > hg status -S > echo '%' $* > cat "$stdin" | hg $* > echo % debugsub should be empty > hg debugsub > } handle subrepos safely on qnew $ mkrepo repo-2499-qnew $ testadd qnew -X path:no-effect -m0 0.diff adding a % abort when adding .hgsub w/dirty subrepo A .hgsub A sub/a % qnew -X path:no-effect -m0 0.diff abort: uncommitted changes in subrepository 'sub' [255] % update substate when adding .hgsub w/clean updated subrepo A .hgsub A sub/a % qnew -X path:no-effect -m0 0.diff path sub source sub revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31 $ testmod qnew --cwd .. -R repo-2499-qnew -X path:no-effect -m1 1.diff adding a % abort when modifying .hgsub w/dirty subrepo M .hgsub A sub2/a % qnew --cwd .. -R repo-2499-qnew -X path:no-effect -m1 1.diff abort: uncommitted changes in subrepository 'sub2' [255] % update substate when modifying .hgsub w/clean updated subrepo M .hgsub A sub2/a % qnew --cwd .. 
-R repo-2499-qnew -X path:no-effect -m1 1.diff path sub source sub revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31 path sub2 source sub2 revision 1f94c7611cc6b74f5a17b16121a1170d44776845 $ hg qpop -qa patch queue now empty $ testrm1 qnew -m2 2.diff adding a % update substate when removing .hgsub w/dirty subrepo M sub3/a R .hgsub % qnew -m2 2.diff % debugsub should be empty $ hg qpop -qa patch queue now empty $ testrm2 qnew -m3 3.diff adding a % update substate when removing .hgsub w/clean updated subrepo R .hgsub % qnew -m3 3.diff % debugsub should be empty $ cd .. handle subrepos safely on qrefresh $ mkrepo repo-2499-qrefresh $ hg qnew -m0 0.diff $ testadd qrefresh adding a % abort when adding .hgsub w/dirty subrepo A .hgsub A sub/a % qrefresh abort: uncommitted changes in subrepository 'sub' [255] % update substate when adding .hgsub w/clean updated subrepo A .hgsub A sub/a % qrefresh path sub source sub revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31 $ hg qnew -m1 1.diff $ testmod qrefresh adding a % abort when modifying .hgsub w/dirty subrepo M .hgsub A sub2/a % qrefresh abort: uncommitted changes in subrepository 'sub2' [255] % update substate when modifying .hgsub w/clean updated subrepo M .hgsub A sub2/a % qrefresh path sub source sub revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31 path sub2 source sub2 revision 1f94c7611cc6b74f5a17b16121a1170d44776845 $ hg qpop -qa patch queue now empty $ EXTRA='hg qnew -m2 2.diff' $ testrm1 qrefresh adding a % update substate when removing .hgsub w/dirty subrepo M sub3/a R .hgsub % qrefresh % debugsub should be empty $ hg qpop -qa patch queue now empty $ EXTRA='hg qnew -m3 3.diff' $ testrm2 qrefresh adding a % update substate when removing .hgsub w/clean updated subrepo R .hgsub % qrefresh % debugsub should be empty $ EXTRA= $ cd .. 
handle subrepos safely on qpush/qpop (and we cannot qpop / qpush with a modified subrepo) $ mkrepo repo-2499-qpush $ mksubrepo sub adding a $ hg -R sub ci -m0sub $ echo sub = sub > .hgsub $ hg add .hgsub $ hg commit -m0 $ hg debugsub path sub source sub revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31 $ echo foo > ./sub/a $ hg -R sub commit -m foo $ hg commit -m1 $ hg qimport -r "0:tip" $ hg -R sub id --id aa037b301eba qpop $ hg -R sub update 0000 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg qpop abort: local changed subrepos found, qrefresh first [255] $ hg revert sub reverting subrepo sub adding sub/a (glob) $ hg qpop popping 1 now at: 0 $ hg status -AS C .hgsub C .hgsubstate C sub/a $ hg -R sub id --id b2fdb12cd82b qpush $ hg -R sub update 0000 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg qpush abort: local changed subrepos found, qrefresh first [255] $ hg revert sub reverting subrepo sub adding sub/a (glob) $ hg qpush applying 1 subrepository sub diverged (local revision: b2fdb12cd82b, remote revision: aa037b301eba) (M)erge, keep (l)ocal or keep (r)emote? m 1 files updated, 0 files merged, 0 files removed, 0 files unresolved now at: 1 $ hg status -AS C .hgsub C .hgsubstate C sub/a $ hg -R sub id --id aa037b301eba $ cd .. handle subrepos safely on qrecord $ mkrepo repo-2499-qrecord $ testadd qrecord --config ui.interactive=1 -m0 0.diff < y > y > EOF adding a % abort when adding .hgsub w/dirty subrepo A .hgsub A sub/a % qrecord --config ui.interactive=1 -m0 0.diff diff --git a/.hgsub b/.hgsub new file mode 100644 examine changes to '.hgsub'? [Ynesfdaq?] y @@ -0,0 +1,1 @@ +sub = sub record this change to '.hgsub'? [Ynesfdaq?] 
y warning: subrepo spec file '.hgsub' not found abort: uncommitted changes in subrepository 'sub' [255] % update substate when adding .hgsub w/clean updated subrepo A .hgsub A sub/a % qrecord --config ui.interactive=1 -m0 0.diff diff --git a/.hgsub b/.hgsub new file mode 100644 examine changes to '.hgsub'? [Ynesfdaq?] y @@ -0,0 +1,1 @@ +sub = sub record this change to '.hgsub'? [Ynesfdaq?] y warning: subrepo spec file '.hgsub' not found path sub source sub revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31 $ testmod qrecord --config ui.interactive=1 -m1 1.diff < y > y > EOF adding a % abort when modifying .hgsub w/dirty subrepo M .hgsub A sub2/a % qrecord --config ui.interactive=1 -m1 1.diff diff --git a/.hgsub b/.hgsub 1 hunks, 1 lines changed examine changes to '.hgsub'? [Ynesfdaq?] y @@ -1,1 +1,2 @@ sub = sub +sub2 = sub2 record this change to '.hgsub'? [Ynesfdaq?] y abort: uncommitted changes in subrepository 'sub2' [255] % update substate when modifying .hgsub w/clean updated subrepo M .hgsub A sub2/a % qrecord --config ui.interactive=1 -m1 1.diff diff --git a/.hgsub b/.hgsub 1 hunks, 1 lines changed examine changes to '.hgsub'? [Ynesfdaq?] y @@ -1,1 +1,2 @@ sub = sub +sub2 = sub2 record this change to '.hgsub'? [Ynesfdaq?] y path sub source sub revision b2fdb12cd82b021c3b7053d67802e77b6eeaee31 path sub2 source sub2 revision 1f94c7611cc6b74f5a17b16121a1170d44776845 $ hg qpop -qa patch queue now empty $ testrm1 qrecord --config ui.interactive=1 -m2 2.diff < y > y > EOF adding a % update substate when removing .hgsub w/dirty subrepo M sub3/a R .hgsub % qrecord --config ui.interactive=1 -m2 2.diff diff --git a/.hgsub b/.hgsub deleted file mode 100644 examine changes to '.hgsub'? [Ynesfdaq?] 
y % debugsub should be empty $ hg qpop -qa patch queue now empty $ testrm2 qrecord --config ui.interactive=1 -m3 3.diff < y > y > EOF adding a % update substate when removing .hgsub w/clean updated subrepo R .hgsub % qrecord --config ui.interactive=1 -m3 3.diff diff --git a/.hgsub b/.hgsub deleted file mode 100644 examine changes to '.hgsub'? [Ynesfdaq?] y % debugsub should be empty $ cd .. correctly handle subrepos with patch queues $ mkrepo repo-subrepo-with-queue $ mksubrepo sub adding a $ hg -R sub qnew sub0.diff $ echo sub = sub >> .hgsub $ hg add .hgsub $ hg qnew 0.diff $ cd .. check whether MQ operations can import updated .hgsubstate correctly both into 'revision' and 'patch file under .hg/patches': $ hg init importing-hgsubstate $ cd importing-hgsubstate $ echo a > a $ hg commit -u test -d '0 0' -Am '#0 in parent' adding a $ hg init sub $ echo sa > sub/sa $ hg -R sub commit -u test -d '0 0' -Am '#0 in sub' adding sa $ echo 'sub = sub' > .hgsub $ touch .hgsubstate $ hg add .hgsub .hgsubstate $ hg qnew -u test -d '0 0' import-at-qnew $ hg -R sub parents --template '{node} sub\n' b6f6e9c41f3dfd374a6d2ed4535c87951cf979cf sub $ cat .hgsubstate b6f6e9c41f3dfd374a6d2ed4535c87951cf979cf sub $ hg diff -c tip diff -r f499373e340c -r f69e96d86e75 .hgsub --- /dev/null +++ b/.hgsub @@ -0,0 +1,1 @@ +sub = sub diff -r f499373e340c -r f69e96d86e75 .hgsubstate --- /dev/null +++ b/.hgsubstate @@ -0,0 +1,1 @@ +b6f6e9c41f3dfd374a6d2ed4535c87951cf979cf sub $ cat .hg/patches/import-at-qnew # HG changeset patch # User test # Date 0 0 # Parent f499373e340cdca5d01dee904aeb42dd2a325e71 diff -r f499373e340c -r f69e96d86e75 .hgsub --- /dev/null +++ b/.hgsub @@ -0,0 +1,1 @@ +sub = sub diff -r f499373e340c -r f69e96d86e75 .hgsubstate --- /dev/null +++ b/.hgsubstate @@ -0,0 +1,1 @@ +b6f6e9c41f3dfd374a6d2ed4535c87951cf979cf sub $ hg parents --template '{node}\n' f69e96d86e75a6d4fd88285dc9697acb23951041 $ hg parents --template '{files}\n' .hgsub .hgsubstate check also whether qnew not 
including ".hgsubstate" explicitly causes as same result (in node hash) as one including it. $ hg qpop -a -q patch queue now empty $ hg qdelete import-at-qnew $ echo 'sub = sub' > .hgsub $ hg add .hgsub $ rm -f .hgsubstate $ hg qnew -u test -d '0 0' import-at-qnew $ hg parents --template '{node}\n' f69e96d86e75a6d4fd88285dc9697acb23951041 $ hg parents --template '{files}\n' .hgsub .hgsubstate check whether qrefresh imports updated .hgsubstate correctly $ hg qpop popping import-at-qnew patch queue now empty $ hg qpush applying import-at-qnew now at: import-at-qnew $ hg parents --template '{files}\n' .hgsub .hgsubstate $ hg qnew import-at-qrefresh $ echo sb > sub/sb $ hg -R sub commit -u test -d '0 0' -Am '#1 in sub' adding sb $ hg qrefresh -u test -d '0 0' $ hg -R sub parents --template '{node} sub\n' 88ac1bef5ed43b689d1d200b59886b675dec474b sub $ cat .hgsubstate 88ac1bef5ed43b689d1d200b59886b675dec474b sub $ hg diff -c tip diff -r 05b056bb9c8c -r d987bec230f4 .hgsubstate --- a/.hgsubstate +++ b/.hgsubstate @@ -1,1 +1,1 @@ -b6f6e9c41f3dfd374a6d2ed4535c87951cf979cf sub +88ac1bef5ed43b689d1d200b59886b675dec474b sub $ cat .hg/patches/import-at-qrefresh # HG changeset patch # User test # Date 0 0 # Parent 05b056bb9c8c05ff15258b84fd42ab3527271033 diff -r 05b056bb9c8c .hgsubstate --- a/.hgsubstate +++ b/.hgsubstate @@ -1,1 +1,1 @@ -b6f6e9c41f3dfd374a6d2ed4535c87951cf979cf sub +88ac1bef5ed43b689d1d200b59886b675dec474b sub $ hg parents --template '{files}\n' .hgsubstate $ hg qrefresh -u test -d '0 0' $ cat .hgsubstate 88ac1bef5ed43b689d1d200b59886b675dec474b sub $ hg diff -c tip diff -r 05b056bb9c8c -r d987bec230f4 .hgsubstate --- a/.hgsubstate +++ b/.hgsubstate @@ -1,1 +1,1 @@ -b6f6e9c41f3dfd374a6d2ed4535c87951cf979cf sub +88ac1bef5ed43b689d1d200b59886b675dec474b sub $ cat .hg/patches/import-at-qrefresh # HG changeset patch # User test # Date 0 0 # Parent 05b056bb9c8c05ff15258b84fd42ab3527271033 diff -r 05b056bb9c8c .hgsubstate --- a/.hgsubstate +++ b/.hgsubstate @@ -1,1 
+1,1 @@ -b6f6e9c41f3dfd374a6d2ed4535c87951cf979cf sub +88ac1bef5ed43b689d1d200b59886b675dec474b sub $ hg parents --template '{files}\n' .hgsubstate $ hg update -C tip 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg qpop -a popping import-at-qrefresh popping import-at-qnew patch queue now empty $ hg -R sub update -C 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo 'sub = sub' > .hgsub $ hg commit -Am '#1 in parent' adding .hgsub $ hg -R sub update -C 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg commit -Am '#2 in parent (but will be rolled back soon)' $ hg rollback repository tip rolled back to revision 1 (undo commit) working directory now based on revision 1 $ hg status M .hgsubstate $ hg qnew -u test -d '0 0' checkstate-at-qnew $ hg -R sub parents --template '{node} sub\n' 88ac1bef5ed43b689d1d200b59886b675dec474b sub $ cat .hgsubstate 88ac1bef5ed43b689d1d200b59886b675dec474b sub $ hg diff -c tip diff -r 4d91eb2fa1d1 -r 1259c112d884 .hgsubstate --- a/.hgsubstate +++ b/.hgsubstate @@ -1,1 +1,1 @@ -b6f6e9c41f3dfd374a6d2ed4535c87951cf979cf sub +88ac1bef5ed43b689d1d200b59886b675dec474b sub $ cat .hg/patches/checkstate-at-qnew # HG changeset patch # User test # Date 0 0 # Parent 4d91eb2fa1d1b22ec513347b9cd06f6b49d470fa diff -r 4d91eb2fa1d1 -r 1259c112d884 .hgsubstate --- a/.hgsubstate +++ b/.hgsubstate @@ -1,1 +1,1 @@ -b6f6e9c41f3dfd374a6d2ed4535c87951cf979cf sub +88ac1bef5ed43b689d1d200b59886b675dec474b sub $ hg parents --template '{files}\n' .hgsubstate check whether qrefresh not including ".hgsubstate" explicitly causes as same result (in node hash) as one including it. 
$ hg update -C -q 0 $ hg qpop -a -q patch queue now empty $ hg qnew -u test -d '0 0' add-hgsub-at-qrefresh $ echo 'sub = sub' > .hgsub $ echo > .hgsubstate $ hg add .hgsub .hgsubstate $ hg qrefresh -u test -d '0 0' $ hg parents --template '{node}\n' 7c48c35501aae6770ed9c2517014628615821a8e $ hg parents --template '{files}\n' .hgsub .hgsubstate $ hg qpop -a -q patch queue now empty $ hg qdelete add-hgsub-at-qrefresh $ hg qnew -u test -d '0 0' add-hgsub-at-qrefresh $ echo 'sub = sub' > .hgsub $ hg add .hgsub $ rm -f .hgsubstate $ hg qrefresh -u test -d '0 0' $ hg parents --template '{node}\n' 7c48c35501aae6770ed9c2517014628615821a8e $ hg parents --template '{files}\n' .hgsub .hgsubstate $ cd .. $ cd .. mercurial-3.7.3/tests/test-censor.t0000644000175000017500000003223012676531525016724 0ustar mpmmpm00000000000000 $ cat >> $HGRCPATH < [extensions] > censor= > EOF $ cp $HGRCPATH $HGRCPATH.orig Create repo with unimpeachable content $ hg init r $ cd r $ echo 'Initially untainted file' > target $ echo 'Normal file here' > bystander $ hg add target bystander $ hg ci -m init Clone repo so we can test pull later $ cd .. $ hg clone r rpull updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd r Introduce content which will ultimately require censorship. Name the first censored node C1, second C2, and so on $ echo 'Tainted file' > target $ echo 'Passwords: hunter2' >> target $ hg ci -m taint target $ C1=`hg id --debug -i` $ echo 'hunter3' >> target $ echo 'Normal file v2' > bystander $ hg ci -m moretaint target bystander $ C2=`hg id --debug -i` Add a new sanitized versions to correct our mistake. 
Name the first head H1, the second head H2, and so on $ echo 'Tainted file is now sanitized' > target $ hg ci -m sanitized target $ H1=`hg id --debug -i` $ hg update -r $C2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo 'Tainted file now super sanitized' > target $ hg ci -m 'super sanitized' target created new head $ H2=`hg id --debug -i` Verify target contents before censorship at each revision $ hg cat -r $H1 target Tainted file is now sanitized $ hg cat -r $H2 target Tainted file now super sanitized $ hg cat -r $C2 target Tainted file Passwords: hunter2 hunter3 $ hg cat -r $C1 target Tainted file Passwords: hunter2 $ hg cat -r 0 target Initially untainted file Try to censor revision with too large of a tombstone message $ hg censor -r $C1 -t 'blah blah blah blah blah blah blah blah bla' target abort: censor tombstone must be no longer than censored data [255] Censor revision with 2 offenses (this also tests file pattern matching: path relative to cwd case) $ mkdir -p foo/bar/baz $ hg --cwd foo/bar/baz censor -r $C2 -t "remove password" ../../../target $ hg cat -r $H1 target Tainted file is now sanitized $ hg cat -r $H2 target Tainted file now super sanitized $ hg cat -r $C2 target abort: censored node: 1e0247a9a4b7 (set censor.policy to ignore errors) [255] $ hg cat -r $C1 target Tainted file Passwords: hunter2 $ hg cat -r 0 target Initially untainted file Censor revision with 1 offense (this also tests file pattern matching: with 'path:' scheme) $ hg --cwd foo/bar/baz censor -r $C1 path:target $ hg cat -r $H1 target Tainted file is now sanitized $ hg cat -r $H2 target Tainted file now super sanitized $ hg cat -r $C2 target abort: censored node: 1e0247a9a4b7 (set censor.policy to ignore errors) [255] $ hg cat -r $C1 target abort: censored node: 613bc869fceb (set censor.policy to ignore errors) [255] $ hg cat -r 0 target Initially untainted file Can only checkout target at uncensored revisions, -X is workaround for --all $ hg revert 
-r $C2 target abort: censored node: 1e0247a9a4b7 (set censor.policy to ignore errors) [255] $ hg revert -r $C1 target abort: censored node: 613bc869fceb (set censor.policy to ignore errors) [255] $ hg revert -r $C1 --all reverting bystander reverting target abort: censored node: 613bc869fceb (set censor.policy to ignore errors) [255] $ hg revert -r $C1 --all -X target $ cat target Tainted file now super sanitized $ hg revert -r 0 --all reverting target $ cat target Initially untainted file $ hg revert -r $H2 --all reverting bystander reverting target $ cat target Tainted file now super sanitized Uncensored file can be viewed at any revision $ hg cat -r $H1 bystander Normal file v2 $ hg cat -r $C2 bystander Normal file v2 $ hg cat -r $C1 bystander Normal file here $ hg cat -r 0 bystander Normal file here Can update to children of censored revision $ hg update -r $H1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat target Tainted file is now sanitized $ hg update -r $H2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat target Tainted file now super sanitized Set censor policy to abort in trusted $HGRC so hg verify fails $ cp $HGRCPATH.orig $HGRCPATH $ cat >> $HGRCPATH < [censor] > policy = abort > EOF Repo fails verification due to censorship $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files target@1: censored file data target@2: censored file data 2 files, 5 changesets, 7 total revisions 2 integrity errors encountered! 
(first damaged changeset appears to be 1) [1] Cannot update to revision with censored data $ hg update -r $C2 abort: censored node: 1e0247a9a4b7 (set censor.policy to ignore errors) [255] $ hg update -r $C1 abort: censored node: 613bc869fceb (set censor.policy to ignore errors) [255] $ hg update -r 0 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg update -r $H2 2 files updated, 0 files merged, 0 files removed, 0 files unresolved Set censor policy to ignore in trusted $HGRC so hg verify passes $ cp $HGRCPATH.orig $HGRCPATH $ cat >> $HGRCPATH < [censor] > policy = ignore > EOF Repo passes verification with warnings with explicit config $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 5 changesets, 7 total revisions May update to revision with censored data with explicit config $ hg update -r $C2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat target $ hg update -r $C1 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat target $ hg update -r 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat target Initially untainted file $ hg update -r $H2 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat target Tainted file now super sanitized Can merge in revision with censored data. Test requires one branch of history with the file censored, but we can't censor at a head, so advance H1. 
$ hg update -r $H1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ C3=$H1 $ echo 'advanced head H1' > target $ hg ci -m 'advance head H1' target $ H1=`hg id --debug -i` $ hg censor -r $C3 target $ hg update -r $H2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge -r $C3 merging target 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) Revisions present in repository heads may not be censored $ hg update -C -r $H2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg censor -r $H2 target abort: cannot censor file in heads (78a8fc215e79) (clean/delete and commit first) [255] $ echo 'twiddling thumbs' > bystander $ hg ci -m 'bystander commit' $ H2=`hg id --debug -i` $ hg censor -r "$H2^" target abort: cannot censor file in heads (efbe78065929) (clean/delete and commit first) [255] Cannot censor working directory $ echo 'seriously no passwords' > target $ hg ci -m 'extend second head arbitrarily' target $ H2=`hg id --debug -i` $ hg update -r "$H2^" 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg censor -r . 
target abort: cannot censor working directory (clean/delete/update first) [255] $ hg update -r $H2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved Can re-add file after being deleted + censored $ C4=$H2 $ hg rm target $ hg ci -m 'delete target so it may be censored' $ H2=`hg id --debug -i` $ hg censor -r $C4 target $ hg cat -r $C4 target $ hg cat -r "$H2^^" target Tainted file now super sanitized $ echo 'fresh start' > target $ hg add target $ hg ci -m reincarnated target $ H2=`hg id --debug -i` $ hg cat -r $H2 target fresh start $ hg cat -r "$H2^" target target: no such file in rev 452ec1762369 [1] $ hg cat -r $C4 target $ hg cat -r "$H2^^^" target Tainted file now super sanitized Can censor after revlog has expanded to no longer permit inline storage $ for x in `python $TESTDIR/seq.py 0 50000` > do > echo "Password: hunter$x" >> target > done $ hg ci -m 'add 100k passwords' $ H2=`hg id --debug -i` $ C5=$H2 $ hg revert -r "$H2^" target $ hg ci -m 'cleaned 100k passwords' $ H2=`hg id --debug -i` $ hg censor -r $C5 target $ hg cat -r $C5 target $ hg cat -r $H2 target fresh start Repo with censored nodes can be cloned and cloned nodes are censored $ cd .. 
$ hg clone r rclone updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd rclone $ hg cat -r $H1 target advanced head H1 $ hg cat -r $H2~5 target Tainted file now super sanitized $ hg cat -r $C2 target $ hg cat -r $C1 target $ hg cat -r 0 target Initially untainted file $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 12 changesets, 13 total revisions Repo cloned before tainted content introduced can pull censored nodes $ cd ../rpull $ hg cat -r tip target Initially untainted file $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 1 changesets, 2 total revisions $ hg pull -r $H1 -r $H2 pulling from $TESTTMP/r (glob) searching for changes adding changesets adding manifests adding file changes added 11 changesets with 11 changes to 2 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg update 4 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat target Tainted file now super sanitized $ hg cat -r $H1 target advanced head H1 $ hg cat -r $H2~5 target Tainted file now super sanitized $ hg cat -r $C2 target $ hg cat -r $C1 target $ hg cat -r 0 target Initially untainted file $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 12 changesets, 13 total revisions Censored nodes can be pushed if they censor previously unexchanged nodes $ echo 'Passwords: hunter2hunter2' > target $ hg ci -m 're-add password from clone' target created new head $ H3=`hg id --debug -i` $ REV=$H3 $ echo 'Re-sanitized; nothing to see here' > target $ hg ci -m 're-sanitized' target $ H2=`hg id --debug -i` $ CLEANREV=$H2 $ hg cat -r $REV target Passwords: hunter2hunter2 $ hg censor -r $REV target $ hg cat -r $REV target $ hg cat -r $CLEANREV target Re-sanitized; nothing to see here $ hg push -f -r 
$H2 pushing to $TESTTMP/r (glob) searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files (+1 heads) $ cd ../r $ hg cat -r $REV target $ hg cat -r $CLEANREV target Re-sanitized; nothing to see here $ hg update $CLEANREV 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat target Re-sanitized; nothing to see here Censored nodes can be bundled up and unbundled in another repo $ hg bundle --base 0 ../pwbundle 13 changesets found $ cd ../rclone $ hg unbundle ../pwbundle adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files (+1 heads) (run 'hg heads .' to see heads, 'hg merge' to merge) $ hg cat -r $REV target $ hg cat -r $CLEANREV target Re-sanitized; nothing to see here $ hg update $CLEANREV 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat target Re-sanitized; nothing to see here $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 14 changesets, 15 total revisions Censored nodes can be imported on top of censored nodes, consecutively $ hg init ../rimport $ hg bundle --base 1 ../rimport/splitbundle 12 changesets found $ cd ../rimport $ hg pull -r $H1 -r $H2 ../r pulling from ../r adding changesets adding manifests adding file changes added 8 changesets with 10 changes to 2 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg unbundle splitbundle adding changesets adding manifests adding file changes added 6 changesets with 5 changes to 2 files (+1 heads) (run 'hg heads .' 
to see heads, 'hg merge' to merge) $ hg update $H2 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat target Re-sanitized; nothing to see here $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 14 changesets, 15 total revisions $ cd ../r Can import bundle where first revision of a file is censored $ hg init ../rinit $ hg censor -r 0 target $ hg bundle -r 0 --base null ../rinit/initbundle 1 changesets found $ cd ../rinit $ hg unbundle initbundle adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files (run 'hg update' to get a working copy) $ hg cat -r 0 target mercurial-3.7.3/tests/test-generaldelta.t0000644000175000017500000001344512676531525020071 0ustar mpmmpm00000000000000Check whether size of generaldelta revlog is not bigger than its regular equivalent. Test would fail if generaldelta was naive implementation of parentdelta: third manifest revision would be fully inserted due to big distance from its paren revision (zero). $ hg init repo --config format.generaldelta=no --config format.usegeneraldelta=no $ cd repo $ echo foo > foo $ echo bar > bar $ echo baz > baz $ hg commit -q -Am boo $ hg clone --pull . ../gdrepo -q --config format.generaldelta=yes $ for r in 1 2 3; do > echo $r > foo > hg commit -q -m $r > hg up -q -r 0 > hg pull . -q -r $r -R ../gdrepo > done $ cd .. >>> import os >>> regsize = os.stat("repo/.hg/store/00manifest.i").st_size >>> gdsize = os.stat("gdrepo/.hg/store/00manifest.i").st_size >>> if regsize < gdsize: ... print 'generaldata increased size of manifest' Verify rev reordering doesnt create invalid bundles (issue4462) This requires a commit tree that when pulled will reorder manifest revs such that the second manifest to create a file rev will be ordered before the first manifest to create that file rev. We also need to do a partial pull to ensure reordering happens. 
At the end we verify the linkrev points at the earliest commit. $ hg init server --config format.generaldelta=True $ cd server $ touch a $ hg commit -Aqm a $ echo x > x $ echo y > y $ hg commit -Aqm xy $ hg up -q '.^' $ echo x > x $ echo z > z $ hg commit -Aqm xz $ hg up -q 1 $ echo b > b $ hg commit -Aqm b $ hg merge -q 2 $ hg commit -Aqm merge $ echo c > c $ hg commit -Aqm c $ hg log -G -T '{rev} {shortest(node)} {desc}' @ 5 ebb8 c | o 4 baf7 merge |\ | o 3 a129 b | | o | 2 958c xz | | | o 1 f00c xy |/ o 0 3903 a $ cd .. $ hg init client --config format.generaldelta=false --config format.usegeneraldelta=false $ cd client $ hg pull -q ../server -r 4 $ hg debugindex x rev offset length base linkrev nodeid p1 p2 0 0 3 0 1 1406e7411862 000000000000 000000000000 $ cd .. Test "usegeneraldelta" config (repo are general delta, but incoming bundle are not re-deltified) delta coming from the server base delta server are not recompressed. (also include the aggressive version for comparison) $ hg clone repo --pull --config format.usegeneraldelta=1 usegd requesting all changes adding changesets adding manifests adding file changes added 4 changesets with 6 changes to 3 files (+2 heads) updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg clone repo --pull --config format.generaldelta=1 full requesting all changes adding changesets adding manifests adding file changes added 4 changesets with 6 changes to 3 files (+2 heads) updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R repo debugindex -m rev offset length base linkrev nodeid p1 p2 0 0 104 0 0 cef96823c800 000000000000 000000000000 1 104 57 0 1 58ab9a8d541d cef96823c800 000000000000 2 161 57 0 2 134fdc6fd680 cef96823c800 000000000000 3 218 104 3 3 723508934dad cef96823c800 000000000000 $ hg -R usegd debugindex -m rev offset length delta linkrev nodeid p1 p2 0 0 104 -1 0 cef96823c800 000000000000 000000000000 1 104 57 0 1 
58ab9a8d541d cef96823c800 000000000000 2 161 57 1 2 134fdc6fd680 cef96823c800 000000000000 3 218 57 0 3 723508934dad cef96823c800 000000000000 $ hg -R full debugindex -m rev offset length delta linkrev nodeid p1 p2 0 0 104 -1 0 cef96823c800 000000000000 000000000000 1 104 57 0 1 58ab9a8d541d cef96823c800 000000000000 2 161 57 0 2 134fdc6fd680 cef96823c800 000000000000 3 218 57 0 3 723508934dad cef96823c800 000000000000 Test format.aggressivemergedeltas $ hg init --config format.generaldelta=1 aggressive $ cd aggressive $ cat << EOF >> .hg/hgrc > [format] > generaldelta = 1 > EOF $ touch a b c d e $ hg commit -Aqm side1 $ hg up -q null $ touch x y $ hg commit -Aqm side2 - Verify non-aggressive merge uses p1 (commit 1) as delta parent $ hg merge -q 0 $ hg commit -q -m merge $ hg debugindex -m rev offset length delta linkrev nodeid p1 p2 0 0 59 -1 0 8dde941edb6e 000000000000 000000000000 1 59 61 0 1 315c023f341d 000000000000 000000000000 2 120 65 1 2 2ab389a983eb 315c023f341d 8dde941edb6e $ hg strip -q -r . --config extensions.strip= - Verify aggressive merge uses p2 (commit 0) as delta parent $ hg up -q -C 1 $ hg merge -q 0 $ hg commit -q -m merge --config format.aggressivemergedeltas=True $ hg debugindex -m rev offset length delta linkrev nodeid p1 p2 0 0 59 -1 0 8dde941edb6e 000000000000 000000000000 1 59 61 0 1 315c023f341d 000000000000 000000000000 2 120 62 0 2 2ab389a983eb 315c023f341d 8dde941edb6e Test that strip bundle use bundle2 $ hg --config extensions.strip= strip . 0 files updated, 0 files merged, 5 files removed, 0 files unresolved saved backup bundle to $TESTTMP/aggressive/.hg/strip-backup/1c5d4dc9a8b8-6c68e60c-backup.hg (glob) $ hg debugbundle .hg/strip-backup/* Stream params: {'Compression': 'BZ'} changegroup -- "{'version': '02'}" 1c5d4dc9a8b8d6e1750966d343e94db665e7a1e9 $ cd .. 
mercurial-3.7.3/tests/test-lrucachedict.py0000644000175000017500000000334512676531525020257 0ustar mpmmpm00000000000000from mercurial import util def printifpresent(d, xs, name='d'): for x in xs: present = x in d print "'%s' in %s: %s" % (x, name, present) if present: print "%s['%s']: %s" % (name, x, d[x]) def test_lrucachedict(): d = util.lrucachedict(4) d['a'] = 'va' d['b'] = 'vb' d['c'] = 'vc' d['d'] = 'vd' # all of these should be present printifpresent(d, ['a', 'b', 'c', 'd']) # 'a' should be dropped because it was least recently used d['e'] = 've' printifpresent(d, ['a', 'b', 'c', 'd', 'e']) # touch entries in some order (get or set). d['e'] d['c'] = 'vc2' d['d'] d['b'] = 'vb2' # 'e' should be dropped now d['f'] = 'vf' printifpresent(d, ['b', 'c', 'd', 'e', 'f']) d.clear() printifpresent(d, ['b', 'c', 'd', 'e', 'f']) # Now test dicts that aren't full. d = util.lrucachedict(4) d['a'] = 1 d['b'] = 2 d['a'] d['b'] printifpresent(d, ['a', 'b']) # test copy method d = util.lrucachedict(4) d['a'] = 'va3' d['b'] = 'vb3' d['c'] = 'vc3' d['d'] = 'vd3' dc = d.copy() # all of these should be present print "\nAll of these should be present:" printifpresent(dc, ['a', 'b', 'c', 'd'], 'dc') # 'a' should be dropped because it was least recently used print "\nAll of these except 'a' should be present:" dc['e'] = 've3' printifpresent(dc, ['a', 'b', 'c', 'd', 'e'], 'dc') # contents and order of original dict should remain unchanged print "\nThese should be in reverse alphabetical order and read 'v?3':" dc['b'] = 'vb3_new' for k in list(iter(d)): print "d['%s']: %s" % (k, d[k]) if __name__ == '__main__': test_lrucachedict() mercurial-3.7.3/tests/test-hgweb-non-interactive.t0000644000175000017500000000462012676531525021634 0ustar mpmmpm00000000000000Tests if hgweb can run without touching sys.stdin, as is required by the WSGI standard and strictly implemented by mod_wsgi. 
$ hg init repo $ cd repo $ echo foo > bar $ hg add bar $ hg commit -m "test" $ cat > request.py < from mercurial import dispatch > from mercurial.hgweb.hgweb_mod import hgweb > from mercurial.ui import ui > from mercurial import hg > from StringIO import StringIO > import os, sys > > class FileLike(object): > def __init__(self, real): > self.real = real > def fileno(self): > print >> sys.__stdout__, 'FILENO' > return self.real.fileno() > def read(self): > print >> sys.__stdout__, 'READ' > return self.real.read() > def readline(self): > print >> sys.__stdout__, 'READLINE' > return self.real.readline() > > sys.stdin = FileLike(sys.stdin) > errors = StringIO() > input = StringIO() > output = StringIO() > > def startrsp(status, headers): > print '---- STATUS' > print status > print '---- HEADERS' > print [i for i in headers if i[0] != 'ETag'] > print '---- DATA' > return output.write > > env = { > 'wsgi.version': (1, 0), > 'wsgi.url_scheme': 'http', > 'wsgi.errors': errors, > 'wsgi.input': input, > 'wsgi.multithread': False, > 'wsgi.multiprocess': False, > 'wsgi.run_once': False, > 'REQUEST_METHOD': 'GET', > 'SCRIPT_NAME': '', > 'PATH_INFO': '', > 'QUERY_STRING': '', > 'SERVER_NAME': '127.0.0.1', > 'SERVER_PORT': os.environ['HGPORT'], > 'SERVER_PROTOCOL': 'HTTP/1.0' > } > > i = hgweb('.') > for c in i(env, startrsp): > pass > print '---- ERRORS' > print errors.getvalue() > print '---- OS.ENVIRON wsgi variables' > print sorted([x for x in os.environ if x.startswith('wsgi')]) > print '---- request.ENVIRON wsgi variables' > with i._obtainrepo() as repo: > print sorted([x for x in repo.ui.environ if x.startswith('wsgi')]) > EOF $ python request.py ---- STATUS 200 Script output follows ---- HEADERS [('Content-Type', 'text/html; charset=ascii')] ---- DATA ---- ERRORS ---- OS.ENVIRON wsgi variables [] ---- request.ENVIRON wsgi variables ['wsgi.errors', 'wsgi.input', 'wsgi.multiprocess', 'wsgi.multithread', 'wsgi.run_once', 'wsgi.url_scheme', 'wsgi.version'] $ cd .. 
mercurial-3.7.3/tests/test-clone-update-order.t0000644000175000017500000000613312676531525021127 0ustar mpmmpm00000000000000 $ hg init $ echo foo > bar $ hg commit -Am default adding bar $ hg up -r null 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg branch mine marked working directory as branch mine (branches are permanent and global, did you want a bookmark?) $ echo hello > world $ hg commit -Am hello adding world $ hg up -r null 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg branch other marked working directory as branch other $ echo good > bye $ hg commit -Am other adding bye $ hg up -r mine 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg clone -U -u . .#other ../b -r 0 -r 1 -r 2 -b other abort: cannot specify both --noupdate and --updaterev [255] $ hg clone -U .#other ../b -r 0 -r 1 -r 2 -b other adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 3 files (+2 heads) $ rm -rf ../b $ hg clone -u . 
.#other ../b -r 0 -r 1 -r 2 -b other adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 3 files (+2 heads) updating to branch mine 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf ../b $ hg clone -u 0 .#other ../b -r 0 -r 1 -r 2 -b other adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 3 files (+2 heads) updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf ../b $ hg clone -u 1 .#other ../b -r 0 -r 1 -r 2 -b other adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 3 files (+2 heads) updating to branch mine 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf ../b $ hg clone -u 2 .#other ../b -r 0 -r 1 -r 2 -b other adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 3 files (+2 heads) updating to branch other 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf ../b Test -r mine ... mine is ignored: $ hg clone -u 2 .#other ../b -r mine -r 0 -r 1 -r 2 -b other adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 3 files (+2 heads) updating to branch other 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf ../b $ hg clone .#other ../b -b default -b mine adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 3 files (+2 heads) updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf ../b $ hg clone .#other ../b adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch other 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf ../b $ hg clone -U . 
../c -r 1 -r 2 > /dev/null $ hg clone ../c ../b updating to branch other 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf ../b ../c mercurial-3.7.3/tests/test-convert-authormap.t0000644000175000017500000000234612676531525021116 0ustar mpmmpm00000000000000 $ cat >> $HGRCPATH < [extensions] > convert= > EOF Prepare orig repo $ hg init orig $ cd orig $ echo foo > foo $ HGUSER='user name' hg ci -qAm 'foo' $ cd .. Explicit --authors $ cat > authormap.txt < user name = Long User Name > > # comment > this line is ignored > EOF $ hg convert --authors authormap.txt orig new initializing destination new repository ignoring bad line in author map file authormap.txt: this line is ignored scanning source... sorting... converting... 0 foo writing author map file $TESTTMP/new/.hg/authormap (glob) $ cat new/.hg/authormap user name=Long User Name $ hg -Rnew log changeset: 0:d89716e88087 tag: tip user: Long User Name date: Thu Jan 01 00:00:00 1970 +0000 summary: foo $ rm -rf new Implicit .hg/authormap $ hg init new $ mv authormap.txt new/.hg/authormap $ hg convert orig new ignoring bad line in author map file $TESTTMP/new/.hg/authormap: this line is ignored (glob) scanning source... sorting... converting... 0 foo $ hg -Rnew log changeset: 0:d89716e88087 tag: tip user: Long User Name date: Thu Jan 01 00:00:00 1970 +0000 summary: foo mercurial-3.7.3/tests/test-wireproto.py.out0000644000175000017500000000006112676531525020455 0ustar mpmmpm00000000000000Hello, Foobar ['Hello, Fo, =;:> $HGRCPATH $ echo "convert = " >> $HGRCPATH create cvs repository with one project $ mkdir cvsrepo $ cd cvsrepo $ CVSROOT=`pwd` $ export CVSROOT $ CVS_OPTIONS=-f $ export CVS_OPTIONS $ cd .. 
$ rmdir cvsrepo $ cvscall() > { > cvs -f "$@" > } output of 'cvs ci' varies unpredictably, so just discard it $ cvsci() > { > sleep 1 > cvs -f ci "$@" >/dev/null > } $ cvscall -d "$CVSROOT" init $ mkdir cvsrepo/proj $ cvscall -q co proj create file1 on the trunk $ cd proj $ touch file1 $ cvscall -Q add file1 $ cvsci -m"add file1 on trunk" file1 create two branches $ cvscall -q tag -b v1_0 T file1 $ cvscall -q tag -b v1_1 T file1 create file2 on branch v1_0 $ cvscall -Q up -rv1_0 $ touch file2 $ cvscall -Q add file2 $ cvsci -m"add file2" file2 create file3, file4 on branch v1_1 $ cvscall -Q up -rv1_1 $ touch file3 $ touch file4 $ cvscall -Q add file3 file4 $ cvsci -m"add file3, file4 on branch v1_1" file3 file4 merge file2 from v1_0 to v1_1 $ cvscall -Q up -jv1_0 $ cvsci -m"MERGE from v1_0: add file2" cvs commit: Examining . Step things up a notch: now we make the history really hairy, with changes bouncing back and forth between trunk and v1_2 and merges going both ways. (I.e., try to model the real world.) create branch v1_2 $ cvscall -Q up -A $ cvscall -q tag -b v1_2 T file1 create file5 on branch v1_2 $ cvscall -Q up -rv1_2 $ touch file5 $ cvs -Q add file5 $ cvsci -m"add file5 on v1_2" cvs commit: Examining . create file6 on trunk post-v1_2 $ cvscall -Q up -A $ touch file6 $ cvscall -Q add file6 $ cvsci -m"add file6 on trunk post-v1_2" cvs commit: Examining . merge file5 from v1_2 to trunk $ cvscall -Q up -A $ cvscall -Q up -jv1_2 file5 $ cvsci -m"MERGE from v1_2: add file5" cvs commit: Examining . merge file6 from trunk to v1_2 $ cvscall -Q up -rv1_2 $ cvscall up -jHEAD file6 U file6 $ cvsci -m"MERGE from HEAD: add file6" cvs commit: Examining . 
cvs rlog output $ cvscall -q rlog proj | egrep '^(RCS file|revision)' RCS file: $TESTTMP/cvsrepo/proj/file1,v revision 1.1 RCS file: $TESTTMP/cvsrepo/proj/Attic/file2,v revision 1.1 revision 1.1.4.2 revision 1.1.4.1 revision 1.1.2.1 RCS file: $TESTTMP/cvsrepo/proj/Attic/file3,v revision 1.1 revision 1.1.2.1 RCS file: $TESTTMP/cvsrepo/proj/Attic/file4,v revision 1.1 revision 1.1.2.1 RCS file: $TESTTMP/cvsrepo/proj/file5,v revision 1.2 revision 1.1 revision 1.1.2.1 RCS file: $TESTTMP/cvsrepo/proj/file6,v revision 1.1 revision 1.1.2.2 revision 1.1.2.1 convert to hg (#1) $ cd .. $ hg convert --datesort proj proj.hg initializing destination proj.hg repository connecting to $TESTTMP/cvsrepo scanning source... collecting CVS rlog 15 log entries creating changesets 9 changeset entries sorting... converting... 8 add file1 on trunk 7 add file2 6 MERGE from v1_0: add file2 5 file file3 was initially added on branch v1_1. 4 add file3, file4 on branch v1_1 3 add file5 on v1_2 2 add file6 on trunk post-v1_2 1 MERGE from HEAD: add file6 0 MERGE from v1_2: add file5 hg log -G output (#1) $ hg -R proj.hg log -G --template "{rev} {desc}\n" o 8 MERGE from v1_2: add file5 | | o 7 MERGE from HEAD: add file6 | | o | 6 add file6 on trunk post-v1_2 | | | o 5 add file5 on v1_2 | | | | o 4 add file3, file4 on branch v1_1 | | | o | | 3 file file3 was initially added on branch v1_1. |/ / | o 2 MERGE from v1_0: add file2 |/ | o 1 add file2 |/ o 0 add file1 on trunk convert to hg (#2: with merge detection) $ hg convert \ > --config convert.cvsps.mergefrom='"^MERGE from (\S+):"' \ > --datesort \ > proj proj.hg2 initializing destination proj.hg2 repository connecting to $TESTTMP/cvsrepo scanning source... collecting CVS rlog 15 log entries creating changesets 9 changeset entries sorting... converting... 8 add file1 on trunk 7 add file2 6 MERGE from v1_0: add file2 5 file file3 was initially added on branch v1_1. 
4 add file3, file4 on branch v1_1 3 add file5 on v1_2 2 add file6 on trunk post-v1_2 1 MERGE from HEAD: add file6 0 MERGE from v1_2: add file5 hg log -G output (#2) $ hg -R proj.hg2 log -G --template "{rev} {desc}\n" o 8 MERGE from v1_2: add file5 | | o 7 MERGE from HEAD: add file6 | | o | 6 add file6 on trunk post-v1_2 | | | o 5 add file5 on v1_2 | | | | o 4 add file3, file4 on branch v1_1 | | | o | | 3 file file3 was initially added on branch v1_1. |/ / | o 2 MERGE from v1_0: add file2 |/ | o 1 add file2 |/ o 0 add file1 on trunk mercurial-3.7.3/tests/test-hgweb-auth.py.out0000644000175000017500000001130012676531525020454 0ustar mpmmpm00000000000000 *** Test in-uri schemes CFG: {x.prefix: http://example.org} URI: http://example.org/foo ('x', 'x') URI: http://example.org/foo/bar ('x', 'x') URI: http://example.org/bar ('x', 'x') URI: https://example.org/foo abort URI: https://example.org/foo/bar abort URI: https://example.org/bar abort URI: https://x@example.org/bar abort URI: https://y@example.org/bar abort CFG: {x.prefix: https://example.org} URI: http://example.org/foo abort URI: http://example.org/foo/bar abort URI: http://example.org/bar abort URI: https://example.org/foo ('x', 'x') URI: https://example.org/foo/bar ('x', 'x') URI: https://example.org/bar ('x', 'x') URI: https://x@example.org/bar ('x', 'x') URI: https://y@example.org/bar abort CFG: {x.prefix: http://example.org, x.schemes: https} URI: http://example.org/foo ('x', 'x') URI: http://example.org/foo/bar ('x', 'x') URI: http://example.org/bar ('x', 'x') URI: https://example.org/foo abort URI: https://example.org/foo/bar abort URI: https://example.org/bar abort URI: https://x@example.org/bar abort URI: https://y@example.org/bar abort CFG: {x.prefix: https://example.org, x.schemes: http} URI: http://example.org/foo abort URI: http://example.org/foo/bar abort URI: http://example.org/bar abort URI: https://example.org/foo ('x', 'x') URI: https://example.org/foo/bar ('x', 'x') URI: 
https://example.org/bar ('x', 'x') URI: https://x@example.org/bar ('x', 'x') URI: https://y@example.org/bar abort *** Test separately configured schemes CFG: {x.prefix: example.org, x.schemes: http} URI: http://example.org/foo ('x', 'x') URI: http://example.org/foo/bar ('x', 'x') URI: http://example.org/bar ('x', 'x') URI: https://example.org/foo abort URI: https://example.org/foo/bar abort URI: https://example.org/bar abort URI: https://x@example.org/bar abort URI: https://y@example.org/bar abort CFG: {x.prefix: example.org, x.schemes: https} URI: http://example.org/foo abort URI: http://example.org/foo/bar abort URI: http://example.org/bar abort URI: https://example.org/foo ('x', 'x') URI: https://example.org/foo/bar ('x', 'x') URI: https://example.org/bar ('x', 'x') URI: https://x@example.org/bar ('x', 'x') URI: https://y@example.org/bar abort CFG: {x.prefix: example.org, x.schemes: http https} URI: http://example.org/foo ('x', 'x') URI: http://example.org/foo/bar ('x', 'x') URI: http://example.org/bar ('x', 'x') URI: https://example.org/foo ('x', 'x') URI: https://example.org/foo/bar ('x', 'x') URI: https://example.org/bar ('x', 'x') URI: https://x@example.org/bar ('x', 'x') URI: https://y@example.org/bar abort *** Test prefix matching CFG: {x.prefix: http://example.org/foo, y.prefix: http://example.org/bar} URI: http://example.org/foo ('x', 'x') URI: http://example.org/foo/bar ('x', 'x') URI: http://example.org/bar ('y', 'y') URI: https://example.org/foo abort URI: https://example.org/foo/bar abort URI: https://example.org/bar abort URI: https://x@example.org/bar abort URI: https://y@example.org/bar abort CFG: {x.prefix: http://example.org/foo, y.prefix: http://example.org/foo/bar} URI: http://example.org/foo ('x', 'x') URI: http://example.org/foo/bar ('y', 'y') URI: http://example.org/bar abort URI: https://example.org/foo abort URI: https://example.org/foo/bar abort URI: https://example.org/bar abort URI: https://x@example.org/bar abort URI: 
https://y@example.org/bar abort CFG: {x.prefix: *, y.prefix: https://example.org/bar} URI: http://example.org/foo abort URI: http://example.org/foo/bar abort URI: http://example.org/bar abort URI: https://example.org/foo ('x', 'x') URI: https://example.org/foo/bar ('x', 'x') URI: https://example.org/bar ('y', 'y') URI: https://x@example.org/bar ('x', 'x') URI: https://y@example.org/bar ('y', 'y') *** Test user matching CFG: {x.password: xpassword, x.prefix: http://example.org/foo, x.username: None} URI: http://y@example.org/foo ('y', 'xpassword') CFG: {x.password: xpassword, x.prefix: http://example.org/foo, x.username: None, y.password: ypassword, y.prefix: http://example.org/foo, y.username: y} URI: http://y@example.org/foo ('y', 'ypassword') CFG: {x.password: xpassword, x.prefix: http://example.org/foo/bar, x.username: None, y.password: ypassword, y.prefix: http://example.org/foo, y.username: y} URI: http://y@example.org/foo/bar ('y', 'xpassword') *** Test urllib2 and util.url URIs: http://user@example.com:8080/foo http://example.com:8080/foo ('user', '') mercurial-3.7.3/tests/test-inherit-mode.t0000644000175000017500000000744112676531525020025 0ustar mpmmpm00000000000000#require unix-permissions test that new files created in .hg inherit the permissions from .hg/store $ mkdir dir just in case somebody has a strange $TMPDIR $ chmod g-s dir $ cd dir $ cat >printmodes.py < import os, sys > > allnames = [] > isdir = {} > for root, dirs, files in os.walk(sys.argv[1]): > for d in dirs: > name = os.path.join(root, d) > isdir[name] = 1 > allnames.append(name) > for f in files: > name = os.path.join(root, f) > allnames.append(name) > allnames.sort() > for name in allnames: > suffix = name in isdir and '/' or '' > print '%05o %s%s' % (os.lstat(name).st_mode & 07777, name, suffix) > EOF $ cat >mode.py < import sys > import os > print '%05o' % os.lstat(sys.argv[1]).st_mode > EOF $ umask 077 $ hg init repo $ cd repo $ chmod 0770 .hg/store before commit store can be written 
by the group, other files cannot store is setgid $ python ../printmodes.py . 00700 ./.hg/ 00600 ./.hg/00changelog.i 00600 ./.hg/requires 00770 ./.hg/store/ $ mkdir dir $ touch foo dir/bar $ hg ci -qAm 'add files' after commit working dir files can only be written by the owner files created in .hg can be written by the group (in particular, store/**, dirstate, branch cache file, undo files) new directories are setgid $ python ../printmodes.py . 00700 ./.hg/ 00600 ./.hg/00changelog.i 00770 ./.hg/cache/ 00660 ./.hg/cache/branch2-served 00660 ./.hg/cache/rbc-names-v1 00660 ./.hg/cache/rbc-revs-v1 00660 ./.hg/dirstate 00660 ./.hg/last-message.txt 00600 ./.hg/requires 00770 ./.hg/store/ 00660 ./.hg/store/00changelog.i 00660 ./.hg/store/00manifest.i 00770 ./.hg/store/data/ 00770 ./.hg/store/data/dir/ 00660 ./.hg/store/data/dir/bar.i 00660 ./.hg/store/data/foo.i 00660 ./.hg/store/fncache 00660 ./.hg/store/phaseroots 00660 ./.hg/store/undo 00660 ./.hg/store/undo.backupfiles 00660 ./.hg/store/undo.phaseroots 00660 ./.hg/undo.backup.dirstate 00660 ./.hg/undo.bookmarks 00660 ./.hg/undo.branch 00660 ./.hg/undo.desc 00660 ./.hg/undo.dirstate 00700 ./dir/ 00600 ./dir/bar 00600 ./foo $ umask 007 $ hg init ../push before push group can write everything $ python ../printmodes.py ../push 00770 ../push/.hg/ 00660 ../push/.hg/00changelog.i 00660 ../push/.hg/requires 00770 ../push/.hg/store/ $ umask 077 $ hg -q push ../push after push group can still write everything $ python ../printmodes.py ../push 00770 ../push/.hg/ 00660 ../push/.hg/00changelog.i 00770 ../push/.hg/cache/ 00660 ../push/.hg/cache/branch2-base 00660 ../push/.hg/cache/rbc-names-v1 00660 ../push/.hg/cache/rbc-revs-v1 00660 ../push/.hg/requires 00770 ../push/.hg/store/ 00660 ../push/.hg/store/00changelog.i 00660 ../push/.hg/store/00manifest.i 00770 ../push/.hg/store/data/ 00770 ../push/.hg/store/data/dir/ 00660 ../push/.hg/store/data/dir/bar.i 00660 ../push/.hg/store/data/foo.i 00660 ../push/.hg/store/fncache 00660 
../push/.hg/store/undo 00660 ../push/.hg/store/undo.backupfiles 00660 ../push/.hg/store/undo.phaseroots 00660 ../push/.hg/undo.bookmarks 00660 ../push/.hg/undo.branch 00660 ../push/.hg/undo.desc 00660 ../push/.hg/undo.dirstate Test that we don't lose the setgid bit when we call chmod. Not all systems support setgid directories (e.g. HFS+), so just check that directories have the same mode. $ cd .. $ hg init setgid $ cd setgid $ chmod g+rwx .hg/store $ chmod g+s .hg/store 2> /dev/null || true $ mkdir dir $ touch dir/file $ hg ci -qAm 'add dir/file' $ storemode=`python ../mode.py .hg/store` $ dirmode=`python ../mode.py .hg/store/data/dir` $ if [ "$storemode" != "$dirmode" ]; then > echo "$storemode != $dirmode" > fi $ cd .. $ cd .. # g-s dir mercurial-3.7.3/tests/test-ui-verbosity.py0000644000175000017500000000250012676531525020256 0ustar mpmmpm00000000000000import os from mercurial import ui hgrc = os.environ['HGRCPATH'] f = open(hgrc) basehgrc = f.read() f.close() print ' hgrc settings command line options final result ' print ' quiet verbo debug quiet verbo debug quiet verbo debug' for i in xrange(64): hgrc_quiet = bool(i & 1<<0) hgrc_verbose = bool(i & 1<<1) hgrc_debug = bool(i & 1<<2) cmd_quiet = bool(i & 1<<3) cmd_verbose = bool(i & 1<<4) cmd_debug = bool(i & 1<<5) f = open(hgrc, 'w') f.write(basehgrc) f.write('\n[ui]\n') if hgrc_quiet: f.write('quiet = True\n') if hgrc_verbose: f.write('verbose = True\n') if hgrc_debug: f.write('debug = True\n') f.close() u = ui.ui() if cmd_quiet or cmd_debug or cmd_verbose: u.setconfig('ui', 'quiet', str(bool(cmd_quiet))) u.setconfig('ui', 'verbose', str(bool(cmd_verbose))) u.setconfig('ui', 'debug', str(bool(cmd_debug))) check = '' if u.debugflag: if not u.verbose or u.quiet: check = ' *' elif u.verbose and u.quiet: check = ' +' print ('%2d %5s %5s %5s %5s %5s %5s -> %5s %5s %5s%s' % (i, hgrc_quiet, hgrc_verbose, hgrc_debug, cmd_quiet, cmd_verbose, cmd_debug, u.quiet, u.verbose, u.debugflag, check)) 
mercurial-3.7.3/tests/test-revert.t0000644000175000017500000007644412676531525016761 0ustar mpmmpm00000000000000 $ hg init repo $ cd repo $ echo 123 > a $ echo 123 > c $ echo 123 > e $ hg add a c e $ hg commit -m "first" a c e nothing changed $ hg revert abort: no files or directories specified (use --all to revert all files) [255] $ hg revert --all Introduce some changes and revert them -------------------------------------- $ echo 123 > b $ hg status ? b $ echo 12 > c $ hg status M c ? b $ hg add b $ hg status M c A b $ hg rm a $ hg status M c A b R a revert removal of a file $ hg revert a $ hg status M c A b revert addition of a file $ hg revert b $ hg status M c ? b revert modification of a file (--no-backup) $ hg revert --no-backup c $ hg status ? b revert deletion (! status) of a added file ------------------------------------------ $ hg add b $ hg status b A b $ rm b $ hg status b ! b $ hg revert -v b forgetting b $ hg status b b: * (glob) $ ls a c e Test creation of backup (.orig) files ------------------------------------- $ echo z > e $ hg revert --all -v saving current version of e as e.orig reverting e Test creation of backup (.orig) file in configured file location ---------------------------------------------------------------- $ echo z > e $ hg revert --all -v --config 'ui.origbackuppath=.hg/origbackups' creating directory: $TESTTMP/repo/.hg/origbackups (glob) saving current version of e as $TESTTMP/repo/.hg/origbackups/e.orig (glob) reverting e $ rm -rf .hg/origbackups revert on clean file (no change) -------------------------------- $ hg revert a no changes needed to a revert on an untracked file --------------------------- $ echo q > q $ hg revert q file not managed: q $ rm q revert on file that does not exists ----------------------------------- $ hg revert notfound notfound: no such file in rev 334a9e57682c $ touch d $ hg add d $ hg rm a $ hg commit -m "second" $ echo z > z $ hg add z $ hg st A z ? 
e.orig revert to another revision (--rev) ---------------------------------- $ hg revert --all -r0 adding a removing d forgetting z revert explicitly to parent (--rev) ----------------------------------- $ hg revert --all -rtip forgetting a undeleting d $ rm a *.orig revert to another revision (--rev) and exact match -------------------------------------------------- exact match are more silent $ hg revert -r0 a $ hg st a A a $ hg rm d $ hg st d R d should keep d removed $ hg revert -r0 d no changes needed to d $ hg st d R d $ hg update -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved revert of exec bit ------------------ #if execbit $ chmod +x c $ hg revert --all reverting c $ test -x c || echo non-executable non-executable $ chmod +x c $ hg commit -m exe $ chmod -x c $ hg revert --all reverting c $ test -x c && echo executable executable #endif Test that files reverted to other than the parent are treated as "modified", even if none of mode, size and timestamp of it isn't changed on the filesystem (see also issue4583). $ echo 321 > e $ hg diff --git diff --git a/e b/e --- a/e +++ b/e @@ -1,1 +1,1 @@ -123 +321 $ hg commit -m 'ambiguity from size' $ cat e 321 $ touch -t 200001010000 e $ hg debugrebuildstate $ cat >> .hg/hgrc < [fakedirstatewritetime] > # emulate invoking dirstate.write() via repo.status() > # at 2000-01-01 00:00 > fakenow = 200001010000 > > [extensions] > fakedirstatewritetime = $TESTDIR/fakedirstatewritetime.py > EOF $ hg revert -r 0 e $ cat >> .hg/hgrc < [extensions] > fakedirstatewritetime = ! > EOF $ cat e 123 $ touch -t 200001010000 e $ hg status -A e M e $ cd .. 
Issue241: update and revert produces inconsistent repositories -------------------------------------------------------------- $ hg init a $ cd a $ echo a >> a $ hg commit -A -d '1 0' -m a adding a $ echo a >> a $ hg commit -d '2 0' -m a $ hg update 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ mkdir b $ echo b > b/b call `hg revert` with no file specified --------------------------------------- $ hg revert -rtip abort: no files or directories specified (use --all to revert all files, or 'hg update 1' to update) [255] call `hg revert` with -I --------------------------- $ echo a >> a $ hg revert -I a reverting a call `hg revert` with -X --------------------------- $ echo a >> a $ hg revert -X d reverting a call `hg revert` with --all --------------------------- $ hg revert --all -rtip reverting a $ rm *.orig Issue332: confusing message when reverting directory ---------------------------------------------------- $ hg ci -A -m b adding b/b created new head $ echo foobar > b/b $ mkdir newdir $ echo foo > newdir/newfile $ hg add newdir/newfile $ hg revert b newdir reverting b/b (glob) forgetting newdir/newfile (glob) $ echo foobar > b/b $ hg revert . reverting b/b (glob) reverting a rename target should revert the source -------------------------------------------------- $ hg mv a newa $ hg revert newa $ hg st a newa ? newa Also true for move overwriting an existing file $ hg mv --force a b/b $ hg revert b/b $ hg status a b/b $ cd .. 
$ hg init ignored $ cd ignored $ echo '^ignored$' > .hgignore $ echo '^ignoreddir$' >> .hgignore $ echo '^removed$' >> .hgignore $ mkdir ignoreddir $ touch ignoreddir/file $ touch ignoreddir/removed $ touch ignored $ touch removed 4 ignored files (we will add/commit everything) $ hg st -A -X .hgignore I ignored I ignoreddir/file I ignoreddir/removed I removed $ hg ci -qAm 'add files' ignored ignoreddir/file ignoreddir/removed removed $ echo >> ignored $ echo >> ignoreddir/file $ hg rm removed ignoreddir/removed should revert ignored* and undelete *removed -------------------------------------------- $ hg revert -a --no-backup reverting ignored reverting ignoreddir/file (glob) undeleting ignoreddir/removed (glob) undeleting removed $ hg st -mardi $ hg up -qC $ echo >> ignored $ hg rm removed should silently revert the named files -------------------------------------- $ hg revert --no-backup ignored removed $ hg st -mardi Reverting copy (issue3920) -------------------------- someone set up us the copies $ rm .hgignore $ hg update -C 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg mv ignored allyour $ hg copy removed base $ hg commit -m rename copies and renames, you have no chance to survive make your time (issue3920) $ hg update '.^' 1 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg revert -rtip -a adding allyour adding base removing ignored $ hg status -C A allyour ignored A base removed R ignored Test revert of a file added by one side of the merge ==================================================== remove any pending change $ hg revert --all forgetting allyour forgetting base undeleting ignored $ hg purge --all --config extensions.purge= Adds a new commit $ echo foo > newadd $ hg add newadd $ hg commit -m 'other adds' created new head merge it with the other head $ hg merge # merge 1 into 2 2 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg summary 
parent: 2:b8ec310b2d4e tip other adds parent: 1:f6180deb8fbe rename branch: default commit: 2 modified, 1 removed (merge) update: (current) phases: 3 draft clarifies who added what $ hg status M allyour M base R ignored $ hg status --change 'p1()' A newadd $ hg status --change 'p2()' A allyour A base R ignored revert file added by p1() to p1() state ----------------------------------------- $ hg revert -r 'p1()' 'glob:newad?' $ hg status M allyour M base R ignored revert file added by p1() to p2() state ------------------------------------------ $ hg revert -r 'p2()' 'glob:newad?' removing newadd $ hg status M allyour M base R ignored R newadd revert file added by p2() to p2() state ------------------------------------------ $ hg revert -r 'p2()' 'glob:allyou?' $ hg status M allyour M base R ignored R newadd revert file added by p2() to p1() state ------------------------------------------ $ hg revert -r 'p1()' 'glob:allyou?' removing allyour $ hg status M base R allyour R ignored R newadd Systematic behavior validation of most possible cases ===================================================== This section tests most of the possible combinations of revision states and working directory states. The number of possible cases is significant but they but they all have a slightly different handling. So this section commits to and testing all of them to allow safe refactoring of the revert code. A python script is used to generate a file history for each combination of states, on one side the content (or lack thereof) in two revisions, and on the other side, the content and "tracked-ness" of the working directory. The three states generated are: - a "base" revision - a "parent" revision - the working directory (based on "parent") The files generated have names of the form: __- All known states are not tested yet. See inline documentation for details. Special cases from merge and rename are not tested by this section. 
Write the python script to disk ------------------------------- check list of planned files $ python $TESTDIR/generate-working-copy-states.py filelist 2 content1_content1_content1-tracked content1_content1_content1-untracked content1_content1_content3-tracked content1_content1_content3-untracked content1_content1_missing-tracked content1_content1_missing-untracked content1_content2_content1-tracked content1_content2_content1-untracked content1_content2_content2-tracked content1_content2_content2-untracked content1_content2_content3-tracked content1_content2_content3-untracked content1_content2_missing-tracked content1_content2_missing-untracked content1_missing_content1-tracked content1_missing_content1-untracked content1_missing_content3-tracked content1_missing_content3-untracked content1_missing_missing-tracked content1_missing_missing-untracked missing_content2_content2-tracked missing_content2_content2-untracked missing_content2_content3-tracked missing_content2_content3-untracked missing_content2_missing-tracked missing_content2_missing-untracked missing_missing_content3-tracked missing_missing_content3-untracked missing_missing_missing-tracked missing_missing_missing-untracked Script to make a simple text version of the content --------------------------------------------------- $ cat << EOF >> dircontent.py > # generate a simple text view of the directory for easy comparison > import os > files = os.listdir('.') > files.sort() > for filename in files: > if os.path.isdir(filename): > continue > content = open(filename).read() > print '%-6s %s' % (content.strip(), filename) > EOF Generate appropriate repo state ------------------------------- $ hg init revert-ref $ cd revert-ref Generate base changeset $ python $TESTDIR/generate-working-copy-states.py state 2 1 $ hg addremove --similarity 0 adding content1_content1_content1-tracked adding content1_content1_content1-untracked adding content1_content1_content3-tracked adding content1_content1_content3-untracked 
adding content1_content1_missing-tracked adding content1_content1_missing-untracked adding content1_content2_content1-tracked adding content1_content2_content1-untracked adding content1_content2_content2-tracked adding content1_content2_content2-untracked adding content1_content2_content3-tracked adding content1_content2_content3-untracked adding content1_content2_missing-tracked adding content1_content2_missing-untracked adding content1_missing_content1-tracked adding content1_missing_content1-untracked adding content1_missing_content3-tracked adding content1_missing_content3-untracked adding content1_missing_missing-tracked adding content1_missing_missing-untracked $ hg status A content1_content1_content1-tracked A content1_content1_content1-untracked A content1_content1_content3-tracked A content1_content1_content3-untracked A content1_content1_missing-tracked A content1_content1_missing-untracked A content1_content2_content1-tracked A content1_content2_content1-untracked A content1_content2_content2-tracked A content1_content2_content2-untracked A content1_content2_content3-tracked A content1_content2_content3-untracked A content1_content2_missing-tracked A content1_content2_missing-untracked A content1_missing_content1-tracked A content1_missing_content1-untracked A content1_missing_content3-tracked A content1_missing_content3-untracked A content1_missing_missing-tracked A content1_missing_missing-untracked $ hg commit -m 'base' (create a simple text version of the content) $ python ../dircontent.py > ../content-base.txt $ cat ../content-base.txt content1 content1_content1_content1-tracked content1 content1_content1_content1-untracked content1 content1_content1_content3-tracked content1 content1_content1_content3-untracked content1 content1_content1_missing-tracked content1 content1_content1_missing-untracked content1 content1_content2_content1-tracked content1 content1_content2_content1-untracked content1 content1_content2_content2-tracked content1 
content1_content2_content2-untracked content1 content1_content2_content3-tracked content1 content1_content2_content3-untracked content1 content1_content2_missing-tracked content1 content1_content2_missing-untracked content1 content1_missing_content1-tracked content1 content1_missing_content1-untracked content1 content1_missing_content3-tracked content1 content1_missing_content3-untracked content1 content1_missing_missing-tracked content1 content1_missing_missing-untracked Create parent changeset $ python $TESTDIR/generate-working-copy-states.py state 2 2 $ hg addremove --similarity 0 removing content1_missing_content1-tracked removing content1_missing_content1-untracked removing content1_missing_content3-tracked removing content1_missing_content3-untracked removing content1_missing_missing-tracked removing content1_missing_missing-untracked adding missing_content2_content2-tracked adding missing_content2_content2-untracked adding missing_content2_content3-tracked adding missing_content2_content3-untracked adding missing_content2_missing-tracked adding missing_content2_missing-untracked $ hg status M content1_content2_content1-tracked M content1_content2_content1-untracked M content1_content2_content2-tracked M content1_content2_content2-untracked M content1_content2_content3-tracked M content1_content2_content3-untracked M content1_content2_missing-tracked M content1_content2_missing-untracked A missing_content2_content2-tracked A missing_content2_content2-untracked A missing_content2_content3-tracked A missing_content2_content3-untracked A missing_content2_missing-tracked A missing_content2_missing-untracked R content1_missing_content1-tracked R content1_missing_content1-untracked R content1_missing_content3-tracked R content1_missing_content3-untracked R content1_missing_missing-tracked R content1_missing_missing-untracked $ hg commit -m 'parent' (create a simple text version of the content) $ python ../dircontent.py > ../content-parent.txt $ cat 
../content-parent.txt content1 content1_content1_content1-tracked content1 content1_content1_content1-untracked content1 content1_content1_content3-tracked content1 content1_content1_content3-untracked content1 content1_content1_missing-tracked content1 content1_content1_missing-untracked content2 content1_content2_content1-tracked content2 content1_content2_content1-untracked content2 content1_content2_content2-tracked content2 content1_content2_content2-untracked content2 content1_content2_content3-tracked content2 content1_content2_content3-untracked content2 content1_content2_missing-tracked content2 content1_content2_missing-untracked content2 missing_content2_content2-tracked content2 missing_content2_content2-untracked content2 missing_content2_content3-tracked content2 missing_content2_content3-untracked content2 missing_content2_missing-tracked content2 missing_content2_missing-untracked Setup working directory $ python $TESTDIR/generate-working-copy-states.py state 2 wc $ hg addremove --similarity 0 adding content1_missing_content1-tracked adding content1_missing_content1-untracked adding content1_missing_content3-tracked adding content1_missing_content3-untracked adding content1_missing_missing-tracked adding content1_missing_missing-untracked adding missing_missing_content3-tracked adding missing_missing_content3-untracked adding missing_missing_missing-tracked adding missing_missing_missing-untracked $ hg forget *_*_*-untracked $ rm *_*_missing-* $ hg status M content1_content1_content3-tracked M content1_content2_content1-tracked M content1_content2_content3-tracked M missing_content2_content3-tracked A content1_missing_content1-tracked A content1_missing_content3-tracked A missing_missing_content3-tracked R content1_content1_content1-untracked R content1_content1_content3-untracked R content1_content1_missing-untracked R content1_content2_content1-untracked R content1_content2_content2-untracked R content1_content2_content3-untracked R 
content1_content2_missing-untracked R missing_content2_content2-untracked R missing_content2_content3-untracked R missing_content2_missing-untracked ! content1_content1_missing-tracked ! content1_content2_missing-tracked ! content1_missing_missing-tracked ! missing_content2_missing-tracked ! missing_missing_missing-tracked ? content1_missing_content1-untracked ? content1_missing_content3-untracked ? missing_missing_content3-untracked $ hg status --rev 'desc("base")' M content1_content1_content3-tracked M content1_content2_content2-tracked M content1_content2_content3-tracked M content1_missing_content3-tracked A missing_content2_content2-tracked A missing_content2_content3-tracked A missing_missing_content3-tracked R content1_content1_content1-untracked R content1_content1_content3-untracked R content1_content1_missing-untracked R content1_content2_content1-untracked R content1_content2_content2-untracked R content1_content2_content3-untracked R content1_content2_missing-untracked R content1_missing_content1-untracked R content1_missing_content3-untracked R content1_missing_missing-untracked ! content1_content1_missing-tracked ! content1_content2_missing-tracked ! content1_missing_missing-tracked ! missing_content2_missing-tracked ! missing_missing_missing-tracked ? 
missing_missing_content3-untracked (create a simple text version of the content) $ python ../dircontent.py > ../content-wc.txt $ cat ../content-wc.txt content1 content1_content1_content1-tracked content1 content1_content1_content1-untracked content3 content1_content1_content3-tracked content3 content1_content1_content3-untracked content1 content1_content2_content1-tracked content1 content1_content2_content1-untracked content2 content1_content2_content2-tracked content2 content1_content2_content2-untracked content3 content1_content2_content3-tracked content3 content1_content2_content3-untracked content1 content1_missing_content1-tracked content1 content1_missing_content1-untracked content3 content1_missing_content3-tracked content3 content1_missing_content3-untracked content2 missing_content2_content2-tracked content2 missing_content2_content2-untracked content3 missing_content2_content3-tracked content3 missing_content2_content3-untracked content3 missing_missing_content3-tracked content3 missing_missing_content3-untracked $ cd .. 
Test revert --all to parent content ----------------------------------- (setup from reference repo) $ cp -r revert-ref revert-parent-all $ cd revert-parent-all check revert output $ hg revert --all undeleting content1_content1_content1-untracked reverting content1_content1_content3-tracked undeleting content1_content1_content3-untracked reverting content1_content1_missing-tracked undeleting content1_content1_missing-untracked reverting content1_content2_content1-tracked undeleting content1_content2_content1-untracked undeleting content1_content2_content2-untracked reverting content1_content2_content3-tracked undeleting content1_content2_content3-untracked reverting content1_content2_missing-tracked undeleting content1_content2_missing-untracked forgetting content1_missing_content1-tracked forgetting content1_missing_content3-tracked forgetting content1_missing_missing-tracked undeleting missing_content2_content2-untracked reverting missing_content2_content3-tracked undeleting missing_content2_content3-untracked reverting missing_content2_missing-tracked undeleting missing_content2_missing-untracked forgetting missing_missing_content3-tracked forgetting missing_missing_missing-tracked Compare resulting directory with revert target. The diff is filtered to include change only. The only difference should be additional `.orig` backup file when applicable. $ python ../dircontent.py > ../content-parent-all.txt $ cd .. 
$ diff -U 0 -- content-parent.txt content-parent-all.txt | grep _ +content3 content1_content1_content3-tracked.orig +content3 content1_content1_content3-untracked.orig +content1 content1_content2_content1-tracked.orig +content1 content1_content2_content1-untracked.orig +content3 content1_content2_content3-tracked.orig +content3 content1_content2_content3-untracked.orig +content1 content1_missing_content1-tracked +content1 content1_missing_content1-untracked +content3 content1_missing_content3-tracked +content3 content1_missing_content3-untracked +content3 missing_content2_content3-tracked.orig +content3 missing_content2_content3-untracked.orig +content3 missing_missing_content3-tracked +content3 missing_missing_content3-untracked Test revert --all to "base" content ----------------------------------- (setup from reference repo) $ cp -r revert-ref revert-base-all $ cd revert-base-all check revert output $ hg revert --all --rev 'desc(base)' undeleting content1_content1_content1-untracked reverting content1_content1_content3-tracked undeleting content1_content1_content3-untracked reverting content1_content1_missing-tracked undeleting content1_content1_missing-untracked undeleting content1_content2_content1-untracked reverting content1_content2_content2-tracked undeleting content1_content2_content2-untracked reverting content1_content2_content3-tracked undeleting content1_content2_content3-untracked reverting content1_content2_missing-tracked undeleting content1_content2_missing-untracked adding content1_missing_content1-untracked reverting content1_missing_content3-tracked adding content1_missing_content3-untracked reverting content1_missing_missing-tracked adding content1_missing_missing-untracked removing missing_content2_content2-tracked removing missing_content2_content3-tracked removing missing_content2_missing-tracked forgetting missing_missing_content3-tracked forgetting missing_missing_missing-tracked Compare resulting directory with revert target. 
The diff is filtered to include change only. The only difference should be additional `.orig` backup file when applicable. $ python ../dircontent.py > ../content-base-all.txt $ cd .. $ diff -U 0 -- content-base.txt content-base-all.txt | grep _ +content3 content1_content1_content3-tracked.orig +content3 content1_content1_content3-untracked.orig +content2 content1_content2_content2-untracked.orig +content3 content1_content2_content3-tracked.orig +content3 content1_content2_content3-untracked.orig +content3 content1_missing_content3-tracked.orig +content3 content1_missing_content3-untracked.orig +content2 missing_content2_content2-untracked +content3 missing_content2_content3-tracked.orig +content3 missing_content2_content3-untracked +content3 missing_missing_content3-tracked +content3 missing_missing_content3-untracked Test revert to parent content with explicit file name ----------------------------------------------------- (setup from reference repo) $ cp -r revert-ref revert-parent-explicit $ cd revert-parent-explicit revert all files individually and check the output (output is expected to be different than in the --all case) $ for file in `python $TESTDIR/generate-working-copy-states.py filelist 2`; do > echo '### revert for:' $file; > hg revert $file; > echo > done ### revert for: content1_content1_content1-tracked no changes needed to content1_content1_content1-tracked ### revert for: content1_content1_content1-untracked ### revert for: content1_content1_content3-tracked ### revert for: content1_content1_content3-untracked ### revert for: content1_content1_missing-tracked ### revert for: content1_content1_missing-untracked ### revert for: content1_content2_content1-tracked ### revert for: content1_content2_content1-untracked ### revert for: content1_content2_content2-tracked no changes needed to content1_content2_content2-tracked ### revert for: content1_content2_content2-untracked ### revert for: content1_content2_content3-tracked ### revert for: 
content1_content2_content3-untracked ### revert for: content1_content2_missing-tracked ### revert for: content1_content2_missing-untracked ### revert for: content1_missing_content1-tracked ### revert for: content1_missing_content1-untracked file not managed: content1_missing_content1-untracked ### revert for: content1_missing_content3-tracked ### revert for: content1_missing_content3-untracked file not managed: content1_missing_content3-untracked ### revert for: content1_missing_missing-tracked ### revert for: content1_missing_missing-untracked content1_missing_missing-untracked: no such file in rev * (glob) ### revert for: missing_content2_content2-tracked no changes needed to missing_content2_content2-tracked ### revert for: missing_content2_content2-untracked ### revert for: missing_content2_content3-tracked ### revert for: missing_content2_content3-untracked ### revert for: missing_content2_missing-tracked ### revert for: missing_content2_missing-untracked ### revert for: missing_missing_content3-tracked ### revert for: missing_missing_content3-untracked file not managed: missing_missing_content3-untracked ### revert for: missing_missing_missing-tracked ### revert for: missing_missing_missing-untracked missing_missing_missing-untracked: no such file in rev * (glob) check resulting directory against the --all run (There should be no difference) $ python ../dircontent.py > ../content-parent-explicit.txt $ cd .. 
$ diff -U 0 -- content-parent-all.txt content-parent-explicit.txt | grep _ [1] Test revert to "base" content with explicit file name ----------------------------------------------------- (setup from reference repo) $ cp -r revert-ref revert-base-explicit $ cd revert-base-explicit revert all files individually and check the output (output is expected to be different than in the --all case) $ for file in `python $TESTDIR/generate-working-copy-states.py filelist 2`; do > echo '### revert for:' $file; > hg revert $file --rev 'desc(base)'; > echo > done ### revert for: content1_content1_content1-tracked no changes needed to content1_content1_content1-tracked ### revert for: content1_content1_content1-untracked ### revert for: content1_content1_content3-tracked ### revert for: content1_content1_content3-untracked ### revert for: content1_content1_missing-tracked ### revert for: content1_content1_missing-untracked ### revert for: content1_content2_content1-tracked no changes needed to content1_content2_content1-tracked ### revert for: content1_content2_content1-untracked ### revert for: content1_content2_content2-tracked ### revert for: content1_content2_content2-untracked ### revert for: content1_content2_content3-tracked ### revert for: content1_content2_content3-untracked ### revert for: content1_content2_missing-tracked ### revert for: content1_content2_missing-untracked ### revert for: content1_missing_content1-tracked no changes needed to content1_missing_content1-tracked ### revert for: content1_missing_content1-untracked ### revert for: content1_missing_content3-tracked ### revert for: content1_missing_content3-untracked ### revert for: content1_missing_missing-tracked ### revert for: content1_missing_missing-untracked ### revert for: missing_content2_content2-tracked ### revert for: missing_content2_content2-untracked no changes needed to missing_content2_content2-untracked ### revert for: missing_content2_content3-tracked ### revert for: 
missing_content2_content3-untracked no changes needed to missing_content2_content3-untracked ### revert for: missing_content2_missing-tracked ### revert for: missing_content2_missing-untracked no changes needed to missing_content2_missing-untracked ### revert for: missing_missing_content3-tracked ### revert for: missing_missing_content3-untracked file not managed: missing_missing_content3-untracked ### revert for: missing_missing_missing-tracked ### revert for: missing_missing_missing-untracked missing_missing_missing-untracked: no such file in rev * (glob) check resulting directory against the --all run (There should be no difference) $ python ../dircontent.py > ../content-base-explicit.txt $ cd .. $ diff -U 0 -- content-base-all.txt content-base-explicit.txt | grep _ [1] Revert to an ancestor of P2 during a merge (issue5052) ----------------------------------------------------- (prepare the repository) $ hg init issue5052 $ cd issue5052 $ echo '.\.orig' > .hgignore $ echo 0 > root $ hg ci -qAm C0 $ echo 0 > A $ hg ci -qAm C1 $ echo 1 >> A $ hg ci -qm C2 $ hg up -q 0 $ echo 1 > B $ hg ci -qAm C3 $ hg status --rev 'ancestor(.,2)' --rev 2 A A $ hg log -G -T '{rev} ({files})\n' @ 3 (B) | | o 2 (A) | | | o 1 (A) |/ o 0 (.hgignore root) actual tests: reverting to something else than a merge parent $ hg merge 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg status --rev 'p1()' M A $ hg status --rev 'p2()' A B $ hg status --rev '1' M A A B $ hg revert --rev 1 --all reverting A removing B $ hg status --rev 1 From the other parents $ hg up -C 'p2()' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg status --rev 'p1()' M B $ hg status --rev 'p2()' A A $ hg status --rev '1' M A A B $ hg revert --rev 1 --all reverting A removing B $ hg status --rev 1 $ cd .. 
mercurial-3.7.3/tests/test-unionrepo.t0000644000175000017500000000722212676531525017454 0ustar mpmmpm00000000000000Test unionrepo functionality Create one repository $ hg init repo1 $ cd repo1 $ touch repo1-0 $ echo repo1-0 > f $ hg ci -Aqmrepo1-0 $ touch repo1-1 $ echo repo1-1 >> f $ hg ci -Aqmrepo1-1 $ touch repo1-2 $ echo repo1-2 >> f $ hg ci -Aqmrepo1-2 $ hg log --template '{rev}:{node|short} {desc|firstline}\n' 2:68c0685446a3 repo1-2 1:8a58db72e69d repo1-1 0:f093fec0529b repo1-0 $ tip1=`hg id -q` $ cd .. - and a clone with a not-completely-trivial history $ hg clone -q repo1 --rev 0 repo2 $ cd repo2 $ touch repo2-1 $ sed '1i\ > repo2-1 at top > ' f > f.tmp $ mv f.tmp f $ hg ci -Aqmrepo2-1 $ touch repo2-2 $ hg pull -q ../repo1 -r 1 $ hg merge -q $ hg ci -Aqmrepo2-2-merge $ touch repo2-3 $ echo repo2-3 >> f $ hg ci -mrepo2-3 $ hg log --template '{rev}:{node|short} {desc|firstline}\n' 4:2f0d178c469c repo2-3 3:9e6fb3e0b9da repo2-2-merge 2:8a58db72e69d repo1-1 1:c337dba826e7 repo2-1 0:f093fec0529b repo1-0 $ cd .. 
revisions from repo2 appear as appended / pulled to repo1 $ hg -R union:repo1+repo2 log --template '{rev}:{node|short} {desc|firstline}\n' 5:2f0d178c469c repo2-3 4:9e6fb3e0b9da repo2-2-merge 3:c337dba826e7 repo2-1 2:68c0685446a3 repo1-2 1:8a58db72e69d repo1-1 0:f093fec0529b repo1-0 manifest can be retrieved for revisions in both repos $ hg -R union:repo1+repo2 mani -r $tip1 f repo1-0 repo1-1 repo1-2 $ hg -R union:repo1+repo2 mani -r 4 f repo1-0 repo1-1 repo2-1 repo2-2 files can be retrieved form both repos $ hg -R repo1 cat repo1/f -r2 repo1-0 repo1-1 repo1-2 $ hg -R union:repo1+repo2 cat -r$tip1 repo1/f repo1-0 repo1-1 repo1-2 $ hg -R union:repo1+repo2 cat -r4 $TESTTMP/repo1/f repo2-1 at top repo1-0 repo1-1 files can be compared across repos $ hg -R union:repo1+repo2 diff -r$tip1 -rtip diff -r 68c0685446a3 -r 2f0d178c469c f --- a/f Thu Jan 01 00:00:00 1970 +0000 +++ b/f Thu Jan 01 00:00:00 1970 +0000 @@ -1,3 +1,4 @@ +repo2-1 at top repo1-0 repo1-1 -repo1-2 +repo2-3 heads from both repos are found correctly $ hg -R union:repo1+repo2 heads --template '{rev}:{node|short} {desc|firstline}\n' 5:2f0d178c469c repo2-3 2:68c0685446a3 repo1-2 revsets works across repos $ hg -R union:repo1+repo2 id -r "ancestor($tip1, 5)" 8a58db72e69d annotate works - an indication that linkrevs works $ hg --cwd repo1 -Runion:../repo2 annotate $TESTTMP/repo1/f -r tip 3: repo2-1 at top 0: repo1-0 1: repo1-1 5: repo2-3 union repos can be cloned ... 
and clones works correctly $ hg clone -U union:repo1+repo2 repo3 requesting all changes adding changesets adding manifests adding file changes added 6 changesets with 11 changes to 6 files (+1 heads) $ hg -R repo3 paths default = union:repo1+repo2 $ hg -R repo3 verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 6 files, 6 changesets, 11 total revisions $ hg -R repo3 heads --template '{rev}:{node|short} {desc|firstline}\n' 5:2f0d178c469c repo2-3 2:68c0685446a3 repo1-2 $ hg -R repo3 log --template '{rev}:{node|short} {desc|firstline}\n' 5:2f0d178c469c repo2-3 4:9e6fb3e0b9da repo2-2-merge 3:c337dba826e7 repo2-1 2:68c0685446a3 repo1-2 1:8a58db72e69d repo1-1 0:f093fec0529b repo1-0 union repos should use the correct rev number (issue5024) $ hg init a $ cd a $ echo a0 >> f $ hg ci -Aqm a0 $ cd .. $ hg init b $ cd b $ echo b0 >> f $ hg ci -Aqm b0 $ echo b1 >> f $ hg ci -qm b1 $ cd .. "hg files -v" to call fctx.size() -> fctx.iscensored() $ hg files -R union:b+a -r2 -v 3 b/f (glob) mercurial-3.7.3/tests/test-bad-extension.t0000644000175000017500000000302612676531525020174 0ustar mpmmpm00000000000000 $ echo 'raise Exception("bit bucket overflow")' > badext.py $ abspath=`pwd`/badext.py $ cat <> $HGRCPATH > [extensions] > gpg = > hgext.gpg = > badext = $abspath > badext2 = > EOF $ hg -q help help 2>&1 |grep extension *** failed to import extension badext from $TESTTMP/badext.py: bit bucket overflow *** failed to import extension badext2: No module named badext2 show traceback $ hg -q help help --traceback 2>&1 | egrep ' extension|^Exception|Traceback|ImportError' *** failed to import extension badext from $TESTTMP/badext.py: bit bucket overflow Traceback (most recent call last): Exception: bit bucket overflow *** failed to import extension badext2: No module named badext2 Traceback (most recent call last): ImportError: No module named badext2 show traceback for ImportError of hgext.name if debug is set (note that 
--debug option isn't applied yet when loading extensions) $ (hg -q help help --traceback --config ui.debug=True 2>&1) \ > | grep -v '^ ' \ > | egrep 'extension..[^p]|^Exception|Traceback|ImportError|not import' *** failed to import extension badext from $TESTTMP/badext.py: bit bucket overflow Traceback (most recent call last): Exception: bit bucket overflow could not import hgext.badext2 (No module named *badext2): trying badext2 (glob) Traceback (most recent call last): ImportError: No module named *badext2 (glob) *** failed to import extension badext2: No module named badext2 Traceback (most recent call last): ImportError: No module named badext2 mercurial-3.7.3/tests/test-mq-qrefresh-interactive.t0000644000175000017500000002030112676531525022174 0ustar mpmmpm00000000000000Create configuration $ echo "[ui]" >> $HGRCPATH $ echo "interactive=true" >> $HGRCPATH help qrefresh (no record) $ echo "[extensions]" >> $HGRCPATH $ echo "mq=" >> $HGRCPATH $ hg help qrefresh hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]... update the current patch If any file patterns are provided, the refreshed patch will contain only the modifications that match those patterns; the remaining modifications will remain in the working directory. If -s/--short is specified, files currently included in the patch will be refreshed just like matched files and remain in the patch. If -e/--edit is specified, Mercurial will start your configured editor for you to enter a message. In case qrefresh fails, you will find a backup of your message in ".hg/last-message.txt". hg add/remove/copy/rename work as usual, though you might want to use git- style patches (-g/--git or [diff] git=1) to track copies and renames. See the diffs help topic for more information on the git diff format. Returns 0 on success. 
options ([+] can be repeated): -e --edit invoke editor on commit messages -g --git use git extended diff format -s --short refresh only files already in the patch and specified files -U --currentuser add/update author field in patch with current user -u --user USER add/update author field in patch with given user -D --currentdate add/update date field in patch with current date -d --date DATE add/update date field in patch with given date -I --include PATTERN [+] include names matching the given patterns -X --exclude PATTERN [+] exclude names matching the given patterns -m --message TEXT use text as commit message -l --logfile FILE read commit message from file (some details hidden, use --verbose to show complete help) help qrefresh (record) $ echo "record=" >> $HGRCPATH $ hg help qrefresh hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]... update the current patch If any file patterns are provided, the refreshed patch will contain only the modifications that match those patterns; the remaining modifications will remain in the working directory. If -s/--short is specified, files currently included in the patch will be refreshed just like matched files and remain in the patch. If -e/--edit is specified, Mercurial will start your configured editor for you to enter a message. In case qrefresh fails, you will find a backup of your message in ".hg/last-message.txt". hg add/remove/copy/rename work as usual, though you might want to use git- style patches (-g/--git or [diff] git=1) to track copies and renames. See the diffs help topic for more information on the git diff format. Returns 0 on success. 
options ([+] can be repeated): -e --edit invoke editor on commit messages -g --git use git extended diff format -s --short refresh only files already in the patch and specified files -U --currentuser add/update author field in patch with current user -u --user USER add/update author field in patch with given user -D --currentdate add/update date field in patch with current date -d --date DATE add/update date field in patch with given date -I --include PATTERN [+] include names matching the given patterns -X --exclude PATTERN [+] exclude names matching the given patterns -m --message TEXT use text as commit message -l --logfile FILE read commit message from file -i --interactive interactively select changes to refresh (some details hidden, use --verbose to show complete help) $ hg init a $ cd a Base commit $ cat > 1.txt < 1 > 2 > 3 > 4 > 5 > EOF $ cat > 2.txt < a > b > c > d > e > f > EOF $ mkdir dir $ cat > dir/a.txt < hello world > > someone > up > there > loves > me > EOF $ hg add 1.txt 2.txt dir/a.txt $ hg commit -m aaa $ hg qrecord --config ui.interactive=false patch abort: running non-interactively, use qnew instead [255] $ hg qnew -i --config ui.interactive=false patch abort: running non-interactively [255] $ hg qnew -d '0 0' patch Changing files $ sed -e 's/2/2 2/;s/4/4 4/' 1.txt > 1.txt.new $ sed -e 's/b/b b/' 2.txt > 2.txt.new $ sed -e 's/hello world/hello world!/' dir/a.txt > dir/a.txt.new $ mv -f 1.txt.new 1.txt $ mv -f 2.txt.new 2.txt $ mv -f dir/a.txt.new dir/a.txt Whole diff $ hg diff --nodates diff -r ed27675cb5df 1.txt --- a/1.txt +++ b/1.txt @@ -1,5 +1,5 @@ 1 -2 +2 2 3 -4 +4 4 5 diff -r ed27675cb5df 2.txt --- a/2.txt +++ b/2.txt @@ -1,5 +1,5 @@ a -b +b b c d e diff -r ed27675cb5df dir/a.txt --- a/dir/a.txt +++ b/dir/a.txt @@ -1,4 +1,4 @@ -hello world +hello world! 
someone up partial qrefresh $ hg qrefresh -i --config ui.interactive=false abort: running non-interactively [255] $ hg qrefresh -i -d '0 0' < y > y > n > y > y > n > EOF diff --git a/1.txt b/1.txt 2 hunks, 2 lines changed examine changes to '1.txt'? [Ynesfdaq?] y @@ -1,3 +1,3 @@ 1 -2 +2 2 3 record change 1/4 to '1.txt'? [Ynesfdaq?] y @@ -3,3 +3,3 @@ 3 -4 +4 4 5 record change 2/4 to '1.txt'? [Ynesfdaq?] n diff --git a/2.txt b/2.txt 1 hunks, 1 lines changed examine changes to '2.txt'? [Ynesfdaq?] y @@ -1,5 +1,5 @@ a -b +b b c d e record change 3/4 to '2.txt'? [Ynesfdaq?] y diff --git a/dir/a.txt b/dir/a.txt 1 hunks, 1 lines changed examine changes to 'dir/a.txt'? [Ynesfdaq?] n After partial qrefresh 'tip' $ hg tip -p changeset: 1:0738af1a8211 tag: patch tag: qbase tag: qtip tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: [mq]: patch diff -r 1fd39ab63a33 -r 0738af1a8211 1.txt --- a/1.txt Thu Jan 01 00:00:00 1970 +0000 +++ b/1.txt Thu Jan 01 00:00:00 1970 +0000 @@ -1,5 +1,5 @@ 1 -2 +2 2 3 4 5 diff -r 1fd39ab63a33 -r 0738af1a8211 2.txt --- a/2.txt Thu Jan 01 00:00:00 1970 +0000 +++ b/2.txt Thu Jan 01 00:00:00 1970 +0000 @@ -1,5 +1,5 @@ a -b +b b c d e After partial qrefresh 'diff' $ hg diff --nodates diff -r 0738af1a8211 1.txt --- a/1.txt +++ b/1.txt @@ -1,5 +1,5 @@ 1 2 2 3 -4 +4 4 5 diff -r 0738af1a8211 dir/a.txt --- a/dir/a.txt +++ b/dir/a.txt @@ -1,4 +1,4 @@ -hello world +hello world! someone up qrefresh interactively everything else $ hg qrefresh -i -d '0 0' < y > y > y > y > EOF diff --git a/1.txt b/1.txt 1 hunks, 1 lines changed examine changes to '1.txt'? [Ynesfdaq?] y @@ -1,5 +1,5 @@ 1 2 2 3 -4 +4 4 5 record change 1/2 to '1.txt'? [Ynesfdaq?] y diff --git a/dir/a.txt b/dir/a.txt 1 hunks, 1 lines changed examine changes to 'dir/a.txt'? [Ynesfdaq?] y @@ -1,4 +1,4 @@ -hello world +hello world! someone up record change 2/2 to 'dir/a.txt'? [Ynesfdaq?] 
y After final qrefresh 'tip' $ hg tip -p changeset: 1:2c3f66afeed9 tag: patch tag: qbase tag: qtip tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: [mq]: patch diff -r 1fd39ab63a33 -r 2c3f66afeed9 1.txt --- a/1.txt Thu Jan 01 00:00:00 1970 +0000 +++ b/1.txt Thu Jan 01 00:00:00 1970 +0000 @@ -1,5 +1,5 @@ 1 -2 +2 2 3 -4 +4 4 5 diff -r 1fd39ab63a33 -r 2c3f66afeed9 2.txt --- a/2.txt Thu Jan 01 00:00:00 1970 +0000 +++ b/2.txt Thu Jan 01 00:00:00 1970 +0000 @@ -1,5 +1,5 @@ a -b +b b c d e diff -r 1fd39ab63a33 -r 2c3f66afeed9 dir/a.txt --- a/dir/a.txt Thu Jan 01 00:00:00 1970 +0000 +++ b/dir/a.txt Thu Jan 01 00:00:00 1970 +0000 @@ -1,4 +1,4 @@ -hello world +hello world! someone up After qrefresh 'diff' $ hg diff --nodates $ cd .. mercurial-3.7.3/tests/test-diff-unified.t0000644000175000017500000001105612676531525017767 0ustar mpmmpm00000000000000 $ hg init repo $ cd repo $ cat > a < c > c > a > a > b > a > a > c > c > EOF $ hg ci -Am adda adding a $ cat > a < c > c > a > a > dd > a > a > c > c > EOF default context $ hg diff --nodates diff -r cf9f4ba66af2 a --- a/a +++ b/a @@ -2,7 +2,7 @@ c a a -b +dd a a c invalid --unified $ hg diff --nodates -U foo abort: diff context lines count must be an integer, not 'foo' [255] $ hg diff --nodates -U 2 diff -r cf9f4ba66af2 a --- a/a +++ b/a @@ -3,5 +3,5 @@ a a -b +dd a a $ hg --config diff.unified=2 diff --nodates diff -r cf9f4ba66af2 a --- a/a +++ b/a @@ -3,5 +3,5 @@ a a -b +dd a a $ hg diff --nodates -U 1 diff -r cf9f4ba66af2 a --- a/a +++ b/a @@ -4,3 +4,3 @@ a -b +dd a invalid diff.unified $ hg --config diff.unified=foo diff --nodates abort: diff context lines count must be an integer, not 'foo' [255] noprefix config and option $ hg --config diff.noprefix=True diff --nodates diff -r cf9f4ba66af2 a --- a +++ a @@ -2,7 +2,7 @@ c a a -b +dd a a c $ hg diff --noprefix --nodates diff -r cf9f4ba66af2 a --- a +++ a @@ -2,7 +2,7 @@ c a a -b +dd a a c noprefix config disabled in plain mode, but option still enabled $ 
HGPLAIN=1 hg --config diff.noprefix=True diff --nodates diff -r cf9f4ba66af2 a --- a/a +++ b/a @@ -2,7 +2,7 @@ c a a -b +dd a a c $ HGPLAIN=1 hg diff --noprefix --nodates diff -r cf9f4ba66af2 a --- a +++ a @@ -2,7 +2,7 @@ c a a -b +dd a a c $ cd .. 0 lines of context hunk header matches gnu diff hunk header $ hg init diffzero $ cd diffzero $ cat > f1 << EOF > c2 > c4 > c5 > EOF $ hg commit -Am0 adding f1 $ cat > f2 << EOF > c1 > c2 > c3 > c4 > EOF $ mv f2 f1 $ hg diff -U0 --nodates diff -r 55d8ff78db23 f1 --- a/f1 +++ b/f1 @@ -0,0 +1,1 @@ +c1 @@ -1,0 +3,1 @@ +c3 @@ -3,1 +4,0 @@ -c5 $ hg diff -U0 --nodates --git diff --git a/f1 b/f1 --- a/f1 +++ b/f1 @@ -0,0 +1,1 @@ +c1 @@ -1,0 +3,1 @@ +c3 @@ -3,1 +4,0 @@ -c5 $ hg diff -U0 --nodates -p diff -r 55d8ff78db23 f1 --- a/f1 +++ b/f1 @@ -0,0 +1,1 @@ +c1 @@ -1,0 +3,1 @@ c2 +c3 @@ -3,1 +4,0 @@ c4 -c5 $ echo a > f1 $ hg ci -m movef2 Test diff headers terminating with TAB when necessary (issue3357) Regular diff --nodates, file creation $ hg mv f1 'f 1' $ echo b > 'f 1' $ hg diff --nodates 'f 1' diff -r 7574207d0d15 f 1 --- /dev/null +++ b/f 1 @@ -0,0 +1,1 @@ +b Git diff, adding space $ hg diff --git diff --git a/f1 b/f 1 rename from f1 rename to f 1 --- a/f1 +++ b/f 1 @@ -1,1 +1,1 @@ -a +b Git diff with noprefix $ hg --config diff.noprefix=True diff --git --nodates diff --git f1 f 1 rename from f1 rename to f 1 --- f1 +++ f 1 @@ -1,1 +1,1 @@ -a +b noprefix config disabled in plain mode, but option still enabled $ HGPLAIN=1 hg --config diff.noprefix=True diff --git --nodates diff --git a/f1 b/f 1 rename from f1 rename to f 1 --- a/f1 +++ b/f 1 @@ -1,1 +1,1 @@ -a +b $ HGPLAIN=1 hg diff --git --noprefix --nodates diff --git f1 f 1 rename from f1 rename to f 1 --- f1 +++ f 1 @@ -1,1 +1,1 @@ -a +b Regular diff --nodates, file deletion $ hg ci -m addspace $ hg mv 'f 1' f1 $ echo a > f1 $ hg diff --nodates 'f 1' diff -r ca50fe67c9c7 f 1 --- a/f 1 +++ /dev/null @@ -1,1 +0,0 @@ -b Git diff, removing space $ hg diff --git diff --git a/f 
1 b/f1 rename from f 1 rename to f1 --- a/f 1 +++ b/f1 @@ -1,1 +1,1 @@ -b +a showfunc diff $ cat > f1 << EOF > int main() { > int a = 0; > int b = 1; > int c = 2; > int d = 3; > return a + b + c + d; > } > EOF $ hg commit -m addfunction $ cat > f1 << EOF > int main() { > int a = 0; > int b = 1; > int c = 2; > int e = 3; > return a + b + c + e; > } > EOF $ hg diff --git diff --git a/f1 b/f1 --- a/f1 +++ b/f1 @@ -2,6 +2,6 @@ int a = 0; int b = 1; int c = 2; - int d = 3; - return a + b + c + d; + int e = 3; + return a + b + c + e; } $ hg diff --config diff.showfunc=True --git diff --git a/f1 b/f1 --- a/f1 +++ b/f1 @@ -2,6 +2,6 @@ int main() { int a = 0; int b = 1; int c = 2; - int d = 3; - return a + b + c + d; + int e = 3; + return a + b + c + e; } $ cd .. mercurial-3.7.3/tests/test-subrepo-deep-nested-change.t0000644000175000017500000005434612676531525022544 0ustar mpmmpm00000000000000Preparing the subrepository 'sub2' $ hg init sub2 $ echo sub2 > sub2/sub2 $ hg add -R sub2 adding sub2/sub2 (glob) $ hg commit -R sub2 -m "sub2 import" Preparing the 'sub1' repo which depends on the subrepo 'sub2' $ hg init sub1 $ echo sub1 > sub1/sub1 $ echo "sub2 = ../sub2" > sub1/.hgsub $ hg clone sub2 sub1/sub2 updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg add -R sub1 adding sub1/.hgsub (glob) adding sub1/sub1 (glob) $ hg commit -R sub1 -m "sub1 import" Preparing the 'main' repo which depends on the subrepo 'sub1' $ hg init main $ echo main > main/main $ echo "sub1 = ../sub1" > main/.hgsub $ hg clone sub1 main/sub1 updating to branch default cloning subrepo sub2 from $TESTTMP/sub2 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg add -R main adding main/.hgsub (glob) adding main/main (glob) $ hg commit -R main -m "main import" Cleaning both repositories, just as a clone -U $ hg up -C -R sub2 null 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg up -C -R sub1 null 0 files updated, 
0 files merged, 3 files removed, 0 files unresolved $ hg up -C -R main null 0 files updated, 0 files merged, 3 files removed, 0 files unresolved $ rm -rf main/sub1 $ rm -rf sub1/sub2 Clone main $ hg --config extensions.largefiles= clone main cloned updating to branch default cloning subrepo sub1 from $TESTTMP/sub1 cloning subrepo sub1/sub2 from $TESTTMP/sub2 (glob) 3 files updated, 0 files merged, 0 files removed, 0 files unresolved Largefiles is NOT enabled in the clone if the source repo doesn't require it $ cat cloned/.hg/hgrc # example repository config (see "hg help config" for more info) [paths] default = $TESTTMP/main (glob) # path aliases to other clones of this repo in URLs or filesystem paths # (see "hg help config.paths" for more info) # # default-push = ssh://jdoe@example.net/hg/jdoes-fork # my-fork = ssh://jdoe@example.net/hg/jdoes-fork # my-clone = /home/jdoe/jdoes-clone [ui] # name and email (local to this repository, optional), e.g. # username = Jane Doe Checking cloned repo ids $ printf "cloned " ; hg id -R cloned cloned 7f491f53a367 tip $ printf "cloned/sub1 " ; hg id -R cloned/sub1 cloned/sub1 fc3b4ce2696f tip $ printf "cloned/sub1/sub2 " ; hg id -R cloned/sub1/sub2 cloned/sub1/sub2 c57a0840e3ba tip debugsub output for main and sub1 $ hg debugsub -R cloned path sub1 source ../sub1 revision fc3b4ce2696f7741438c79207583768f2ce6b0dd $ hg debugsub -R cloned/sub1 path sub2 source ../sub2 revision c57a0840e3badd667ef3c3ef65471609acb2ba3c Modifying deeply nested 'sub2' $ echo modified > cloned/sub1/sub2/sub2 $ hg commit --subrepos -m "deep nested modif should trigger a commit" -R cloned committing subrepository sub1 committing subrepository sub1/sub2 (glob) Checking modified node ids $ printf "cloned " ; hg id -R cloned cloned ffe6649062fe tip $ printf "cloned/sub1 " ; hg id -R cloned/sub1 cloned/sub1 2ecb03bf44a9 tip $ printf "cloned/sub1/sub2 " ; hg id -R cloned/sub1/sub2 cloned/sub1/sub2 53dd3430bcaf tip debugsub output for main and sub1 $ hg 
debugsub -R cloned path sub1 source ../sub1 revision 2ecb03bf44a94e749e8669481dd9069526ce7cb9 $ hg debugsub -R cloned/sub1 path sub2 source ../sub2 revision 53dd3430bcaf5ab4a7c48262bcad6d441f510487 Check that deep archiving works $ cd cloned $ echo 'test' > sub1/sub2/test.txt $ hg --config extensions.largefiles=! add sub1/sub2/test.txt $ mkdir sub1/sub2/folder $ echo 'subfolder' > sub1/sub2/folder/test.txt $ hg ci -ASm "add test.txt" adding sub1/sub2/folder/test.txt committing subrepository sub1 committing subrepository sub1/sub2 (glob) .. but first take a detour through some deep removal testing $ hg remove -S -I 're:.*.txt' . removing sub1/sub2/folder/test.txt (glob) removing sub1/sub2/test.txt (glob) $ hg status -S R sub1/sub2/folder/test.txt R sub1/sub2/test.txt $ hg update -Cq $ hg remove -I 're:.*.txt' sub1 $ hg status -S $ hg remove sub1/sub2/folder/test.txt $ hg remove sub1/.hgsubstate $ mv sub1/.hgsub sub1/x.hgsub $ hg status -S warning: subrepo spec file 'sub1/.hgsub' not found R sub1/.hgsubstate R sub1/sub2/folder/test.txt ! sub1/.hgsub ? sub1/x.hgsub $ mv sub1/x.hgsub sub1/.hgsub $ hg update -Cq $ touch sub1/foo $ hg forget sub1/sub2/folder/test.txt $ rm sub1/sub2/test.txt Test relative path printing + subrepos $ mkdir -p foo/bar $ cd foo $ touch bar/abc $ hg addremove -S .. adding ../sub1/sub2/folder/test.txt (glob) removing ../sub1/sub2/test.txt (glob) adding ../sub1/foo (glob) adding bar/abc (glob) $ cd .. $ hg status -S A foo/bar/abc A sub1/foo R sub1/sub2/test.txt Archive wdir() with subrepos $ hg rm main $ hg archive -S -r 'wdir()' ../wdir $ diff -r . 
../wdir | egrep -v '\.hg$|^Common subdirectories:' Only in ../wdir: .hg_archival.txt $ find ../wdir -type f | sort ../wdir/.hg_archival.txt ../wdir/.hgsub ../wdir/.hgsubstate ../wdir/foo/bar/abc ../wdir/sub1/.hgsub ../wdir/sub1/.hgsubstate ../wdir/sub1/foo ../wdir/sub1/sub1 ../wdir/sub1/sub2/folder/test.txt ../wdir/sub1/sub2/sub2 $ cat ../wdir/.hg_archival.txt repo: 7f491f53a367861f47ee64a80eb997d1f341b77a node: 9bb10eebee29dc0f1201dcf5977b811a540255fd+ branch: default latesttag: null latesttagdistance: 4 changessincelatesttag: 4 Attempting to archive 'wdir()' with a missing file is handled gracefully $ rm sub1/sub1 $ rm -r ../wdir $ hg archive -v -S -r 'wdir()' ../wdir $ find ../wdir -type f | sort ../wdir/.hg_archival.txt ../wdir/.hgsub ../wdir/.hgsubstate ../wdir/foo/bar/abc ../wdir/sub1/.hgsub ../wdir/sub1/.hgsubstate ../wdir/sub1/foo ../wdir/sub1/sub2/folder/test.txt ../wdir/sub1/sub2/sub2 Continue relative path printing + subrepos $ hg update -Cq $ rm -r ../wdir $ hg archive -S -r 'wdir()' ../wdir $ cat ../wdir/.hg_archival.txt repo: 7f491f53a367861f47ee64a80eb997d1f341b77a node: 9bb10eebee29dc0f1201dcf5977b811a540255fd branch: default latesttag: null latesttagdistance: 4 changessincelatesttag: 4 $ touch sub1/sub2/folder/bar $ hg addremove sub1/sub2 adding sub1/sub2/folder/bar (glob) $ hg status -S A sub1/sub2/folder/bar ? foo/bar/abc ? 
sub1/foo $ hg update -Cq $ hg addremove sub1 adding sub1/sub2/folder/bar (glob) adding sub1/foo (glob) $ hg update -Cq $ rm sub1/sub2/folder/test.txt $ rm sub1/sub2/test.txt $ hg ci -ASm "remove test.txt" adding sub1/sub2/folder/bar removing sub1/sub2/folder/test.txt removing sub1/sub2/test.txt adding sub1/foo adding foo/bar/abc committing subrepository sub1 committing subrepository sub1/sub2 (glob) $ hg forget sub1/sub2/sub2 $ echo x > sub1/sub2/x.txt $ hg add sub1/sub2/x.txt Files sees uncommitted adds and removes in subrepos $ hg files -S .hgsub .hgsubstate foo/bar/abc (glob) main sub1/.hgsub (glob) sub1/.hgsubstate (glob) sub1/foo (glob) sub1/sub1 (glob) sub1/sub2/folder/bar (glob) sub1/sub2/x.txt (glob) $ hg files -S "set:eol('dos') or eol('unix') or size('<= 0')" .hgsub .hgsubstate foo/bar/abc (glob) main sub1/.hgsub (glob) sub1/.hgsubstate (glob) sub1/foo (glob) sub1/sub1 (glob) sub1/sub2/folder/bar (glob) sub1/sub2/x.txt (glob) $ hg files -r '.^' -S "set:eol('dos') or eol('unix')" .hgsub .hgsubstate main sub1/.hgsub (glob) sub1/.hgsubstate (glob) sub1/sub1 (glob) sub1/sub2/folder/test.txt (glob) sub1/sub2/sub2 (glob) sub1/sub2/test.txt (glob) $ hg files sub1 sub1/.hgsub (glob) sub1/.hgsubstate (glob) sub1/foo (glob) sub1/sub1 (glob) sub1/sub2/folder/bar (glob) sub1/sub2/x.txt (glob) $ hg files sub1/sub2 sub1/sub2/folder/bar (glob) sub1/sub2/x.txt (glob) $ hg files .hgsub .hgsubstate foo/bar/abc (glob) main $ hg files -S -r '.^' sub1/sub2/folder sub1/sub2/folder/test.txt (glob) $ hg files -S -r '.^' sub1/sub2/missing sub1/sub2/missing: no such file in rev 78026e779ea6 (glob) [1] $ hg files -r '.^' sub1/ sub1/.hgsub (glob) sub1/.hgsubstate (glob) sub1/sub1 (glob) sub1/sub2/folder/test.txt (glob) sub1/sub2/sub2 (glob) sub1/sub2/test.txt (glob) $ hg files -r '.^' sub1/sub2 sub1/sub2/folder/test.txt (glob) sub1/sub2/sub2 (glob) sub1/sub2/test.txt (glob) $ hg rollback -q $ hg up -Cq $ hg --config extensions.largefiles=! 
archive -S ../archive_all $ find ../archive_all | sort ../archive_all ../archive_all/.hg_archival.txt ../archive_all/.hgsub ../archive_all/.hgsubstate ../archive_all/main ../archive_all/sub1 ../archive_all/sub1/.hgsub ../archive_all/sub1/.hgsubstate ../archive_all/sub1/sub1 ../archive_all/sub1/sub2 ../archive_all/sub1/sub2/folder ../archive_all/sub1/sub2/folder/test.txt ../archive_all/sub1/sub2/sub2 ../archive_all/sub1/sub2/test.txt Check that archive -X works in deep subrepos $ hg --config extensions.largefiles=! archive -S -X '**test*' ../archive_exclude $ find ../archive_exclude | sort ../archive_exclude ../archive_exclude/.hg_archival.txt ../archive_exclude/.hgsub ../archive_exclude/.hgsubstate ../archive_exclude/main ../archive_exclude/sub1 ../archive_exclude/sub1/.hgsub ../archive_exclude/sub1/.hgsubstate ../archive_exclude/sub1/sub1 ../archive_exclude/sub1/sub2 ../archive_exclude/sub1/sub2/sub2 $ hg --config extensions.largefiles=! archive -S -I '**test*' ../archive_include $ find ../archive_include | sort ../archive_include ../archive_include/sub1 ../archive_include/sub1/sub2 ../archive_include/sub1/sub2/folder ../archive_include/sub1/sub2/folder/test.txt ../archive_include/sub1/sub2/test.txt Check that deep archive works with largefiles (which overrides hgsubrepo impl) This also tests the repo.ui regression in 43fb170a23bd, and that lf subrepo subrepos are archived properly. 
Note that add --large through a subrepo currently adds the file as a normal file $ echo "large" > sub1/sub2/large.bin $ hg --config extensions.largefiles= add --large -R sub1/sub2 sub1/sub2/large.bin $ echo "large" > large.bin $ hg --config extensions.largefiles= add --large large.bin $ hg --config extensions.largefiles= ci -S -m "add large files" committing subrepository sub1 committing subrepository sub1/sub2 (glob) $ hg --config extensions.largefiles= archive -S ../archive_lf $ find ../archive_lf | sort ../archive_lf ../archive_lf/.hg_archival.txt ../archive_lf/.hgsub ../archive_lf/.hgsubstate ../archive_lf/large.bin ../archive_lf/main ../archive_lf/sub1 ../archive_lf/sub1/.hgsub ../archive_lf/sub1/.hgsubstate ../archive_lf/sub1/sub1 ../archive_lf/sub1/sub2 ../archive_lf/sub1/sub2/folder ../archive_lf/sub1/sub2/folder/test.txt ../archive_lf/sub1/sub2/large.bin ../archive_lf/sub1/sub2/sub2 ../archive_lf/sub1/sub2/test.txt $ rm -rf ../archive_lf Exclude large files from main and sub-sub repo $ hg --config extensions.largefiles= archive -S -X '**.bin' ../archive_lf $ find ../archive_lf | sort ../archive_lf ../archive_lf/.hg_archival.txt ../archive_lf/.hgsub ../archive_lf/.hgsubstate ../archive_lf/main ../archive_lf/sub1 ../archive_lf/sub1/.hgsub ../archive_lf/sub1/.hgsubstate ../archive_lf/sub1/sub1 ../archive_lf/sub1/sub2 ../archive_lf/sub1/sub2/folder ../archive_lf/sub1/sub2/folder/test.txt ../archive_lf/sub1/sub2/sub2 ../archive_lf/sub1/sub2/test.txt $ rm -rf ../archive_lf Exclude normal files from main and sub-sub repo $ hg --config extensions.largefiles= archive -S -X '**.txt' -p '.' 
../archive_lf.tgz $ tar -tzf ../archive_lf.tgz | sort .hgsub .hgsubstate large.bin main sub1/.hgsub sub1/.hgsubstate sub1/sub1 sub1/sub2/large.bin sub1/sub2/sub2 Include normal files from within a largefiles subrepo $ hg --config extensions.largefiles= archive -S -I '**.txt' ../archive_lf $ find ../archive_lf | sort ../archive_lf ../archive_lf/.hg_archival.txt ../archive_lf/sub1 ../archive_lf/sub1/sub2 ../archive_lf/sub1/sub2/folder ../archive_lf/sub1/sub2/folder/test.txt ../archive_lf/sub1/sub2/test.txt $ rm -rf ../archive_lf Include large files from within a largefiles subrepo $ hg --config extensions.largefiles= archive -S -I '**.bin' ../archive_lf $ find ../archive_lf | sort ../archive_lf ../archive_lf/large.bin ../archive_lf/sub1 ../archive_lf/sub1/sub2 ../archive_lf/sub1/sub2/large.bin $ rm -rf ../archive_lf Find an exact largefile match in a largefiles subrepo $ hg --config extensions.largefiles= archive -S -I 'sub1/sub2/large.bin' ../archive_lf $ find ../archive_lf | sort ../archive_lf ../archive_lf/sub1 ../archive_lf/sub1/sub2 ../archive_lf/sub1/sub2/large.bin $ rm -rf ../archive_lf The local repo enables largefiles if a largefiles repo is cloned $ hg showconfig extensions abort: repository requires features unknown to this Mercurial: largefiles! (see https://mercurial-scm.org/wiki/MissingRequirement for more information) [255] $ hg --config extensions.largefiles= clone -qU . ../lfclone $ cat ../lfclone/.hg/hgrc # example repository config (see "hg help config" for more info) [paths] default = $TESTTMP/cloned (glob) # path aliases to other clones of this repo in URLs or filesystem paths # (see "hg help config.paths" for more info) # # default-push = ssh://jdoe@example.net/hg/jdoes-fork # my-fork = ssh://jdoe@example.net/hg/jdoes-fork # my-clone = /home/jdoe/jdoes-clone [ui] # name and email (local to this repository, optional), e.g. 
# username = Jane Doe [extensions] largefiles= Find an exact match to a standin (should archive nothing) $ hg --config extensions.largefiles= archive -S -I 'sub/sub2/.hglf/large.bin' ../archive_lf $ find ../archive_lf 2> /dev/null | sort $ cat >> $HGRCPATH < [extensions] > largefiles= > [largefiles] > patterns=glob:**.dat > EOF Test forget through a deep subrepo with the largefiles extension, both a largefile and a normal file. Then a largefile that hasn't been committed yet. $ touch sub1/sub2/untracked.txt $ touch sub1/sub2/large.dat $ hg forget sub1/sub2/large.bin sub1/sub2/test.txt sub1/sub2/untracked.txt not removing sub1/sub2/untracked.txt: file is already untracked (glob) [1] $ hg add --large --dry-run -v sub1/sub2/untracked.txt adding sub1/sub2/untracked.txt as a largefile (glob) $ hg add --large -v sub1/sub2/untracked.txt adding sub1/sub2/untracked.txt as a largefile (glob) $ hg add --normal -v sub1/sub2/large.dat adding sub1/sub2/large.dat (glob) $ hg forget -v sub1/sub2/untracked.txt removing sub1/sub2/untracked.txt (glob) $ hg status -S A sub1/sub2/large.dat R sub1/sub2/large.bin R sub1/sub2/test.txt ? foo/bar/abc ? sub1/sub2/untracked.txt ? sub1/sub2/x.txt $ hg add sub1/sub2 $ hg archive -S -r 'wdir()' ../wdir2 $ diff -r . 
../wdir2 | egrep -v '\.hg$|^Common subdirectories:' Only in ../wdir2: .hg_archival.txt Only in .: .hglf Only in .: foo Only in ./sub1/sub2: large.bin Only in ./sub1/sub2: test.txt Only in ./sub1/sub2: untracked.txt Only in ./sub1/sub2: x.txt $ find ../wdir2 -type f | sort ../wdir2/.hg_archival.txt ../wdir2/.hgsub ../wdir2/.hgsubstate ../wdir2/large.bin ../wdir2/main ../wdir2/sub1/.hgsub ../wdir2/sub1/.hgsubstate ../wdir2/sub1/sub1 ../wdir2/sub1/sub2/folder/test.txt ../wdir2/sub1/sub2/large.dat ../wdir2/sub1/sub2/sub2 $ hg status -S -mac -n | sort .hgsub .hgsubstate large.bin main sub1/.hgsub sub1/.hgsubstate sub1/sub1 sub1/sub2/folder/test.txt sub1/sub2/large.dat sub1/sub2/sub2 $ hg ci -Sqm 'forget testing' Test 'wdir()' modified file archiving with largefiles $ echo 'mod' > main $ echo 'mod' > large.bin $ echo 'mod' > sub1/sub2/large.dat $ hg archive -S -r 'wdir()' ../wdir3 $ diff -r . ../wdir3 | egrep -v '\.hg$|^Common subdirectories' Only in ../wdir3: .hg_archival.txt Only in .: .hglf Only in .: foo Only in ./sub1/sub2: large.bin Only in ./sub1/sub2: test.txt Only in ./sub1/sub2: untracked.txt Only in ./sub1/sub2: x.txt $ find ../wdir3 -type f | sort ../wdir3/.hg_archival.txt ../wdir3/.hgsub ../wdir3/.hgsubstate ../wdir3/large.bin ../wdir3/main ../wdir3/sub1/.hgsub ../wdir3/sub1/.hgsubstate ../wdir3/sub1/sub1 ../wdir3/sub1/sub2/folder/test.txt ../wdir3/sub1/sub2/large.dat ../wdir3/sub1/sub2/sub2 $ hg up -Cq Test issue4330: commit a directory where only normal files have changed $ touch foo/bar/large.dat $ hg add --large foo/bar/large.dat $ hg ci -m 'add foo/bar/large.dat' $ touch a.txt $ touch a.dat $ hg add -v foo/bar/abc a.txt a.dat adding a.dat as a largefile adding a.txt adding foo/bar/abc (glob) $ hg ci -m 'dir commit with only normal file deltas' foo/bar $ hg status A a.dat A a.txt Test a directory commit with a changed largefile and a changed normal file $ echo changed > foo/bar/large.dat $ echo changed > foo/bar/abc $ hg ci -m 'dir commit with normal and 
lf file deltas' foo $ hg status A a.dat A a.txt $ hg ci -m "add a.*" $ hg mv a.dat b.dat $ hg mv foo/bar/abc foo/bar/def $ hg status -C A b.dat a.dat A foo/bar/def foo/bar/abc R a.dat R foo/bar/abc $ hg ci -m "move large and normal" $ hg status -C --rev '.^' --rev . A b.dat a.dat A foo/bar/def foo/bar/abc R a.dat R foo/bar/abc $ echo foo > main $ hg ci -m "mod parent only" $ hg init sub3 $ echo "sub3 = sub3" >> .hgsub $ echo xyz > sub3/a.txt $ hg add sub3/a.txt $ hg ci -Sm "add sub3" committing subrepository sub3 $ cat .hgsub | grep -v sub3 > .hgsub1 $ mv .hgsub1 .hgsub $ hg ci -m "remove sub3" $ hg log -r "subrepo()" --style compact 0 7f491f53a367 1970-01-01 00:00 +0000 test main import 1 ffe6649062fe 1970-01-01 00:00 +0000 test deep nested modif should trigger a commit 2 9bb10eebee29 1970-01-01 00:00 +0000 test add test.txt 3 7c64f035294f 1970-01-01 00:00 +0000 test add large files 4 f734a59e2e35 1970-01-01 00:00 +0000 test forget testing 11 9685a22af5db 1970-01-01 00:00 +0000 test add sub3 12[tip] 2e0485b475b9 1970-01-01 00:00 +0000 test remove sub3 $ hg log -r "subrepo('sub3')" --style compact 11 9685a22af5db 1970-01-01 00:00 +0000 test add sub3 12[tip] 2e0485b475b9 1970-01-01 00:00 +0000 test remove sub3 $ hg log -r "subrepo('bogus')" --style compact Test .hgsubstate in the R state $ hg rm .hgsub .hgsubstate $ hg ci -m 'trash subrepo tracking' $ hg log -r "subrepo('re:sub\d+')" --style compact 0 7f491f53a367 1970-01-01 00:00 +0000 test main import 1 ffe6649062fe 1970-01-01 00:00 +0000 test deep nested modif should trigger a commit 2 9bb10eebee29 1970-01-01 00:00 +0000 test add test.txt 3 7c64f035294f 1970-01-01 00:00 +0000 test add large files 4 f734a59e2e35 1970-01-01 00:00 +0000 test forget testing 11 9685a22af5db 1970-01-01 00:00 +0000 test add sub3 12 2e0485b475b9 1970-01-01 00:00 +0000 test remove sub3 13[tip] a68b2c361653 1970-01-01 00:00 +0000 test trash subrepo tracking Restore the trashed subrepo tracking $ hg rollback -q $ hg update -Cq . 
Interaction with extdiff, largefiles and subrepos $ hg --config extensions.extdiff= extdiff -S $ hg --config extensions.extdiff= extdiff -r '.^' -S diff -Npru cloned.*/.hgsub cloned/.hgsub (glob) --- cloned.*/.hgsub * +0000 (glob) +++ cloned/.hgsub * +0000 (glob) @@ -1,2 +1 @@ sub1 = ../sub1 -sub3 = sub3 diff -Npru cloned.*/.hgsubstate cloned/.hgsubstate (glob) --- cloned.*/.hgsubstate * +0000 (glob) +++ cloned/.hgsubstate * +0000 (glob) @@ -1,2 +1 @@ 7a36fa02b66e61f27f3d4a822809f159479b8ab2 sub1 -b1a26de6f2a045a9f079323693614ee322f1ff7e sub3 [1] $ hg --config extensions.extdiff= extdiff -r 0 -r '.^' -S diff -Npru cloned.*/.hglf/b.dat cloned.*/.hglf/b.dat (glob) --- cloned.*/.hglf/b.dat * (glob) +++ cloned.*/.hglf/b.dat * (glob) @@ -0,0 +1 @@ +da39a3ee5e6b4b0d3255bfef95601890afd80709 diff -Npru cloned.*/.hglf/foo/bar/large.dat cloned.*/.hglf/foo/bar/large.dat (glob) --- cloned.*/.hglf/foo/bar/large.dat * (glob) +++ cloned.*/.hglf/foo/bar/large.dat * (glob) @@ -0,0 +1 @@ +2f6933b5ee0f5fdd823d9717d8729f3c2523811b diff -Npru cloned.*/.hglf/large.bin cloned.*/.hglf/large.bin (glob) --- cloned.*/.hglf/large.bin * (glob) +++ cloned.*/.hglf/large.bin * (glob) @@ -0,0 +1 @@ +7f7097b041ccf68cc5561e9600da4655d21c6d18 diff -Npru cloned.*/.hgsub cloned.*/.hgsub (glob) --- cloned.*/.hgsub * (glob) +++ cloned.*/.hgsub * (glob) @@ -1 +1,2 @@ sub1 = ../sub1 +sub3 = sub3 diff -Npru cloned.*/.hgsubstate cloned.*/.hgsubstate (glob) --- cloned.*/.hgsubstate * (glob) +++ cloned.*/.hgsubstate * (glob) @@ -1 +1,2 @@ -fc3b4ce2696f7741438c79207583768f2ce6b0dd sub1 +7a36fa02b66e61f27f3d4a822809f159479b8ab2 sub1 +b1a26de6f2a045a9f079323693614ee322f1ff7e sub3 diff -Npru cloned.*/foo/bar/def cloned.*/foo/bar/def (glob) --- cloned.*/foo/bar/def * (glob) +++ cloned.*/foo/bar/def * (glob) @@ -0,0 +1 @@ +changed diff -Npru cloned.*/main cloned.*/main (glob) --- cloned.*/main * (glob) +++ cloned.*/main * (glob) @@ -1 +1 @@ -main +foo diff -Npru cloned.*/sub1/.hgsubstate cloned.*/sub1/.hgsubstate 
(glob) --- cloned.*/sub1/.hgsubstate * (glob) +++ cloned.*/sub1/.hgsubstate * (glob) @@ -1 +1 @@ -c57a0840e3badd667ef3c3ef65471609acb2ba3c sub2 +c77908c81ccea3794a896c79e98b0e004aee2e9e sub2 diff -Npru cloned.*/sub1/sub2/folder/test.txt cloned.*/sub1/sub2/folder/test.txt (glob) --- cloned.*/sub1/sub2/folder/test.txt * (glob) +++ cloned.*/sub1/sub2/folder/test.txt * (glob) @@ -0,0 +1 @@ +subfolder diff -Npru cloned.*/sub1/sub2/sub2 cloned.*/sub1/sub2/sub2 (glob) --- cloned.*/sub1/sub2/sub2 * (glob) +++ cloned.*/sub1/sub2/sub2 * (glob) @@ -1 +1 @@ -sub2 +modified diff -Npru cloned.*/sub3/a.txt cloned.*/sub3/a.txt (glob) --- cloned.*/sub3/a.txt * (glob) +++ cloned.*/sub3/a.txt * (glob) @@ -0,0 +1 @@ +xyz [1] $ echo mod > sub1/sub2/sub2 $ hg --config extensions.extdiff= extdiff -S --- */cloned.*/sub1/sub2/sub2 * (glob) +++ */cloned/sub1/sub2/sub2 * (glob) @@ -1 +1 @@ -modified +mod [1] $ cd .. mercurial-3.7.3/tests/test-install.t0000644000175000017500000000646612676531525017115 0ustar mpmmpm00000000000000hg debuginstall $ hg debuginstall checking encoding (ascii)... checking Python executable (*) (glob) checking Python version (2.*) (glob) checking Python lib (*lib*)... (glob) checking installed modules (*mercurial)... (glob) checking templates (*mercurial?templates)... (glob) checking commit editor... checking username... no problems detected hg debuginstall with no username $ HGUSER= hg debuginstall checking encoding (ascii)... checking Python executable (*) (glob) checking Python version (2.*) (glob) checking Python lib (*lib*)... (glob) checking installed modules (*mercurial)... (glob) checking templates (*mercurial?templates)... (glob) checking commit editor... checking username... no username supplied (specify a username in your configuration file) 1 problems detected, please check your install! 
[1] path variables are expanded (~ is the same as $TESTTMP) $ mkdir tools $ touch tools/testeditor.exe #if execbit $ chmod 755 tools/testeditor.exe #endif $ hg debuginstall --config ui.editor=~/tools/testeditor.exe checking encoding (ascii)... checking Python executable (*) (glob) checking Python version (*) (glob) checking Python lib (*lib*)... (glob) checking installed modules (*mercurial)... (glob) checking templates (*mercurial?templates)... (glob) checking commit editor... checking username... no problems detected #if test-repo $ cat >> wixxml.py << EOF > import os, subprocess, sys > import xml.etree.ElementTree as ET > > # MSYS mangles the path if it expands $TESTDIR > testdir = os.environ['TESTDIR'] > ns = {'wix' : 'http://schemas.microsoft.com/wix/2006/wi'} > > def directory(node, relpath): > '''generator of files in the xml node, rooted at relpath''' > dirs = node.findall('./{%(wix)s}Directory' % ns) > > for d in dirs: > for subfile in directory(d, relpath + d.attrib['Name'] + '/'): > yield subfile > > files = node.findall('./{%(wix)s}Component/{%(wix)s}File' % ns) > > for f in files: > yield relpath + f.attrib['Name'] > > def hgdirectory(relpath): > '''generator of tracked files, rooted at relpath''' > hgdir = "%s/../mercurial" % (testdir) > args = ['hg', '--cwd', hgdir, 'files', relpath] > proc = subprocess.Popen(args, stdout=subprocess.PIPE, > stderr=subprocess.PIPE) > output = proc.communicate()[0] > > slash = '/' > for line in output.splitlines(): > if os.name == 'nt': > yield line.replace(os.sep, slash) > else: > yield line > > tracked = [f for f in hgdirectory(sys.argv[1])] > > xml = ET.parse("%s/../contrib/wix/%s.wxs" % (testdir, sys.argv[1])) > root = xml.getroot() > dir = root.find('.//{%(wix)s}DirectoryRef' % ns) > > installed = [f for f in directory(dir, '')] > > print('Not installed:') > for f in sorted(set(tracked) - set(installed)): > print(' %s' % f) > > print('Not tracked:') > for f in sorted(set(installed) - set(tracked)): > print(' %s' % 
f) > EOF $ python wixxml.py help Not installed: help/common.txt help/hg.1.txt help/hgignore.5.txt help/hgrc.5.txt Not tracked: $ python wixxml.py templates Not installed: Not tracked: #endif mercurial-3.7.3/tests/test-import-eol.t0000644000175000017500000000526212676531525017527 0ustar mpmmpm00000000000000 $ cat > makepatch.py < f = file('eol.diff', 'wb') > w = f.write > w('test message\n') > w('diff --git a/a b/a\n') > w('--- a/a\n') > w('+++ b/a\n') > w('@@ -1,5 +1,5 @@\n') > w(' a\n') > w('-bbb\r\n') > w('+yyyy\r\n') > w(' cc\r\n') > w(' \n') > w(' d\n') > w('-e\n') > w('\ No newline at end of file\n') > w('+z\r\n') > w('\ No newline at end of file\r\n') > EOF $ hg init repo $ cd repo $ echo '\.diff' > .hgignore Test different --eol values $ $PYTHON -c 'file("a", "wb").write("a\nbbb\ncc\n\nd\ne")' $ hg ci -Am adda adding .hgignore adding a $ python ../makepatch.py invalid eol $ hg --config patch.eol='LFCR' import eol.diff applying eol.diff abort: unsupported line endings type: LFCR [255] $ hg revert -a force LF $ hg --traceback --config patch.eol='LF' import eol.diff applying eol.diff $ cat a a yyyy cc d e (no-eol) $ hg st force CRLF $ hg up -C 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg --traceback --config patch.eol='CRLF' import eol.diff applying eol.diff $ cat a a\r (esc) yyyy\r (esc) cc\r (esc) \r (esc) d\r (esc) e (no-eol) $ hg st auto EOL on LF file $ hg up -C 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg --traceback --config patch.eol='auto' import eol.diff applying eol.diff $ cat a a yyyy cc d e (no-eol) $ hg st auto EOL on CRLF file $ $PYTHON -c 'file("a", "wb").write("a\r\nbbb\r\ncc\r\n\r\nd\r\ne")' $ hg commit -m 'switch EOLs in a' $ hg --traceback --config patch.eol='auto' import eol.diff applying eol.diff $ cat a a\r (esc) yyyy\r (esc) cc\r (esc) \r (esc) d\r (esc) e (no-eol) $ hg st auto EOL on new file or source without any EOL $ $PYTHON -c 'file("noeol", "wb").write("noeol")' $ hg add 
noeol $ hg commit -m 'add noeol' $ $PYTHON -c 'file("noeol", "wb").write("noeol\r\nnoeol\n")' $ $PYTHON -c 'file("neweol", "wb").write("neweol\nneweol\r\n")' $ hg add neweol $ hg diff --git > noeol.diff $ hg revert --no-backup noeol neweol $ rm neweol $ hg --traceback --config patch.eol='auto' import -m noeol noeol.diff applying noeol.diff $ cat noeol noeol\r (esc) noeol $ cat neweol neweol neweol\r (esc) $ hg st Test --eol and binary patches $ $PYTHON -c 'file("b", "wb").write("a\x00\nb\r\nd")' $ hg ci -Am addb adding b $ $PYTHON -c 'file("b", "wb").write("a\x00\nc\r\nd")' $ hg diff --git > bin.diff $ hg revert --no-backup b binary patch with --eol $ hg import --config patch.eol='CRLF' -m changeb bin.diff applying bin.diff $ cat b a\x00 (esc) c\r (esc) d (no-eol) $ hg st $ cd .. mercurial-3.7.3/tests/test-extdiff.t0000644000175000017500000002362012676531525017067 0ustar mpmmpm00000000000000 $ echo "[extensions]" >> $HGRCPATH $ echo "extdiff=" >> $HGRCPATH $ hg init a $ cd a $ echo a > a $ echo b > b $ hg add adding a adding b Should diff cloned directories: $ hg extdiff -o -r $opt Only in a: a Only in a: b [1] $ cat <> $HGRCPATH > [extdiff] > cmd.falabala = echo > opts.falabala = diffing > cmd.edspace = echo > opts.edspace = "name " > EOF $ hg falabala diffing a.000000000000 a [1] $ hg help falabala hg falabala [OPTION]... [FILE]... use 'echo' to diff repository (or selected files) Show differences between revisions for the specified files, using the 'echo' program. When two revision arguments are given, then changes are shown between those revisions. If only one revision is specified then that revision is compared to the working directory, and, when no revisions are specified, the working directory files are compared to its parent. 
options ([+] can be repeated): -o --option OPT [+] pass option to comparison program -r --rev REV [+] revision -c --change REV change made by revision --patch compare patches for two revisions -I --include PATTERN [+] include names matching the given patterns -X --exclude PATTERN [+] exclude names matching the given patterns -S --subrepos recurse into subrepositories (some details hidden, use --verbose to show complete help) $ hg ci -d '0 0' -mtest1 $ echo b >> a $ hg ci -d '1 0' -mtest2 Should diff cloned files directly: #if windows $ hg falabala -r 0:1 diffing "*\\extdiff.*\\a.8a5febb7f867\\a" "a.34eed99112ab\\a" (glob) [1] #else $ hg falabala -r 0:1 diffing */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) [1] #endif Specifying an empty revision should abort. $ hg extdiff --patch --rev 'ancestor()' --rev 1 abort: empty revision on one side of range [255] Test diff during merge: $ hg update -C 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo c >> c $ hg add c $ hg ci -m "new branch" -d '1 0' created new head $ hg merge 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) Should diff cloned file against wc file: #if windows $ hg falabala diffing "*\\extdiff.*\\a.2a13a4d2da36\\a" "*\\a\\a" (glob) [1] #else $ hg falabala diffing */extdiff.*/a.2a13a4d2da36/a */a/a (glob) [1] #endif Test --change option: $ hg ci -d '2 0' -mtest3 #if windows $ hg falabala -c 1 diffing "*\\extdiff.*\\a.8a5febb7f867\\a" "a.34eed99112ab\\a" (glob) [1] #else $ hg falabala -c 1 diffing */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) [1] #endif Check diff are made from the first parent: #if windows $ hg falabala -c 3 || echo "diff-like tools yield a non-zero exit code" diffing "*\\extdiff.*\\a.2a13a4d2da36\\a" "a.46c0e4daeb72\\a" (glob) diff-like tools yield a non-zero exit code #else $ hg falabala -c 3 || echo "diff-like tools yield a non-zero exit code" diffing */extdiff.*/a.2a13a4d2da36/a 
a.46c0e4daeb72/a (glob) diff-like tools yield a non-zero exit code #endif issue3153: ensure using extdiff with removed subrepos doesn't crash: $ hg init suba $ cd suba $ echo suba > suba $ hg add adding suba $ hg ci -m "adding suba file" $ cd .. $ echo suba=suba > .hgsub $ hg add adding .hgsub $ hg ci -Sm "adding subrepo" $ echo > .hgsub $ hg ci -m "removing subrepo" $ hg falabala -r 4 -r 5 -S diffing a.398e36faf9c6 a.5ab95fb166c4 [1] issue4463: usage of command line configuration without additional quoting $ cat <> $HGRCPATH > [extdiff] > cmd.4463a = echo > opts.4463a = a-naked 'single quoted' "double quoted" > 4463b = echo b-naked 'single quoted' "double quoted" > echo = > EOF $ hg update -q -C 0 $ echo a >> a #if windows $ hg --debug 4463a | grep '^running' running 'echo a-naked \'single quoted\' "double quoted" "*\\a" "*\\a"' in */extdiff.* (glob) $ hg --debug 4463b | grep '^running' running 'echo b-naked \'single quoted\' "double quoted" "*\\a" "*\\a"' in */extdiff.* (glob) $ hg --debug echo | grep '^running' running '*echo* "*\\a" "*\\a"' in */extdiff.* (glob) #else $ hg --debug 4463a | grep '^running' running 'echo a-naked \'single quoted\' "double quoted" */a $TESTTMP/a/a' in */extdiff.* (glob) $ hg --debug 4463b | grep '^running' running 'echo b-naked \'single quoted\' "double quoted" */a $TESTTMP/a/a' in */extdiff.* (glob) $ hg --debug echo | grep '^running' running '*echo */a $TESTTMP/a/a' in */extdiff.* (glob) #endif (getting options from other than extdiff section) $ cat <> $HGRCPATH > [extdiff] > # using diff-tools diffargs > 4463b2 = echo > # using merge-tools diffargs > 4463b3 = echo > # no diffargs > 4463b4 = echo > [diff-tools] > 4463b2.diffargs = b2-naked 'single quoted' "double quoted" > [merge-tools] > 4463b3.diffargs = b3-naked 'single quoted' "double quoted" > EOF #if windows $ hg --debug 4463b2 | grep '^running' running 'echo b2-naked \'single quoted\' "double quoted" "*\\a" "*\\a"' in */extdiff.* (glob) $ hg --debug 4463b3 | grep '^running' 
running 'echo b3-naked \'single quoted\' "double quoted" "*\\a" "*\\a"' in */extdiff.* (glob) $ hg --debug 4463b4 | grep '^running' running 'echo "*\\a" "*\\a"' in */extdiff.* (glob) $ hg --debug 4463b4 --option b4-naked --option 'being quoted' | grep '^running' running 'echo b4-naked "being quoted" "*\\a" "*\\a"' in */extdiff.* (glob) $ hg --debug extdiff -p echo --option echo-naked --option 'being quoted' | grep '^running' running 'echo echo-naked "being quoted" "*\\a" "*\\a"' in */extdiff.* (glob) #else $ hg --debug 4463b2 | grep '^running' running 'echo b2-naked \'single quoted\' "double quoted" */a $TESTTMP/a/a' in */extdiff.* (glob) $ hg --debug 4463b3 | grep '^running' running 'echo b3-naked \'single quoted\' "double quoted" */a $TESTTMP/a/a' in */extdiff.* (glob) $ hg --debug 4463b4 | grep '^running' running 'echo */a $TESTTMP/a/a' in */extdiff.* (glob) $ hg --debug 4463b4 --option b4-naked --option 'being quoted' | grep '^running' running "echo b4-naked 'being quoted' */a $TESTTMP/a/a" in */extdiff.* (glob) $ hg --debug extdiff -p echo --option echo-naked --option 'being quoted' | grep '^running' running "echo echo-naked 'being quoted' */a $TESTTMP/a/a" in */extdiff.* (glob) #endif $ touch 'sp ace' $ hg add 'sp ace' $ hg ci -m 'sp ace' created new head $ echo > 'sp ace' Test pre-72a89cf86fcd backward compatibility with half-baked manual quoting $ cat <> $HGRCPATH > [extdiff] > odd = > [merge-tools] > odd.diffargs = --foo='\$clabel' '\$clabel' "--bar=\$clabel" "\$clabel" > odd.executable = echo > EOF #if windows TODO #else $ hg --debug odd | grep '^running' running "*/echo --foo='sp ace' 'sp ace' --bar='sp ace' 'sp ace'" in * (glob) #endif Empty argument must be quoted $ cat <> $HGRCPATH > [extdiff] > kdiff3 = echo > [merge-tools] > kdiff3.diffargs=--L1 \$plabel1 --L2 \$clabel \$parent \$child > EOF #if windows $ hg --debug kdiff3 -r0 | grep '^running' running 'echo --L1 "@0" --L2 "" a.8a5febb7f867 a' in * (glob) #else $ hg --debug kdiff3 -r0 | grep 
'^running' running "echo --L1 '@0' --L2 '' a.8a5febb7f867 a" in * (glob) #endif #if execbit Test extdiff of multiple files in tmp dir: $ hg update -C 0 > /dev/null $ echo changed > a $ echo changed > b $ chmod +x b Diff in working directory, before: $ hg diff --git diff --git a/a b/a --- a/a +++ b/a @@ -1,1 +1,1 @@ -a +changed diff --git a/b b/b old mode 100644 new mode 100755 --- a/b +++ b/b @@ -1,1 +1,1 @@ -b +changed Edit with extdiff -p: Prepare custom diff/edit tool: $ cat > 'diff tool.py' << EOT > #!/usr/bin/env python > import time > time.sleep(1) # avoid unchanged-timestamp problems > file('a/a', 'ab').write('edited\n') > file('a/b', 'ab').write('edited\n') > EOT $ chmod +x 'diff tool.py' will change to /tmp/extdiff.TMP and populate directories a.TMP and a and start tool $ hg extdiff -p "`pwd`/diff tool.py" [1] Diff in working directory, after: $ hg diff --git diff --git a/a b/a --- a/a +++ b/a @@ -1,1 +1,2 @@ -a +changed +edited diff --git a/b b/b old mode 100644 new mode 100755 --- a/b +++ b/b @@ -1,1 +1,2 @@ -b +changed +edited Test extdiff with --option: $ hg extdiff -p echo -o this -c 1 this */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) [1] $ hg falabala -o this -c 1 diffing this */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) [1] Test extdiff's handling of options with spaces in them: $ hg edspace -c 1 name */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) [1] $ hg extdiff -p echo -o "name " -c 1 name */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) [1] Test with revsets: $ hg extdif -p echo -c "rev(1)" */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) [1] $ hg extdif -p echo -r "0::1" */extdiff.*/a.8a5febb7f867/a a.34eed99112ab/a (glob) [1] Fallback to merge-tools.tool.executable|regkey $ mkdir dir $ cat > 'dir/tool.sh' << EOF > #!/bin/sh > echo "** custom diff **" > EOF $ chmod +x dir/tool.sh $ tool=`pwd`/dir/tool.sh $ hg --debug tl --config extdiff.tl= --config merge-tools.tl.executable=$tool making snapshot of 2 files from 
rev * (glob) a b making snapshot of 2 files from working directory a b running '$TESTTMP/a/dir/tool.sh a.* a' in */extdiff.* (glob) ** custom diff ** cleaning up temp directory [1] $ cd .. #endif #if symlink Test symlinks handling (issue1909) $ hg init testsymlinks $ cd testsymlinks $ echo a > a $ hg ci -Am adda adding a $ echo a >> a $ ln -s missing linka $ hg add linka $ hg falabala -r 0 --traceback diffing testsymlinks.07f494440405 testsymlinks [1] $ cd .. #endif mercurial-3.7.3/tests/test-mq-header-date.t0000644000175000017500000003650712676531525020224 0ustar mpmmpm00000000000000 $ cat <> $HGRCPATH > [extensions] > mq = > [diff] > nodates = true > EOF $ catpatch() { > cat .hg/patches/$1.patch | sed -e "s/^diff \-r [0-9a-f]* /diff -r ... /" \ > -e "s/^\(# Parent \).*/\1/" > } $ catlog() { > catpatch $1 > hg log --template "{rev}: {desc} - {author}\n" > } $ catlogd() { > catpatch $1 > hg log --template "{rev}: {desc} - {author} - {date}\n" > } $ drop() { > hg qpop > hg qdel $1.patch > } $ runtest() { > echo ==== init > hg init a > cd a > hg qinit > > > echo ==== qnew -d > hg qnew -d '3 0' 1.patch > catlogd 1 > > echo ==== qref > echo "1" >1 > hg add > hg qref > catlogd 1 > > echo ==== qref -d > hg qref -d '4 0' > catlogd 1 > > > echo ==== qnew > hg qnew 2.patch > echo "2" >2 > hg add > hg qref > catlog 2 > > echo ==== qref -d > hg qref -d '5 0' > catlog 2 > > drop 2 > > > echo ==== qnew -d -m > hg qnew -d '6 0' -m "Three" 3.patch > catlogd 3 > > echo ==== qref > echo "3" >3 > hg add > hg qref > catlogd 3 > > echo ==== qref -m > hg qref -m "Drei" > catlogd 3 > > echo ==== qref -d > hg qref -d '7 0' > catlogd 3 > > echo ==== qref -d -m > hg qref -d '8 0' -m "Three (again)" > catlogd 3 > > > echo ==== qnew -m > hg qnew -m "Four" 4.patch > echo "4" >4 > hg add > hg qref > catlog 4 > > echo ==== qref -d > hg qref -d '9 0' > catlog 4 > > drop 4 > > > echo ==== qnew with HG header > hg qnew --config 'mq.plain=true' 5.patch > hg qpop > echo "# HG changeset patch" 
>>.hg/patches/5.patch > echo "# Date 10 0" >>.hg/patches/5.patch > hg qpush 2>&1 | grep 'Now at' > catlogd 5 > > echo ==== hg qref > echo "5" >5 > hg add > hg qref > catlogd 5 > > echo ==== hg qref -d > hg qref -d '11 0' > catlogd 5 > > > echo ==== qnew with plain header > hg qnew --config 'mq.plain=true' -d '12 0' 6.patch > hg qpop > hg qpush 2>&1 | grep 'now at' > catlog 6 > > echo ==== hg qref > echo "6" >6 > hg add > hg qref > catlogd 6 > > echo ==== hg qref -d > hg qref -d '13 0' > catlogd 6 > > drop 6 > > > echo ==== qnew -u > hg qnew -u jane 6.patch > echo "6" >6 > hg add > hg qref > catlog 6 > > echo ==== qref -d > hg qref -d '12 0' > catlog 6 > > drop 6 > > > echo ==== qnew -d > hg qnew -d '13 0' 7.patch > echo "7" >7 > hg add > hg qref > catlog 7 > > echo ==== qref -u > hg qref -u john > catlogd 7 > > > echo ==== qnew > hg qnew 8.patch > echo "8" >8 > hg add > hg qref > catlog 8 > > echo ==== qref -u -d > hg qref -u john -d '14 0' > catlog 8 > > drop 8 > > > echo ==== qnew -m > hg qnew -m "Nine" 9.patch > echo "9" >9 > hg add > hg qref > catlog 9 > > echo ==== qref -u -d > hg qref -u john -d '15 0' > catlog 9 > > drop 9 > > > echo ==== "qpop -a / qpush -a" > hg qpop -a > hg qpush -a > hg log --template "{rev}: {desc} - {author} - {date}\n" > } ======= plain headers $ echo "[mq]" >> $HGRCPATH $ echo "plain=true" >> $HGRCPATH $ mkdir sandbox $ (cd sandbox ; runtest) ==== init ==== qnew -d Date: 3 0 0: [mq]: 1.patch - test - 3.00 ==== qref adding 1 Date: 3 0 diff -r ... 1 --- /dev/null +++ b/1 @@ -0,0 +1,1 @@ +1 0: [mq]: 1.patch - test - 3.00 ==== qref -d Date: 4 0 diff -r ... 1 --- /dev/null +++ b/1 @@ -0,0 +1,1 @@ +1 0: [mq]: 1.patch - test - 4.00 ==== qnew adding 2 diff -r ... 2 --- /dev/null +++ b/2 @@ -0,0 +1,1 @@ +2 1: [mq]: 2.patch - test 0: [mq]: 1.patch - test ==== qref -d Date: 5 0 diff -r ... 
2 --- /dev/null +++ b/2 @@ -0,0 +1,1 @@ +2 1: [mq]: 2.patch - test 0: [mq]: 1.patch - test popping 2.patch now at: 1.patch ==== qnew -d -m Date: 6 0 Three 1: Three - test - 6.00 0: [mq]: 1.patch - test - 4.00 ==== qref adding 3 Date: 6 0 Three diff -r ... 3 --- /dev/null +++ b/3 @@ -0,0 +1,1 @@ +3 1: Three - test - 6.00 0: [mq]: 1.patch - test - 4.00 ==== qref -m Date: 6 0 Drei diff -r ... 3 --- /dev/null +++ b/3 @@ -0,0 +1,1 @@ +3 1: Drei - test - 6.00 0: [mq]: 1.patch - test - 4.00 ==== qref -d Date: 7 0 Drei diff -r ... 3 --- /dev/null +++ b/3 @@ -0,0 +1,1 @@ +3 1: Drei - test - 7.00 0: [mq]: 1.patch - test - 4.00 ==== qref -d -m Date: 8 0 Three (again) diff -r ... 3 --- /dev/null +++ b/3 @@ -0,0 +1,1 @@ +3 1: Three (again) - test - 8.00 0: [mq]: 1.patch - test - 4.00 ==== qnew -m adding 4 Four diff -r ... 4 --- /dev/null +++ b/4 @@ -0,0 +1,1 @@ +4 2: Four - test 1: Three (again) - test 0: [mq]: 1.patch - test ==== qref -d Date: 9 0 Four diff -r ... 4 --- /dev/null +++ b/4 @@ -0,0 +1,1 @@ +4 2: Four - test 1: Three (again) - test 0: [mq]: 1.patch - test popping 4.patch now at: 3.patch ==== qnew with HG header popping 5.patch now at: 3.patch # HG changeset patch # Date 10 0 2: imported patch 5.patch - test - 10.00 1: Three (again) - test - 8.00 0: [mq]: 1.patch - test - 4.00 ==== hg qref adding 5 # HG changeset patch # Date 10 0 # Parent diff -r ... 5 --- /dev/null +++ b/5 @@ -0,0 +1,1 @@ +5 2: [mq]: 5.patch - test - 10.00 1: Three (again) - test - 8.00 0: [mq]: 1.patch - test - 4.00 ==== hg qref -d # HG changeset patch # Date 11 0 # Parent diff -r ... 5 --- /dev/null +++ b/5 @@ -0,0 +1,1 @@ +5 2: [mq]: 5.patch - test - 11.00 1: Three (again) - test - 8.00 0: [mq]: 1.patch - test - 4.00 ==== qnew with plain header popping 6.patch now at: 5.patch now at: 6.patch Date: 12 0 3: imported patch 6.patch - test 2: [mq]: 5.patch - test 1: Three (again) - test 0: [mq]: 1.patch - test ==== hg qref adding 6 Date: 12 0 diff -r ... 
6 --- /dev/null +++ b/6 @@ -0,0 +1,1 @@ +6 3: [mq]: 6.patch - test - 12.00 2: [mq]: 5.patch - test - 11.00 1: Three (again) - test - 8.00 0: [mq]: 1.patch - test - 4.00 ==== hg qref -d Date: 13 0 diff -r ... 6 --- /dev/null +++ b/6 @@ -0,0 +1,1 @@ +6 3: [mq]: 6.patch - test - 13.00 2: [mq]: 5.patch - test - 11.00 1: Three (again) - test - 8.00 0: [mq]: 1.patch - test - 4.00 popping 6.patch now at: 5.patch ==== qnew -u adding 6 From: jane diff -r ... 6 --- /dev/null +++ b/6 @@ -0,0 +1,1 @@ +6 3: [mq]: 6.patch - jane 2: [mq]: 5.patch - test 1: Three (again) - test 0: [mq]: 1.patch - test ==== qref -d From: jane Date: 12 0 diff -r ... 6 --- /dev/null +++ b/6 @@ -0,0 +1,1 @@ +6 3: [mq]: 6.patch - jane 2: [mq]: 5.patch - test 1: Three (again) - test 0: [mq]: 1.patch - test popping 6.patch now at: 5.patch ==== qnew -d adding 7 Date: 13 0 diff -r ... 7 --- /dev/null +++ b/7 @@ -0,0 +1,1 @@ +7 3: [mq]: 7.patch - test 2: [mq]: 5.patch - test 1: Three (again) - test 0: [mq]: 1.patch - test ==== qref -u From: john Date: 13 0 diff -r ... 7 --- /dev/null +++ b/7 @@ -0,0 +1,1 @@ +7 3: [mq]: 7.patch - john - 13.00 2: [mq]: 5.patch - test - 11.00 1: Three (again) - test - 8.00 0: [mq]: 1.patch - test - 4.00 ==== qnew adding 8 diff -r ... 8 --- /dev/null +++ b/8 @@ -0,0 +1,1 @@ +8 4: [mq]: 8.patch - test 3: [mq]: 7.patch - john 2: [mq]: 5.patch - test 1: Three (again) - test 0: [mq]: 1.patch - test ==== qref -u -d From: john Date: 14 0 diff -r ... 8 --- /dev/null +++ b/8 @@ -0,0 +1,1 @@ +8 4: [mq]: 8.patch - john 3: [mq]: 7.patch - john 2: [mq]: 5.patch - test 1: Three (again) - test 0: [mq]: 1.patch - test popping 8.patch now at: 7.patch ==== qnew -m adding 9 Nine diff -r ... 9 --- /dev/null +++ b/9 @@ -0,0 +1,1 @@ +9 4: Nine - test 3: [mq]: 7.patch - john 2: [mq]: 5.patch - test 1: Three (again) - test 0: [mq]: 1.patch - test ==== qref -u -d From: john Date: 15 0 Nine diff -r ... 
9 --- /dev/null +++ b/9 @@ -0,0 +1,1 @@ +9 4: Nine - john 3: [mq]: 7.patch - john 2: [mq]: 5.patch - test 1: Three (again) - test 0: [mq]: 1.patch - test popping 9.patch now at: 7.patch ==== qpop -a / qpush -a popping 7.patch popping 5.patch popping 3.patch popping 1.patch patch queue now empty applying 1.patch applying 3.patch applying 5.patch applying 7.patch now at: 7.patch 3: imported patch 7.patch - john - 13.00 2: imported patch 5.patch - test - 11.00 1: Three (again) - test - 8.00 0: imported patch 1.patch - test - 4.00 $ rm -r sandbox ======= hg headers $ echo "plain=false" >> $HGRCPATH $ mkdir sandbox $ (cd sandbox ; runtest) ==== init ==== qnew -d # HG changeset patch # Date 3 0 # Parent 0: [mq]: 1.patch - test - 3.00 ==== qref adding 1 # HG changeset patch # Date 3 0 # Parent diff -r ... 1 --- /dev/null +++ b/1 @@ -0,0 +1,1 @@ +1 0: [mq]: 1.patch - test - 3.00 ==== qref -d # HG changeset patch # Date 4 0 # Parent diff -r ... 1 --- /dev/null +++ b/1 @@ -0,0 +1,1 @@ +1 0: [mq]: 1.patch - test - 4.00 ==== qnew adding 2 # HG changeset patch # Parent diff -r ... 2 --- /dev/null +++ b/2 @@ -0,0 +1,1 @@ +2 1: [mq]: 2.patch - test 0: [mq]: 1.patch - test ==== qref -d # HG changeset patch # Date 5 0 # Parent diff -r ... 2 --- /dev/null +++ b/2 @@ -0,0 +1,1 @@ +2 1: [mq]: 2.patch - test 0: [mq]: 1.patch - test popping 2.patch now at: 1.patch ==== qnew -d -m # HG changeset patch # Date 6 0 # Parent Three 1: Three - test - 6.00 0: [mq]: 1.patch - test - 4.00 ==== qref adding 3 # HG changeset patch # Date 6 0 # Parent Three diff -r ... 3 --- /dev/null +++ b/3 @@ -0,0 +1,1 @@ +3 1: Three - test - 6.00 0: [mq]: 1.patch - test - 4.00 ==== qref -m # HG changeset patch # Date 6 0 # Parent Drei diff -r ... 3 --- /dev/null +++ b/3 @@ -0,0 +1,1 @@ +3 1: Drei - test - 6.00 0: [mq]: 1.patch - test - 4.00 ==== qref -d # HG changeset patch # Date 7 0 # Parent Drei diff -r ... 
3 --- /dev/null +++ b/3 @@ -0,0 +1,1 @@ +3 1: Drei - test - 7.00 0: [mq]: 1.patch - test - 4.00 ==== qref -d -m # HG changeset patch # Date 8 0 # Parent Three (again) diff -r ... 3 --- /dev/null +++ b/3 @@ -0,0 +1,1 @@ +3 1: Three (again) - test - 8.00 0: [mq]: 1.patch - test - 4.00 ==== qnew -m adding 4 # HG changeset patch # Parent Four diff -r ... 4 --- /dev/null +++ b/4 @@ -0,0 +1,1 @@ +4 2: Four - test 1: Three (again) - test 0: [mq]: 1.patch - test ==== qref -d # HG changeset patch # Date 9 0 # Parent Four diff -r ... 4 --- /dev/null +++ b/4 @@ -0,0 +1,1 @@ +4 2: Four - test 1: Three (again) - test 0: [mq]: 1.patch - test popping 4.patch now at: 3.patch ==== qnew with HG header popping 5.patch now at: 3.patch # HG changeset patch # Date 10 0 2: imported patch 5.patch - test - 10.00 1: Three (again) - test - 8.00 0: [mq]: 1.patch - test - 4.00 ==== hg qref adding 5 # HG changeset patch # Date 10 0 # Parent diff -r ... 5 --- /dev/null +++ b/5 @@ -0,0 +1,1 @@ +5 2: [mq]: 5.patch - test - 10.00 1: Three (again) - test - 8.00 0: [mq]: 1.patch - test - 4.00 ==== hg qref -d # HG changeset patch # Date 11 0 # Parent diff -r ... 5 --- /dev/null +++ b/5 @@ -0,0 +1,1 @@ +5 2: [mq]: 5.patch - test - 11.00 1: Three (again) - test - 8.00 0: [mq]: 1.patch - test - 4.00 ==== qnew with plain header popping 6.patch now at: 5.patch now at: 6.patch Date: 12 0 3: imported patch 6.patch - test 2: [mq]: 5.patch - test 1: Three (again) - test 0: [mq]: 1.patch - test ==== hg qref adding 6 Date: 12 0 diff -r ... 6 --- /dev/null +++ b/6 @@ -0,0 +1,1 @@ +6 3: [mq]: 6.patch - test - 12.00 2: [mq]: 5.patch - test - 11.00 1: Three (again) - test - 8.00 0: [mq]: 1.patch - test - 4.00 ==== hg qref -d Date: 13 0 diff -r ... 
6 --- /dev/null +++ b/6 @@ -0,0 +1,1 @@ +6 3: [mq]: 6.patch - test - 13.00 2: [mq]: 5.patch - test - 11.00 1: Three (again) - test - 8.00 0: [mq]: 1.patch - test - 4.00 popping 6.patch now at: 5.patch ==== qnew -u adding 6 # HG changeset patch # User jane # Parent diff -r ... 6 --- /dev/null +++ b/6 @@ -0,0 +1,1 @@ +6 3: [mq]: 6.patch - jane 2: [mq]: 5.patch - test 1: Three (again) - test 0: [mq]: 1.patch - test ==== qref -d # HG changeset patch # User jane # Date 12 0 # Parent diff -r ... 6 --- /dev/null +++ b/6 @@ -0,0 +1,1 @@ +6 3: [mq]: 6.patch - jane 2: [mq]: 5.patch - test 1: Three (again) - test 0: [mq]: 1.patch - test popping 6.patch now at: 5.patch ==== qnew -d adding 7 # HG changeset patch # Date 13 0 # Parent diff -r ... 7 --- /dev/null +++ b/7 @@ -0,0 +1,1 @@ +7 3: [mq]: 7.patch - test 2: [mq]: 5.patch - test 1: Three (again) - test 0: [mq]: 1.patch - test ==== qref -u # HG changeset patch # User john # Date 13 0 # Parent diff -r ... 7 --- /dev/null +++ b/7 @@ -0,0 +1,1 @@ +7 3: [mq]: 7.patch - john - 13.00 2: [mq]: 5.patch - test - 11.00 1: Three (again) - test - 8.00 0: [mq]: 1.patch - test - 4.00 ==== qnew adding 8 # HG changeset patch # Parent diff -r ... 8 --- /dev/null +++ b/8 @@ -0,0 +1,1 @@ +8 4: [mq]: 8.patch - test 3: [mq]: 7.patch - john 2: [mq]: 5.patch - test 1: Three (again) - test 0: [mq]: 1.patch - test ==== qref -u -d # HG changeset patch # User john # Date 14 0 # Parent diff -r ... 8 --- /dev/null +++ b/8 @@ -0,0 +1,1 @@ +8 4: [mq]: 8.patch - john 3: [mq]: 7.patch - john 2: [mq]: 5.patch - test 1: Three (again) - test 0: [mq]: 1.patch - test popping 8.patch now at: 7.patch ==== qnew -m adding 9 # HG changeset patch # Parent Nine diff -r ... 9 --- /dev/null +++ b/9 @@ -0,0 +1,1 @@ +9 4: Nine - test 3: [mq]: 7.patch - john 2: [mq]: 5.patch - test 1: Three (again) - test 0: [mq]: 1.patch - test ==== qref -u -d # HG changeset patch # User john # Date 15 0 # Parent Nine diff -r ... 
9 --- /dev/null +++ b/9 @@ -0,0 +1,1 @@ +9 4: Nine - john 3: [mq]: 7.patch - john 2: [mq]: 5.patch - test 1: Three (again) - test 0: [mq]: 1.patch - test popping 9.patch now at: 7.patch ==== qpop -a / qpush -a popping 7.patch popping 5.patch popping 3.patch popping 1.patch patch queue now empty applying 1.patch applying 3.patch applying 5.patch applying 7.patch now at: 7.patch 3: imported patch 7.patch - john - 13.00 2: imported patch 5.patch - test - 11.00 1: Three (again) - test - 8.00 0: imported patch 1.patch - test - 4.00 $ rm -r sandbox mercurial-3.7.3/tests/test-import-context.t0000644000175000017500000000441212676531525020430 0ustar mpmmpm00000000000000Test applying context diffs $ cat > writepatterns.py < import sys > > path = sys.argv[1] > lasteol = sys.argv[2] == '1' > patterns = sys.argv[3:] > > fp = file(path, 'wb') > for i, pattern in enumerate(patterns): > count = int(pattern[0:-1]) > char = pattern[-1] + '\n' > if not lasteol and i == len(patterns) - 1: > fp.write((char*count)[:-1]) > else: > fp.write(char*count) > fp.close() > EOF $ cat > cat.py < import sys > sys.stdout.write(repr(file(sys.argv[1], 'rb').read()) + '\n') > EOF Initialize the test repository $ hg init repo $ cd repo $ python ../writepatterns.py a 0 5A 1B 5C 1D $ python ../writepatterns.py b 1 1A 1B $ python ../writepatterns.py c 1 5A $ python ../writepatterns.py d 1 5A 1B $ hg add adding a adding b adding c adding d $ hg ci -m addfiles Add file, missing a last end of line $ hg import --no-commit - < *** /dev/null 2010-10-16 18:05:49.000000000 +0200 > --- b/newnoeol 2010-10-16 18:23:26.000000000 +0200 > *************** > *** 0 **** > --- 1,2 ---- > + a > + b > \ No newline at end of file > *** a/a Sat Oct 16 16:35:51 2010 > --- b/a Sat Oct 16 16:35:51 2010 > *************** > *** 3,12 **** > A > A > A > ! B > C > C > C > C > C > ! D > \ No newline at end of file > --- 3,13 ---- > A > A > A > ! E > C > C > C > C > C > ! F > ! 
F > > *** a/b 2010-10-16 18:40:38.000000000 +0200 > --- /dev/null 2010-10-16 18:05:49.000000000 +0200 > *************** > *** 1,2 **** > - A > - B > --- 0 ---- > *** a/c Sat Oct 16 21:34:26 2010 > --- b/c Sat Oct 16 21:34:27 2010 > *************** > *** 3,5 **** > --- 3,7 ---- > A > A > A > + B > + B > *** a/d Sat Oct 16 21:47:20 2010 > --- b/d Sat Oct 16 21:47:22 2010 > *************** > *** 2,6 **** > A > A > A > - A > - B > --- 2,4 ---- > EOF applying patch from stdin $ hg st M a M c M d A newnoeol R b What's in a $ python ../cat.py a 'A\nA\nA\nA\nA\nE\nC\nC\nC\nC\nC\nF\nF\n' $ python ../cat.py newnoeol 'a\nb' $ python ../cat.py c 'A\nA\nA\nA\nA\nB\nB\n' $ python ../cat.py d 'A\nA\nA\nA\n' $ cd .. mercurial-3.7.3/tests/test-hook.t0000644000175000017500000006311012676531525016374 0ustar mpmmpm00000000000000commit hooks can see env vars (and post-transaction one are run unlocked) $ cat << EOF >> $HGRCPATH > [experimental] > # drop me once bundle2 is the default, > # added to get test change early. 
> bundle2-exp = True > EOF $ cat > $TESTTMP/txnabort.checkargs.py < def showargs(ui, repo, hooktype, **kwargs): > ui.write('%s python hook: %s\n' % (hooktype, ','.join(sorted(kwargs)))) > EOF $ hg init a $ cd a $ cat > .hg/hgrc < [hooks] > commit = sh -c "HG_LOCAL= HG_TAG= printenv.py commit" > commit.b = sh -c "HG_LOCAL= HG_TAG= printenv.py commit.b" > precommit = sh -c "HG_LOCAL= HG_NODE= HG_TAG= printenv.py precommit" > pretxncommit = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxncommit" > pretxncommit.tip = hg -q tip > pre-identify = printenv.py pre-identify 1 > pre-cat = printenv.py pre-cat > post-cat = printenv.py post-cat > pretxnopen = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxnopen" > pretxnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py pretxnclose" > txnclose = sh -c "HG_LOCAL= HG_TAG= printenv.py txnclose" > txnabort.0 = python:$TESTTMP/txnabort.checkargs.py:showargs > txnabort.1 = sh -c "HG_LOCAL= HG_TAG= printenv.py txnabort" > txnclose.checklock = sh -c "hg debuglock > /dev/null" > EOF $ echo a > a $ hg add a $ hg commit -m a precommit hook: HG_PARENT1=0000000000000000000000000000000000000000 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob) pretxncommit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 HG_PENDING=$TESTTMP/a 0:cb9a9f314b8b pretxnclose hook: HG_PENDING=$TESTTMP/a HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob) txnclose hook: HG_PHASES_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=commit (glob) commit hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 commit.b hook: HG_NODE=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PARENT1=0000000000000000000000000000000000000000 $ hg clone . 
../b updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd ../b changegroup hooks can see env vars $ cat > .hg/hgrc < [hooks] > prechangegroup = printenv.py prechangegroup > changegroup = printenv.py changegroup > incoming = printenv.py incoming > EOF pretxncommit and commit hooks can see both parents of merge $ cd ../a $ echo b >> a $ hg commit -m a1 -d "1 0" precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob) pretxncommit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a 1:ab228980c14d pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob) txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob) commit hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b commit.b hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b $ hg update -C 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo b > b $ hg add b $ hg commit -m b -d '1 0' precommit hook: HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob) pretxncommit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b HG_PENDING=$TESTTMP/a 2:ee9deb46ab31 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob) created new head txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob) commit hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b commit.b hook: HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT1=cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b $ hg merge 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg commit -m merge 
-d '2 0' precommit hook: HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob) pretxncommit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd HG_PENDING=$TESTTMP/a 3:07f3376c1e65 pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob) txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob) commit hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd commit.b hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PARENT1=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_PARENT2=ab228980c14deea8b9555d91c9581127383e40fd test generic hooks $ hg id pre-identify hook: HG_ARGS=id HG_OPTS={'bookmarks': None, 'branch': None, 'id': None, 'insecure': None, 'num': None, 'remotecmd': '', 'rev': '', 'ssh': '', 'tags': None} HG_PATS=[] abort: pre-identify hook exited with status 1 [255] $ hg cat b pre-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] b post-cat hook: HG_ARGS=cat b HG_OPTS={'decode': None, 'exclude': [], 'include': [], 'output': '', 'rev': ''} HG_PATS=['b'] HG_RESULT=0 $ cd ../b $ hg pull ../a pulling from ../a searching for changes prechangegroup hook: HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob) adding changesets adding manifests adding file changes added 3 changesets with 2 changes to 2 files changegroup hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_NODE_LAST=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob) incoming hook: HG_NODE=ab228980c14deea8b9555d91c9581127383e40fd HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob) incoming hook: 
HG_NODE=ee9deb46ab31e4cc3310f3cf0c3d668e4d8fffc2 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob) incoming hook: HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob) (run 'hg update' to get a working copy) tag hooks can see env vars $ cd ../a $ cat >> .hg/hgrc < pretag = printenv.py pretag > tag = sh -c "HG_PARENT1= HG_PARENT2= printenv.py tag" > EOF $ hg tag -d '3 0' a pretag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a precommit hook: HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob) pretxncommit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 HG_PENDING=$TESTTMP/a 4:539e4b31b6dc pretxnclose hook: HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=commit (glob) tag hook: HG_LOCAL=0 HG_NODE=07f3376c1e655977439df2a814e3cc14b27abac2 HG_TAG=a txnclose hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob) commit hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 commit.b hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PARENT1=07f3376c1e655977439df2a814e3cc14b27abac2 $ hg tag -l la pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la tag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=la pretag hook can forbid tagging $ echo "pretag.forbid = printenv.py pretag.forbid 1" >> .hg/hgrc $ hg tag -d '4 0' fa pretag hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa pretag.forbid hook: HG_LOCAL=0 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fa abort: pretag.forbid hook exited with status 1 [255] $ hg tag -l fla pretag hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla pretag.forbid hook: HG_LOCAL=1 HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_TAG=fla abort: pretag.forbid hook exited with 
status 1 [255] pretxncommit hook can see changeset, can roll back txn, changeset no more there after $ echo "pretxncommit.forbid0 = hg tip -q" >> .hg/hgrc $ echo "pretxncommit.forbid1 = printenv.py pretxncommit.forbid 1" >> .hg/hgrc $ echo z > z $ hg add z $ hg -q tip 4:539e4b31b6dc $ hg commit -m 'fail' -d '4 0' precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob) pretxncommit hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a 5:6f611f8018c1 5:6f611f8018c1 pretxncommit.forbid hook: HG_NODE=6f611f8018c10e827fee6bd2bc807f937e761567 HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/a transaction abort! txnabort python hook: txnid,txnname txnabort hook: HG_TXNID=TXN:* HG_TXNNAME=commit (glob) rollback completed abort: pretxncommit.forbid1 hook exited with status 1 [255] $ hg -q tip 4:539e4b31b6dc (Check that no 'changelog.i.a' file were left behind) $ ls -1 .hg/store/ 00changelog.i 00manifest.i data fncache journal.phaseroots phaseroots undo undo.backup.fncache undo.backupfiles undo.phaseroots precommit hook can prevent commit $ echo "precommit.forbid = printenv.py precommit.forbid 1" >> .hg/hgrc $ hg commit -m 'fail' -d '4 0' precommit hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 precommit.forbid hook: HG_PARENT1=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 abort: precommit.forbid hook exited with status 1 [255] $ hg -q tip 4:539e4b31b6dc preupdate hook can prevent update $ echo "preupdate = printenv.py preupdate" >> .hg/hgrc $ hg update 1 preupdate hook: HG_PARENT1=ab228980c14d 0 files updated, 0 files merged, 2 files removed, 0 files unresolved update hook $ echo "update = printenv.py update" >> .hg/hgrc $ hg update preupdate hook: HG_PARENT1=539e4b31b6dc update hook: HG_ERROR=0 HG_PARENT1=539e4b31b6dc 2 files updated, 0 files merged, 0 files removed, 0 files unresolved pushkey hook $ 
echo "pushkey = printenv.py pushkey" >> .hg/hgrc $ cd ../b $ hg bookmark -r null foo $ hg push -B foo ../a pushing to ../a searching for changes no changes found pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=push (glob) pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_PENDING=$TESTTMP/a HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=push (glob) pushkey hook: HG_KEY=foo HG_NAMESPACE=bookmarks HG_NEW=0000000000000000000000000000000000000000 HG_RET=1 txnclose hook: HG_BOOKMARK_MOVED=1 HG_BUNDLE2=1 HG_SOURCE=push HG_TXNID=TXN:* HG_TXNNAME=push HG_URL=push (glob) exporting bookmark foo [1] $ cd ../a listkeys hook $ echo "listkeys = printenv.py listkeys" >> .hg/hgrc $ hg bookmark -r null bar pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob) pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob) txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob) $ cd ../b $ hg pull -B bar ../a pulling from ../a listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'} no changes found listkeys hook: HG_NAMESPACE=phase HG_VALUES={} adding remote bookmark bar listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'} $ cd ../a test that prepushkey can prevent incoming keys $ echo "prepushkey = printenv.py prepushkey.forbid 1" >> .hg/hgrc $ cd ../b $ hg bookmark -r null baz $ hg push -B baz ../a pushing to ../a searching for changes listkeys hook: HG_NAMESPACE=phases HG_VALUES={'cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b': '1', 'publishing': 'True'} listkeys hook: HG_NAMESPACE=bookmarks HG_VALUES={'bar': '0000000000000000000000000000000000000000', 'foo': '0000000000000000000000000000000000000000'} no changes found pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=push (glob) prepushkey.forbid hook: HG_BUNDLE2=1 HG_KEY=baz HG_NAMESPACE=bookmarks 
HG_NEW=0000000000000000000000000000000000000000 HG_SOURCE=push HG_TXNID=TXN:* HG_URL=push (glob) pushkey-abort: prepushkey hook exited with status 1 abort: exporting bookmark baz failed! [255] $ cd ../a test that prelistkeys can prevent listing keys $ echo "prelistkeys = printenv.py prelistkeys.forbid 1" >> .hg/hgrc $ hg bookmark -r null quux pretxnopen hook: HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob) pretxnclose hook: HG_BOOKMARK_MOVED=1 HG_PENDING=$TESTTMP/a HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob) txnclose hook: HG_BOOKMARK_MOVED=1 HG_TXNID=TXN:* HG_TXNNAME=bookmark (glob) $ cd ../b $ hg pull -B quux ../a pulling from ../a prelistkeys.forbid hook: HG_NAMESPACE=bookmarks abort: prelistkeys hook exited with status 1 [255] $ cd ../a $ rm .hg/hgrc prechangegroup hook can prevent incoming changes $ cd ../b $ hg -q tip 3:07f3376c1e65 $ cat > .hg/hgrc < [hooks] > prechangegroup.forbid = printenv.py prechangegroup.forbid 1 > EOF $ hg pull ../a pulling from ../a searching for changes prechangegroup.forbid hook: HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob) abort: prechangegroup.forbid hook exited with status 1 [255] pretxnchangegroup hook can see incoming changes, can roll back txn, incoming changes no longer there after $ cat > .hg/hgrc < [hooks] > pretxnchangegroup.forbid0 = hg tip -q > pretxnchangegroup.forbid1 = printenv.py pretxnchangegroup.forbid 1 > EOF $ hg pull ../a pulling from ../a searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files 4:539e4b31b6dc pretxnchangegroup.forbid hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_NODE_LAST=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_PENDING=$TESTTMP/b HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=file:$TESTTMP/a (glob) transaction abort! 
rollback completed abort: pretxnchangegroup.forbid1 hook exited with status 1 [255] $ hg -q tip 3:07f3376c1e65 outgoing hooks can see env vars $ rm .hg/hgrc $ cat > ../a/.hg/hgrc < [hooks] > preoutgoing = printenv.py preoutgoing > outgoing = printenv.py outgoing > EOF $ hg pull ../a pulling from ../a searching for changes preoutgoing hook: HG_SOURCE=pull outgoing hook: HG_NODE=539e4b31b6dc99b3cfbaa6b53cbc1c1f9a1e3a10 HG_SOURCE=pull adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files adding remote bookmark quux (run 'hg update' to get a working copy) $ hg rollback repository tip rolled back to revision 3 (undo pull) preoutgoing hook can prevent outgoing changes $ echo "preoutgoing.forbid = printenv.py preoutgoing.forbid 1" >> ../a/.hg/hgrc $ hg pull ../a pulling from ../a searching for changes preoutgoing hook: HG_SOURCE=pull preoutgoing.forbid hook: HG_SOURCE=pull abort: preoutgoing.forbid hook exited with status 1 [255] outgoing hooks work for local clones $ cd .. 
$ cat > a/.hg/hgrc < [hooks] > preoutgoing = printenv.py preoutgoing > outgoing = printenv.py outgoing > EOF $ hg clone a c preoutgoing hook: HG_SOURCE=clone outgoing hook: HG_NODE=0000000000000000000000000000000000000000 HG_SOURCE=clone updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -rf c preoutgoing hook can prevent outgoing changes for local clones $ echo "preoutgoing.forbid = printenv.py preoutgoing.forbid 1" >> a/.hg/hgrc $ hg clone a zzz preoutgoing hook: HG_SOURCE=clone preoutgoing.forbid hook: HG_SOURCE=clone abort: preoutgoing.forbid hook exited with status 1 [255] $ cd "$TESTTMP/b" $ cat > hooktests.py < from mercurial import error > > uncallable = 0 > > def printargs(args): > args.pop('ui', None) > args.pop('repo', None) > a = list(args.items()) > a.sort() > print 'hook args:' > for k, v in a: > print ' ', k, v > > def passhook(**args): > printargs(args) > > def failhook(**args): > printargs(args) > return True > > class LocalException(Exception): > pass > > def raisehook(**args): > raise LocalException('exception from hook') > > def aborthook(**args): > raise error.Abort('raise abort from hook') > > def brokenhook(**args): > return 1 + {} > > def verbosehook(ui, **args): > ui.note('verbose output from hook\n') > > def printtags(ui, repo, **args): > print sorted(repo.tags()) > > class container: > unreachable = 1 > EOF test python hooks #if windows $ PYTHONPATH="$TESTTMP/b;$PYTHONPATH" #else $ PYTHONPATH="$TESTTMP/b:$PYTHONPATH" #endif $ export PYTHONPATH $ echo '[hooks]' > ../a/.hg/hgrc $ echo 'preoutgoing.broken = python:hooktests.brokenhook' >> ../a/.hg/hgrc $ hg pull ../a 2>&1 | grep 'raised an exception' error: preoutgoing.broken hook raised an exception: unsupported operand type(s) for +: 'int' and 'dict' $ echo '[hooks]' > ../a/.hg/hgrc $ echo 'preoutgoing.raise = python:hooktests.raisehook' >> ../a/.hg/hgrc $ hg pull ../a 2>&1 | grep 'raised an exception' error: preoutgoing.raise hook raised 
an exception: exception from hook $ echo '[hooks]' > ../a/.hg/hgrc $ echo 'preoutgoing.abort = python:hooktests.aborthook' >> ../a/.hg/hgrc $ hg pull ../a pulling from ../a searching for changes error: preoutgoing.abort hook failed: raise abort from hook abort: raise abort from hook [255] $ echo '[hooks]' > ../a/.hg/hgrc $ echo 'preoutgoing.fail = python:hooktests.failhook' >> ../a/.hg/hgrc $ hg pull ../a pulling from ../a searching for changes hook args: hooktype preoutgoing source pull abort: preoutgoing.fail hook failed [255] $ echo '[hooks]' > ../a/.hg/hgrc $ echo 'preoutgoing.uncallable = python:hooktests.uncallable' >> ../a/.hg/hgrc $ hg pull ../a pulling from ../a searching for changes abort: preoutgoing.uncallable hook is invalid ("hooktests.uncallable" is not callable) [255] $ echo '[hooks]' > ../a/.hg/hgrc $ echo 'preoutgoing.nohook = python:hooktests.nohook' >> ../a/.hg/hgrc $ hg pull ../a pulling from ../a searching for changes abort: preoutgoing.nohook hook is invalid ("hooktests.nohook" is not defined) [255] $ echo '[hooks]' > ../a/.hg/hgrc $ echo 'preoutgoing.nomodule = python:nomodule' >> ../a/.hg/hgrc $ hg pull ../a pulling from ../a searching for changes abort: preoutgoing.nomodule hook is invalid ("nomodule" not in a module) [255] $ echo '[hooks]' > ../a/.hg/hgrc $ echo 'preoutgoing.badmodule = python:nomodule.nowhere' >> ../a/.hg/hgrc $ hg pull ../a pulling from ../a searching for changes abort: preoutgoing.badmodule hook is invalid (import of "nomodule" failed) [255] $ echo '[hooks]' > ../a/.hg/hgrc $ echo 'preoutgoing.unreachable = python:hooktests.container.unreachable' >> ../a/.hg/hgrc $ hg pull ../a pulling from ../a searching for changes abort: preoutgoing.unreachable hook is invalid (import of "hooktests.container" failed) [255] $ echo '[hooks]' > ../a/.hg/hgrc $ echo 'preoutgoing.pass = python:hooktests.passhook' >> ../a/.hg/hgrc $ hg pull ../a pulling from ../a searching for changes hook args: hooktype preoutgoing source pull adding 
changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files adding remote bookmark quux (run 'hg update' to get a working copy) make sure --traceback works $ echo '[hooks]' > .hg/hgrc $ echo 'commit.abort = python:hooktests.aborthook' >> .hg/hgrc $ echo aa > a $ hg --traceback commit -d '0 0' -ma 2>&1 | grep '^Traceback' Traceback (most recent call last): $ cd .. $ hg init c $ cd c $ cat > hookext.py < def autohook(**args): > print "Automatically installed hook" > > def reposetup(ui, repo): > repo.ui.setconfig("hooks", "commit.auto", autohook) > EOF $ echo '[extensions]' >> .hg/hgrc $ echo 'hookext = hookext.py' >> .hg/hgrc $ touch foo $ hg add foo $ hg ci -d '0 0' -m 'add foo' Automatically installed hook $ echo >> foo $ hg ci --debug -d '0 0' -m 'change foo' committing files: foo committing manifest committing changelog committed changeset 1:52998019f6252a2b893452765fcb0a47351a5708 calling hook commit.auto: hgext_hookext.autohook Automatically installed hook $ hg showconfig hooks hooks.commit.auto= (glob) test python hook configured with python:[file]:[hook] syntax $ cd .. $ mkdir d $ cd d $ hg init repo $ mkdir hooks $ cd hooks $ cat > testhooks.py < def testhook(**args): > print 'hook works' > EOF $ echo '[hooks]' > ../repo/.hg/hgrc $ echo "pre-commit.test = python:`pwd`/testhooks.py:testhook" >> ../repo/.hg/hgrc $ cd ../repo $ hg commit -d '0 0' hook works nothing changed [1] $ echo '[hooks]' > .hg/hgrc $ echo "update.ne = python:`pwd`/nonexistent.py:testhook" >> .hg/hgrc $ echo "pre-identify.npmd = python:`pwd`/:no_python_module_dir" >> .hg/hgrc $ hg up null loading update.ne hook failed: abort: No such file or directory: $TESTTMP/d/repo/nonexistent.py [255] $ hg id loading pre-identify.npmd hook failed: abort: No module named repo! 
[255] $ cd ../../b make sure --traceback works on hook import failure $ cat > importfail.py < import somebogusmodule > # dereference something in the module to force demandimport to load it > somebogusmodule.whatever > EOF $ echo '[hooks]' > .hg/hgrc $ echo 'precommit.importfail = python:importfail.whatever' >> .hg/hgrc $ echo a >> a $ hg --traceback commit -ma 2>&1 | egrep -v '^( +File| [a-zA-Z(])' exception from first failed import attempt: Traceback (most recent call last): ImportError: No module named somebogusmodule exception from second failed import attempt: Traceback (most recent call last): ImportError: No module named hgext_importfail Traceback (most recent call last): HookLoadError: precommit.importfail hook is invalid (import of "importfail" failed) abort: precommit.importfail hook is invalid (import of "importfail" failed) Issue1827: Hooks Update & Commit not completely post operation commit and update hooks should run after command completion. The largefiles use demonstrates a recursive wlock, showing the hook doesn't run until the final release (and dirstate flush). $ echo '[hooks]' > .hg/hgrc $ echo 'commit = hg id' >> .hg/hgrc $ echo 'update = hg id' >> .hg/hgrc $ echo bb > a $ hg ci -ma 223eafe2750c tip $ hg up 0 --config extensions.largefiles= cb9a9f314b8b 1 files updated, 0 files merged, 0 files removed, 0 files unresolved make sure --verbose (and --quiet/--debug etc.) 
are propagated to the local ui that is passed to pre/post hooks $ echo '[hooks]' > .hg/hgrc $ echo 'pre-identify = python:hooktests.verbosehook' >> .hg/hgrc $ hg id cb9a9f314b8b $ hg id --verbose calling hook pre-identify: hooktests.verbosehook verbose output from hook cb9a9f314b8b Ensure hooks can be prioritized $ echo '[hooks]' > .hg/hgrc $ echo 'pre-identify.a = python:hooktests.verbosehook' >> .hg/hgrc $ echo 'pre-identify.b = python:hooktests.verbosehook' >> .hg/hgrc $ echo 'priority.pre-identify.b = 1' >> .hg/hgrc $ echo 'pre-identify.c = python:hooktests.verbosehook' >> .hg/hgrc $ hg id --verbose calling hook pre-identify.b: hooktests.verbosehook verbose output from hook calling hook pre-identify.a: hooktests.verbosehook verbose output from hook calling hook pre-identify.c: hooktests.verbosehook verbose output from hook cb9a9f314b8b new tags must be visible in pretxncommit (issue3210) $ echo 'pretxncommit.printtags = python:hooktests.printtags' >> .hg/hgrc $ hg tag -f foo ['a', 'foo', 'tip'] post-init hooks must not crash (issue4983) This also creates the `to` repo for the next test block. $ cd .. 
$ cat << EOF >> hgrc-with-post-init-hook > [hooks] > post-init = printenv.py post-init > EOF $ HGRCPATH=hgrc-with-post-init-hook hg init to post-init hook: HG_ARGS=init to HG_OPTS={'insecure': None, 'remotecmd': '', 'ssh': ''} HG_PATS=['to'] HG_RESULT=0 new commits must be visible in pretxnchangegroup (issue3428) $ echo '[hooks]' >> to/.hg/hgrc $ echo 'prechangegroup = hg --traceback tip' >> to/.hg/hgrc $ echo 'pretxnchangegroup = hg --traceback tip' >> to/.hg/hgrc $ echo a >> to/a $ hg --cwd to ci -Ama adding a $ hg clone to from updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo aa >> from/a $ hg --cwd from ci -mb $ hg --cwd from push pushing to $TESTTMP/to (glob) searching for changes changeset: 0:cb9a9f314b8b tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files changeset: 1:9836a07b9b9d tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: b $ cd .. pretxnclose hook failure should abort the transaction $ hg init txnfailure $ cd txnfailure $ touch a && hg commit -Aqm a $ cat >> .hg/hgrc < [hooks] > pretxnclose.error = exit 1 > EOF $ hg strip -r 0 --config extensions.strip= 0 files updated, 0 files merged, 1 files removed, 0 files unresolved saved backup bundle to * (glob) transaction abort! 
rollback completed strip failed, full bundle stored in * (glob) abort: pretxnclose.error hook exited with status 1 [255] $ hg recover no interrupted transaction available [1] mercurial-3.7.3/tests/test-completion.t0000644000175000017500000002025412676531525017607 0ustar mpmmpm00000000000000Show all commands except debug commands $ hg debugcomplete add addremove annotate archive backout bisect bookmarks branch branches bundle cat clone commit config copy diff export files forget graft grep heads help identify import incoming init locate log manifest merge outgoing parents paths phase pull push recover remove rename resolve revert rollback root serve status summary tag tags tip unbundle update verify version Show all commands that start with "a" $ hg debugcomplete a add addremove annotate archive Do not show debug commands if there are other candidates $ hg debugcomplete d diff Show debug commands if there are no other candidates $ hg debugcomplete debug debugancestor debugapplystreamclonebundle debugbuilddag debugbundle debugcheckstate debugcommands debugcomplete debugconfig debugcreatestreamclonebundle debugdag debugdata debugdate debugdeltachain debugdirstate debugdiscovery debugextensions debugfileset debugfsinfo debuggetbundle debugignore debugindex debugindexdot debuginstall debugknown debuglabelcomplete debuglocks debugmergestate debugnamecomplete debugobsolete debugpathcomplete debugpushkey debugpvec debugrebuilddirstate debugrebuildfncache debugrename debugrevlog debugrevspec debugsetparents debugsub debugsuccessorssets debugwalk debugwireargs Do not show the alias of a debug command if there are other candidates (this should hide rawcommit) $ hg debugcomplete r recover remove rename resolve revert rollback root Show the alias of a debug command if there are no other candidates $ hg debugcomplete rawc Show the global options $ hg debugcomplete --options | sort --config --cwd --debug --debugger --encoding --encodingmode --help --hidden --noninteractive 
--profile --quiet --repository --time --traceback --verbose --version -R -h -q -v -y Show the options for the "serve" command $ hg debugcomplete --options serve | sort --accesslog --address --certificate --cmdserver --config --cwd --daemon --daemon-pipefds --debug --debugger --encoding --encodingmode --errorlog --help --hidden --ipv6 --name --noninteractive --pid-file --port --prefix --profile --quiet --repository --stdio --style --templates --time --traceback --verbose --version --web-conf -6 -A -E -R -a -d -h -n -p -q -t -v -y Show an error if we use --options with an ambiguous abbreviation $ hg debugcomplete --options s hg: command 's' is ambiguous: serve showconfig status summary [255] Show all commands + options $ hg debugcommands add: include, exclude, subrepos, dry-run annotate: rev, follow, no-follow, text, user, file, date, number, changeset, line-number, ignore-all-space, ignore-space-change, ignore-blank-lines, include, exclude, template clone: noupdate, updaterev, rev, branch, pull, uncompressed, ssh, remotecmd, insecure commit: addremove, close-branch, amend, secret, edit, interactive, include, exclude, message, logfile, date, user, subrepos diff: rev, change, text, git, nodates, noprefix, show-function, reverse, ignore-all-space, ignore-space-change, ignore-blank-lines, unified, stat, root, include, exclude, subrepos export: output, switch-parent, rev, text, git, nodates forget: include, exclude init: ssh, remotecmd, insecure log: follow, follow-first, date, copies, keyword, rev, removed, only-merges, user, only-branch, branch, prune, patch, git, limit, no-merges, stat, graph, style, template, include, exclude merge: force, rev, preview, tool pull: update, force, rev, bookmark, branch, ssh, remotecmd, insecure push: force, rev, bookmark, branch, new-branch, ssh, remotecmd, insecure remove: after, force, subrepos, include, exclude serve: accesslog, daemon, daemon-pipefds, errorlog, port, address, prefix, name, web-conf, webdir-conf, pid-file, stdio, 
cmdserver, templates, style, ipv6, certificate status: all, modified, added, removed, deleted, clean, unknown, ignored, no-status, copies, print0, rev, change, include, exclude, subrepos, template summary: remote update: clean, check, date, rev, tool addremove: similarity, subrepos, include, exclude, dry-run archive: no-decode, prefix, rev, type, subrepos, include, exclude backout: merge, commit, no-commit, parent, rev, edit, tool, include, exclude, message, logfile, date, user bisect: reset, good, bad, skip, extend, command, noupdate bookmarks: force, rev, delete, rename, inactive, template branch: force, clean branches: active, closed, template bundle: force, rev, branch, base, all, type, ssh, remotecmd, insecure cat: output, rev, decode, include, exclude config: untrusted, edit, local, global copy: after, force, include, exclude, dry-run debugancestor: debugapplystreamclonebundle: debugbuilddag: mergeable-file, overwritten-file, new-file debugbundle: all, spec debugcheckstate: debugcommands: debugcomplete: options debugcreatestreamclonebundle: debugdag: tags, branches, dots, spaces debugdata: changelog, manifest, dir debugdate: extended debugdeltachain: changelog, manifest, dir, template debugdirstate: nodates, datesort debugdiscovery: old, nonheads, ssh, remotecmd, insecure debugextensions: template debugfileset: rev debugfsinfo: debuggetbundle: head, common, type debugignore: debugindex: changelog, manifest, dir, format debugindexdot: changelog, manifest, dir debuginstall: debugknown: debuglabelcomplete: debuglocks: force-lock, force-wlock debugmergestate: debugnamecomplete: debugobsolete: flags, record-parents, rev, date, user debugpathcomplete: full, normal, added, removed debugpushkey: debugpvec: debugrebuilddirstate: rev, minimal debugrebuildfncache: debugrename: rev debugrevlog: changelog, manifest, dir, dump debugrevspec: optimize debugsetparents: debugsub: rev debugsuccessorssets: debugwalk: include, exclude debugwireargs: three, four, five, ssh, 
remotecmd, insecure files: rev, print0, include, exclude, template, subrepos graft: rev, continue, edit, log, force, currentdate, currentuser, date, user, tool, dry-run grep: print0, all, text, follow, ignore-case, files-with-matches, line-number, rev, user, date, include, exclude heads: rev, topo, active, closed, style, template help: extension, command, keyword, system identify: rev, num, id, branch, tags, bookmarks, ssh, remotecmd, insecure import: strip, base, edit, force, no-commit, bypass, partial, exact, prefix, import-branch, message, logfile, date, user, similarity incoming: force, newest-first, bundle, rev, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos locate: rev, print0, fullpath, include, exclude manifest: rev, all, template outgoing: force, rev, newest-first, bookmarks, branch, patch, git, limit, no-merges, stat, graph, style, template, ssh, remotecmd, insecure, subrepos parents: rev, style, template paths: template phase: public, draft, secret, force, rev recover: rename: after, force, include, exclude, dry-run resolve: all, list, mark, unmark, no-status, tool, include, exclude, template revert: all, date, rev, no-backup, interactive, include, exclude, dry-run rollback: dry-run, force root: tag: force, local, rev, remove, edit, message, date, user tags: template tip: patch, git, style, template unbundle: update verify: version: $ hg init a $ cd a $ echo fee > fee $ hg ci -q -Amfee $ hg tag fee $ mkdir fie $ echo dead > fie/dead $ echo live > fie/live $ hg bookmark fo $ hg branch -q fie $ hg ci -q -Amfie $ echo fo > fo $ hg branch -qf default $ hg ci -q -Amfo $ echo Fum > Fum $ hg ci -q -AmFum $ hg bookmark Fum Test debugpathcomplete $ hg debugpathcomplete f fee fie fo $ hg debugpathcomplete -f f fee fie/dead fie/live fo $ hg rm Fum $ hg debugpathcomplete -r F Fum Test debugnamecomplete $ hg debugnamecomplete Fum default fee fie fo tip $ hg debugnamecomplete f fee fie fo 
mercurial-3.7.3/tests/test-rebase-parameters.t0000644000175000017500000002131112676531525021033 0ustar mpmmpm00000000000000 $ cat >> $HGRCPATH < [extensions] > rebase= > > [phases] > publish=False > > [alias] > tglog = log -G --template "{rev}: '{desc}' {branches}\n" > EOF $ hg init a $ cd a $ hg unbundle "$TESTDIR/bundles/rebase.hg" adding changesets adding manifests adding file changes added 8 changesets with 7 changes to 7 files (+2 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg up tip 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo I > I $ hg ci -AmI adding I $ hg tglog @ 8: 'I' | o 7: 'H' | | o 6: 'G' |/| o | 5: 'F' | | | o 4: 'E' |/ | o 3: 'D' | | | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' $ cd .. These fail: $ hg clone -q -u . a a1 $ cd a1 $ hg rebase -s 8 -d 7 nothing to rebase [1] $ hg rebase --continue --abort abort: cannot use both abort and continue [255] $ hg rebase --continue --collapse abort: cannot use collapse with continue or abort [255] $ hg rebase --continue --dest 4 abort: abort and continue do not allow specifying revisions [255] $ hg rebase --base 5 --source 4 abort: cannot specify both a source and a base [255] $ hg rebase --rev 5 --source 4 abort: cannot specify both a revision and a source [255] $ hg rebase --base 5 --rev 4 abort: cannot specify both a revision and a base [255] $ hg rebase --rev '1 & !1' empty "rev" revision set - nothing to rebase [1] $ hg rebase --source '1 & !1' empty "source" revision set - nothing to rebase [1] $ hg rebase --base '1 & !1' empty "base" revision set - can't compute rebase set [1] $ hg rebase nothing to rebase - working directory parent is also destination [1] $ hg rebase -b. nothing to rebase - e7ec4e813ba6 is both "base" and destination [1] $ hg up -q 7 $ hg rebase --traceback nothing to rebase - working directory parent is already an ancestor of destination e7ec4e813ba6 [1] $ hg rebase -b. 
nothing to rebase - "base" 02de42196ebe is already an ancestor of destination e7ec4e813ba6 [1] $ hg rebase --dest '1 & !1' abort: empty revision set [255] These work: Rebase with no arguments (from 3 onto 8): $ hg up -q -C 3 $ hg rebase rebasing 1:42ccdea3bb16 "B" rebasing 2:5fddd98957c8 "C" rebasing 3:32af7686d403 "D" saved backup bundle to $TESTTMP/a1/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob) $ hg tglog @ 8: 'D' | o 7: 'C' | o 6: 'B' | o 5: 'I' | o 4: 'H' | | o 3: 'G' |/| o | 2: 'F' | | | o 1: 'E' |/ o 0: 'A' Try to rollback after a rebase (fail): $ hg rollback no rollback information available [1] $ cd .. Rebase with base == '.' => same as no arguments (from 3 onto 8): $ hg clone -q -u 3 a a2 $ cd a2 $ hg rebase --base . rebasing 1:42ccdea3bb16 "B" rebasing 2:5fddd98957c8 "C" rebasing 3:32af7686d403 "D" saved backup bundle to $TESTTMP/a2/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob) $ hg tglog @ 8: 'D' | o 7: 'C' | o 6: 'B' | o 5: 'I' | o 4: 'H' | | o 3: 'G' |/| o | 2: 'F' | | | o 1: 'E' |/ o 0: 'A' $ cd .. Rebase with dest == branch(.) => same as no arguments (from 3 onto 8): $ hg clone -q -u 3 a a3 $ cd a3 $ hg rebase --dest 'branch(.)' rebasing 1:42ccdea3bb16 "B" rebasing 2:5fddd98957c8 "C" rebasing 3:32af7686d403 "D" saved backup bundle to $TESTTMP/a3/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob) $ hg tglog @ 8: 'D' | o 7: 'C' | o 6: 'B' | o 5: 'I' | o 4: 'H' | | o 3: 'G' |/| o | 2: 'F' | | | o 1: 'E' |/ o 0: 'A' $ cd .. Specify only source (from 2 onto 8): $ hg clone -q -u . a a4 $ cd a4 $ hg rebase --source 'desc("C")' rebasing 2:5fddd98957c8 "C" rebasing 3:32af7686d403 "D" saved backup bundle to $TESTTMP/a4/.hg/strip-backup/5fddd98957c8-f9244fa1-backup.hg (glob) $ hg tglog o 8: 'D' | o 7: 'C' | @ 6: 'I' | o 5: 'H' | | o 4: 'G' |/| o | 3: 'F' | | | o 2: 'E' |/ | o 1: 'B' |/ o 0: 'A' $ cd .. 
Specify only dest (from 3 onto 6): $ hg clone -q -u 3 a a5 $ cd a5 $ hg rebase --dest 6 rebasing 1:42ccdea3bb16 "B" rebasing 2:5fddd98957c8 "C" rebasing 3:32af7686d403 "D" saved backup bundle to $TESTTMP/a5/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob) $ hg tglog @ 8: 'D' | o 7: 'C' | o 6: 'B' | | o 5: 'I' | | | o 4: 'H' | | o | 3: 'G' |\| | o 2: 'F' | | o | 1: 'E' |/ o 0: 'A' $ cd .. Specify only base (from 1 onto 8): $ hg clone -q -u . a a6 $ cd a6 $ hg rebase --base 'desc("D")' rebasing 1:42ccdea3bb16 "B" rebasing 2:5fddd98957c8 "C" rebasing 3:32af7686d403 "D" saved backup bundle to $TESTTMP/a6/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob) $ hg tglog o 8: 'D' | o 7: 'C' | o 6: 'B' | @ 5: 'I' | o 4: 'H' | | o 3: 'G' |/| o | 2: 'F' | | | o 1: 'E' |/ o 0: 'A' $ cd .. Specify source and dest (from 2 onto 7): $ hg clone -q -u . a a7 $ cd a7 $ hg rebase --source 2 --dest 7 rebasing 2:5fddd98957c8 "C" rebasing 3:32af7686d403 "D" saved backup bundle to $TESTTMP/a7/.hg/strip-backup/5fddd98957c8-f9244fa1-backup.hg (glob) $ hg tglog o 8: 'D' | o 7: 'C' | | @ 6: 'I' |/ o 5: 'H' | | o 4: 'G' |/| o | 3: 'F' | | | o 2: 'E' |/ | o 1: 'B' |/ o 0: 'A' $ cd .. Specify base and dest (from 1 onto 7): $ hg clone -q -u . a a8 $ cd a8 $ hg rebase --base 3 --dest 7 rebasing 1:42ccdea3bb16 "B" rebasing 2:5fddd98957c8 "C" rebasing 3:32af7686d403 "D" saved backup bundle to $TESTTMP/a8/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob) $ hg tglog o 8: 'D' | o 7: 'C' | o 6: 'B' | | @ 5: 'I' |/ o 4: 'H' | | o 3: 'G' |/| o | 2: 'F' | | | o 1: 'E' |/ o 0: 'A' $ cd .. Specify only revs (from 2 onto 8) $ hg clone -q -u . a a9 $ cd a9 $ hg rebase --rev 'desc("C")::' rebasing 2:5fddd98957c8 "C" rebasing 3:32af7686d403 "D" saved backup bundle to $TESTTMP/a9/.hg/strip-backup/5fddd98957c8-f9244fa1-backup.hg (glob) $ hg tglog o 8: 'D' | o 7: 'C' | @ 6: 'I' | o 5: 'H' | | o 4: 'G' |/| o | 3: 'F' | | | o 2: 'E' |/ | o 1: 'B' |/ o 0: 'A' $ cd .. 
Rebasing both a single revision and a merge in one command $ hg clone -q -u . a aX $ cd aX $ hg rebase -r 3 -r 6 rebasing 3:32af7686d403 "D" rebasing 6:eea13746799a "G" saved backup bundle to $TESTTMP/aX/.hg/strip-backup/eea13746799a-ad273fd6-backup.hg (glob) $ cd .. Test --tool parameter: $ hg init b $ cd b $ echo c1 > c1 $ hg ci -Am c1 adding c1 $ echo c2 > c2 $ hg ci -Am c2 adding c2 $ hg up -q 0 $ echo c2b > c2 $ hg ci -Am c2b adding c2 created new head $ cd .. $ hg clone -q -u . b b1 $ cd b1 $ hg rebase -s 2 -d 1 --tool internal:local rebasing 2:e4e3f3546619 "c2b" (tip) note: rebase of 2:e4e3f3546619 created no changes to commit saved backup bundle to $TESTTMP/b1/.hg/strip-backup/e4e3f3546619-b0841178-backup.hg (glob) $ hg cat c2 c2 $ cd .. $ hg clone -q -u . b b2 $ cd b2 $ hg rebase -s 2 -d 1 --tool internal:other rebasing 2:e4e3f3546619 "c2b" (tip) saved backup bundle to $TESTTMP/b2/.hg/strip-backup/e4e3f3546619-b0841178-backup.hg (glob) $ hg cat c2 c2b $ cd .. $ hg clone -q -u . 
b b3 $ cd b3 $ hg rebase -s 2 -d 1 --tool internal:fail rebasing 2:e4e3f3546619 "c2b" (tip) unresolved conflicts (see hg resolve, then hg rebase --continue) [1] $ hg summary parent: 1:56daeba07f4b c2 parent: 2:e4e3f3546619 tip c2b branch: default commit: 1 modified, 1 unresolved (merge) update: (current) phases: 3 draft rebase: 0 rebased, 1 remaining (rebase --continue) $ hg resolve -l U c2 $ hg resolve -m c2 (no more unresolved files) continue: hg rebase --continue $ hg rebase -c --tool internal:fail rebasing 2:e4e3f3546619 "c2b" (tip) note: rebase of 2:e4e3f3546619 created no changes to commit saved backup bundle to $TESTTMP/b3/.hg/strip-backup/e4e3f3546619-b0841178-backup.hg (glob) $ hg rebase -i abort: interactive history editing is supported by the 'histedit' extension (see "hg --config extensions.histedit= help -e histedit") [255] $ hg rebase --interactive abort: interactive history editing is supported by the 'histedit' extension (see "hg --config extensions.histedit= help -e histedit") [255] $ cd .. No common ancestor $ hg init separaterepo $ cd separaterepo $ touch a $ hg commit -Aqm a $ hg up -q null $ touch b $ hg commit -Aqm b $ hg rebase -d 0 nothing to rebase from d7486e00c6f1 to 3903775176ed [1] $ cd .. mercurial-3.7.3/tests/test-gpg.t0000644000175000017500000000200112676531525016201 0ustar mpmmpm00000000000000#require gpg Test the GPG extension $ cat <> $HGRCPATH > [extensions] > gpg= > > [gpg] > cmd=gpg --no-permission-warning --no-secmem-warning --no-auto-check-trustdb --homedir "$TESTDIR/gpg" > EOF $ hg init r $ cd r $ echo foo > foo $ hg ci -Amfoo adding foo $ hg sigs $ HGEDITOR=cat hg sign -e 0 signing 0:e63c23eaa88a Added signature for changeset e63c23eaa88a HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. 
HG: -- HG: user: test HG: branch 'default' HG: added .hgsigs $ hg sigs hgtest 0:e63c23eaa88ae77967edcf4ea194d31167c478b0 $ hg sigcheck 0 e63c23eaa88a is signed by: hgtest verify that this test has not modified the trustdb.gpg file back in the main hg working dir $ md5sum.py "$TESTDIR/gpg/trustdb.gpg" f6b9c78c65fa9536e7512bb2ceb338ae */gpg/trustdb.gpg (glob) don't leak any state to next test run $ rm -f "$TESTDIR/gpg/random_seed" $ cd .. mercurial-3.7.3/tests/test-devel-warnings.t0000644000175000017500000001131412676531525020360 0ustar mpmmpm00000000000000 $ cat << EOF > buggylocking.py > """A small extension that tests our developer warnings > """ > > from mercurial import cmdutil, repair, revset > > cmdtable = {} > command = cmdutil.command(cmdtable) > > @command('buggylocking', [], '') > def buggylocking(ui, repo): > tr = repo.transaction('buggy') > lo = repo.lock() > wl = repo.wlock() > wl.release() > lo.release() > > @command('properlocking', [], '') > def properlocking(ui, repo): > """check that reentrance is fine""" > wl = repo.wlock() > lo = repo.lock() > tr = repo.transaction('proper') > tr2 = repo.transaction('proper') > lo2 = repo.lock() > wl2 = repo.wlock() > wl2.release() > lo2.release() > tr2.close() > tr.close() > lo.release() > wl.release() > > @command('nowaitlocking', [], '') > def nowaitlocking(ui, repo): > lo = repo.lock() > wl = repo.wlock(wait=False) > wl.release() > lo.release() > > @command('stripintr', [], '') > def stripintr(ui, repo): > lo = repo.lock() > tr = repo.transaction('foobar') > try: > repair.strip(repo.ui, repo, [repo['.'].node()]) > finally: > lo.release() > @command('oldanddeprecated', [], '') > def oldanddeprecated(ui, repo): > """test deprecation warning API""" > def foobar(ui): > ui.deprecwarn('foorbar is deprecated, go shopping', '42.1337') > foobar(ui) > > def oldstylerevset(repo, subset, x): > return list(subset) > > revset.symbols['oldstyle'] = oldstylerevset > EOF $ cat << EOF >> $HGRCPATH > [extensions] > 
buggylocking=$TESTTMP/buggylocking.py > [devel] > all-warnings=1 > EOF $ hg init lock-checker $ cd lock-checker $ hg buggylocking devel-warn: transaction with no lock at: $TESTTMP/buggylocking.py:11 (buggylocking) devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:13 (buggylocking) $ cat << EOF >> $HGRCPATH > [devel] > all=0 > check-locks=1 > EOF $ hg buggylocking devel-warn: transaction with no lock at: $TESTTMP/buggylocking.py:11 (buggylocking) devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:13 (buggylocking) $ hg buggylocking --traceback devel-warn: transaction with no lock at: */hg:* in * (glob) */mercurial/dispatch.py:* in run (glob) */mercurial/dispatch.py:* in dispatch (glob) */mercurial/dispatch.py:* in _runcatch (glob) */mercurial/dispatch.py:* in _dispatch (glob) */mercurial/dispatch.py:* in runcommand (glob) */mercurial/dispatch.py:* in _runcommand (glob) */mercurial/dispatch.py:* in checkargs (glob) */mercurial/dispatch.py:* in (glob) */mercurial/util.py:* in check (glob) $TESTTMP/buggylocking.py:* in buggylocking (glob) devel-warn: "wlock" acquired after "lock" at: */hg:* in * (glob) */mercurial/dispatch.py:* in run (glob) */mercurial/dispatch.py:* in dispatch (glob) */mercurial/dispatch.py:* in _runcatch (glob) */mercurial/dispatch.py:* in _dispatch (glob) */mercurial/dispatch.py:* in runcommand (glob) */mercurial/dispatch.py:* in _runcommand (glob) */mercurial/dispatch.py:* in checkargs (glob) */mercurial/dispatch.py:* in (glob) */mercurial/util.py:* in check (glob) $TESTTMP/buggylocking.py:* in buggylocking (glob) $ hg properlocking $ hg nowaitlocking $ echo a > a $ hg add a $ hg commit -m a $ hg stripintr saved backup bundle to $TESTTMP/lock-checker/.hg/strip-backup/cb9a9f314b8b-cc5ccb0b-backup.hg (glob) abort: programming error: cannot strip from inside a transaction (contact your extension maintainer) [255] $ hg log -r "oldstyle()" -T '{rev}\n' devel-warn: revset "oldstyle" use list instead of smartset, 
(upgrade your code) at: */mercurial/revset.py:* (mfunc) (glob) 0 $ hg oldanddeprecated devel-warn: foorbar is deprecated, go shopping (compatibility will be dropped after Mercurial-42.1337, update your code.) at: $TESTTMP/buggylocking.py:53 (oldanddeprecated) $ hg oldanddeprecated --traceback devel-warn: foorbar is deprecated, go shopping (compatibility will be dropped after Mercurial-42.1337, update your code.) at: */hg:* in (glob) */mercurial/dispatch.py:* in run (glob) */mercurial/dispatch.py:* in dispatch (glob) */mercurial/dispatch.py:* in _runcatch (glob) */mercurial/dispatch.py:* in _dispatch (glob) */mercurial/dispatch.py:* in runcommand (glob) */mercurial/dispatch.py:* in _runcommand (glob) */mercurial/dispatch.py:* in checkargs (glob) */mercurial/dispatch.py:* in (glob) */mercurial/util.py:* in check (glob) $TESTTMP/buggylocking.py:* in oldanddeprecated (glob) $ cd .. mercurial-3.7.3/tests/test-diffdir.t0000644000175000017500000000271712676531525017051 0ustar mpmmpm00000000000000 $ hg init $ touch a $ hg add a $ hg ci -m "a" $ echo 123 > b $ hg add b $ hg diff --nodates diff -r 3903775176ed b --- /dev/null +++ b/b @@ -0,0 +1,1 @@ +123 $ hg diff --nodates -r tip diff -r 3903775176ed b --- /dev/null +++ b/b @@ -0,0 +1,1 @@ +123 $ echo foo > a $ hg diff --nodates diff -r 3903775176ed a --- a/a +++ b/a @@ -0,0 +1,1 @@ +foo diff -r 3903775176ed b --- /dev/null +++ b/b @@ -0,0 +1,1 @@ +123 $ hg diff -r "" hg: parse error: empty query [255] $ hg diff -r tip -r "" hg: parse error: empty query [255] Remove a file that was added via merge. Since the file is not in parent 1, it should not be in the diff. $ hg ci -m 'a=foo' a $ hg co -Cq null $ echo 123 > b $ hg add b $ hg ci -m "b" created new head $ hg merge 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg rm -f a $ hg diff --nodates Rename a file that was added via merge. 
Since the rename source is not in parent 1, the diff should be relative to /dev/null $ hg co -Cq 2 $ hg merge 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg mv a a2 $ hg diff --nodates diff -r cf44b38435e5 a2 --- /dev/null +++ b/a2 @@ -0,0 +1,1 @@ +foo $ hg diff --nodates --git diff --git a/a2 b/a2 new file mode 100644 --- /dev/null +++ b/a2 @@ -0,0 +1,1 @@ +foo mercurial-3.7.3/tests/test-eol-patch.t0000644000175000017500000001576312676531525017323 0ustar mpmmpm00000000000000Test EOL patching $ cat >> $HGRCPATH < [diff] > git = 1 > EOF Set up helpers $ seteol () { > if [ $1 = "LF" ]; then > EOL='\n' > else > EOL='\r\n' > fi > } $ makerepo () { > seteol $1 > echo > echo "# ==== setup $1 repository ====" > echo '% hg init' > hg init repo > cd repo > cat > .hgeol < [repository] > native = $1 > [patterns] > unix.txt = LF > win.txt = CRLF > **.txt = native > EOF > printf "first\r\nsecond\r\nthird\r\n" > win.txt > printf "first\nsecond\nthird\n" > unix.txt > printf "first${EOL}second${EOL}third${EOL}" > native.txt > hg commit --addremove -m 'checkin' > cd .. > } $ dotest () { > seteol $1 > echo > echo "% hg clone repo repo-$1" > hg clone --noupdate repo repo-$1 > cd repo-$1 > cat > .hg/hgrc < [extensions] > eol = > [eol] > native = $1 > EOF > hg update > echo '% native.txt' > cat native.txt > echo '% unix.txt' > cat unix.txt > echo '% win.txt' > cat win.txt > printf "first${EOL}third${EOL}" > native.txt > printf "first\r\nthird\r\n" > win.txt > printf "first\nthird\n" > unix.txt > echo '% hg diff' > hg diff > p > cat p > echo '% hg revert' > hg revert --all > echo '% hg import' > hg import -m 'patch' p > echo '% native.txt' > cat native.txt > echo '% unix.txt' > cat unix.txt > echo '% win.txt' > cat win.txt > echo '% hg diff -c tip' > hg diff -c tip > cd .. 
> rm -r repo-$1 > } Run tests $ makerepo LF # ==== setup LF repository ==== % hg init adding .hgeol adding native.txt adding unix.txt adding win.txt $ dotest LF % hg clone repo repo-LF 4 files updated, 0 files merged, 0 files removed, 0 files unresolved % native.txt first second third % unix.txt first second third % win.txt first\r (esc) second\r (esc) third\r (esc) % hg diff diff --git a/native.txt b/native.txt --- a/native.txt +++ b/native.txt @@ -1,3 +1,2 @@ first -second third diff --git a/unix.txt b/unix.txt --- a/unix.txt +++ b/unix.txt @@ -1,3 +1,2 @@ first -second third diff --git a/win.txt b/win.txt --- a/win.txt +++ b/win.txt @@ -1,3 +1,2 @@ first\r (esc) -second\r (esc) third\r (esc) % hg revert reverting native.txt reverting unix.txt reverting win.txt % hg import applying p % native.txt first third % unix.txt first third % win.txt first\r (esc) third\r (esc) % hg diff -c tip diff --git a/native.txt b/native.txt --- a/native.txt +++ b/native.txt @@ -1,3 +1,2 @@ first -second third diff --git a/unix.txt b/unix.txt --- a/unix.txt +++ b/unix.txt @@ -1,3 +1,2 @@ first -second third diff --git a/win.txt b/win.txt --- a/win.txt +++ b/win.txt @@ -1,3 +1,2 @@ first\r (esc) -second\r (esc) third\r (esc) $ dotest CRLF % hg clone repo repo-CRLF 4 files updated, 0 files merged, 0 files removed, 0 files unresolved % native.txt first\r (esc) second\r (esc) third\r (esc) % unix.txt first second third % win.txt first\r (esc) second\r (esc) third\r (esc) % hg diff diff --git a/native.txt b/native.txt --- a/native.txt +++ b/native.txt @@ -1,3 +1,2 @@ first -second third diff --git a/unix.txt b/unix.txt --- a/unix.txt +++ b/unix.txt @@ -1,3 +1,2 @@ first -second third diff --git a/win.txt b/win.txt --- a/win.txt +++ b/win.txt @@ -1,3 +1,2 @@ first\r (esc) -second\r (esc) third\r (esc) % hg revert reverting native.txt reverting unix.txt reverting win.txt % hg import applying p % native.txt first\r (esc) third\r (esc) % unix.txt first third % win.txt first\r (esc) third\r 
(esc) % hg diff -c tip diff --git a/native.txt b/native.txt --- a/native.txt +++ b/native.txt @@ -1,3 +1,2 @@ first -second third diff --git a/unix.txt b/unix.txt --- a/unix.txt +++ b/unix.txt @@ -1,3 +1,2 @@ first -second third diff --git a/win.txt b/win.txt --- a/win.txt +++ b/win.txt @@ -1,3 +1,2 @@ first\r (esc) -second\r (esc) third\r (esc) $ rm -r repo $ makerepo CRLF # ==== setup CRLF repository ==== % hg init adding .hgeol adding native.txt adding unix.txt adding win.txt $ dotest LF % hg clone repo repo-LF 4 files updated, 0 files merged, 0 files removed, 0 files unresolved % native.txt first second third % unix.txt first second third % win.txt first\r (esc) second\r (esc) third\r (esc) % hg diff diff --git a/native.txt b/native.txt --- a/native.txt +++ b/native.txt @@ -1,3 +1,2 @@ first\r (esc) -second\r (esc) third\r (esc) diff --git a/unix.txt b/unix.txt --- a/unix.txt +++ b/unix.txt @@ -1,3 +1,2 @@ first -second third diff --git a/win.txt b/win.txt --- a/win.txt +++ b/win.txt @@ -1,3 +1,2 @@ first\r (esc) -second\r (esc) third\r (esc) % hg revert reverting native.txt reverting unix.txt reverting win.txt % hg import applying p % native.txt first third % unix.txt first third % win.txt first\r (esc) third\r (esc) % hg diff -c tip diff --git a/native.txt b/native.txt --- a/native.txt +++ b/native.txt @@ -1,3 +1,2 @@ first\r (esc) -second\r (esc) third\r (esc) diff --git a/unix.txt b/unix.txt --- a/unix.txt +++ b/unix.txt @@ -1,3 +1,2 @@ first -second third diff --git a/win.txt b/win.txt --- a/win.txt +++ b/win.txt @@ -1,3 +1,2 @@ first\r (esc) -second\r (esc) third\r (esc) $ dotest CRLF % hg clone repo repo-CRLF 4 files updated, 0 files merged, 0 files removed, 0 files unresolved % native.txt first\r (esc) second\r (esc) third\r (esc) % unix.txt first second third % win.txt first\r (esc) second\r (esc) third\r (esc) % hg diff diff --git a/native.txt b/native.txt --- a/native.txt +++ b/native.txt @@ -1,3 +1,2 @@ first\r (esc) -second\r (esc) third\r (esc) 
diff --git a/unix.txt b/unix.txt --- a/unix.txt +++ b/unix.txt @@ -1,3 +1,2 @@ first -second third diff --git a/win.txt b/win.txt --- a/win.txt +++ b/win.txt @@ -1,3 +1,2 @@ first\r (esc) -second\r (esc) third\r (esc) % hg revert reverting native.txt reverting unix.txt reverting win.txt % hg import applying p % native.txt first\r (esc) third\r (esc) % unix.txt first third % win.txt first\r (esc) third\r (esc) % hg diff -c tip diff --git a/native.txt b/native.txt --- a/native.txt +++ b/native.txt @@ -1,3 +1,2 @@ first\r (esc) -second\r (esc) third\r (esc) diff --git a/unix.txt b/unix.txt --- a/unix.txt +++ b/unix.txt @@ -1,3 +1,2 @@ first -second third diff --git a/win.txt b/win.txt --- a/win.txt +++ b/win.txt @@ -1,3 +1,2 @@ first\r (esc) -second\r (esc) third\r (esc) $ rm -r repo mercurial-3.7.3/tests/test-subrepo-relative-path.t0000644000175000017500000000556112676531525021664 0ustar mpmmpm00000000000000#require killdaemons Preparing the subrepository 'sub' $ hg init sub $ echo sub > sub/sub $ hg add -R sub adding sub/sub (glob) $ hg commit -R sub -m "sub import" Preparing the 'main' repo which depends on the subrepo 'sub' $ hg init main $ echo main > main/main $ echo "sub = ../sub" > main/.hgsub $ hg clone sub main/sub updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg add -R main adding main/.hgsub (glob) adding main/main (glob) $ hg commit -R main -m "main import" Cleaning both repositories, just as a clone -U $ hg up -C -R sub null 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg up -C -R main null 0 files updated, 0 files merged, 3 files removed, 0 files unresolved $ rm -rf main/sub hide outer repo $ hg init Serving them both using hgweb $ printf '[paths]\n/main = main\nsub = sub\n' > webdir.conf $ hg serve --webdir-conf webdir.conf -a localhost -p $HGPORT \ > -A /dev/null -E /dev/null --pid-file hg.pid -d $ cat hg.pid >> $DAEMON_PIDS Clone main from hgweb $ hg clone 
"http://localhost:$HGPORT/main" cloned requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 3 changes to 3 files updating to branch default cloning subrepo sub from http://localhost:$HGPORT/sub requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files 3 files updated, 0 files merged, 0 files removed, 0 files unresolved Checking cloned repo ids $ hg id -R cloned fdfeeb3e979e tip $ hg id -R cloned/sub 863c1745b441 tip subrepo debug for 'main' clone $ hg debugsub -R cloned path sub source ../sub revision 863c1745b441bd97a8c4a096e87793073f4fb215 $ killdaemons.py subrepo paths with ssh urls $ hg clone -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/cloned sshclone requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 3 changes to 3 files updating to branch default cloning subrepo sub from ssh://user@dummy/sub requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R sshclone push -e "python \"$TESTDIR/dummyssh\"" ssh://user@dummy/`pwd`/cloned pushing to ssh://user@dummy/$TESTTMP/cloned pushing subrepo sub to ssh://user@dummy/$TESTTMP/sub searching for changes no changes found searching for changes no changes found [1] $ cat dummylog Got arguments 1:user@dummy 2:hg -R cloned serve --stdio Got arguments 1:user@dummy 2:hg -R sub serve --stdio Got arguments 1:user@dummy 2:hg -R $TESTTMP/cloned serve --stdio Got arguments 1:user@dummy 2:hg -R $TESTTMP/sub serve --stdio mercurial-3.7.3/tests/test-debugindexdot.t0000644000175000017500000000066312676531525020265 0ustar mpmmpm00000000000000Just exercise debugindexdot Create a short file history including a merge. 
$ hg init t $ cd t $ echo a > a $ hg ci -qAm t1 -d '0 0' $ echo a >> a $ hg ci -m t2 -d '1 0' $ hg up -qC 0 $ echo b >> a $ hg ci -m t3 -d '2 0' created new head $ HGMERGE=true hg merge -q $ hg ci -m merge -d '3 0' $ hg debugindexdot .hg/store/data/a.i digraph G { -1 -> 0 0 -> 1 0 -> 2 2 -> 3 1 -> 3 } $ cd .. mercurial-3.7.3/tests/test-treediscovery-legacy.t0000644000175000017500000002163412676531525021572 0ustar mpmmpm00000000000000#require killdaemons Tests discovery against servers without getbundle support: $ cat >> $HGRCPATH < [ui] > logtemplate="{rev} {node|short}: {desc} {branches}\n" > EOF $ cp $HGRCPATH $HGRCPATH-withcap $ CAP="getbundle known changegroupsubset bundle2" $ . "$TESTDIR/notcapable" $ cp $HGRCPATH $HGRCPATH-nocap $ cp $HGRCPATH-withcap $HGRCPATH Prep for test server without branchmap support $ CAP="branchmap" $ . "$TESTDIR/notcapable" $ cp $HGRCPATH $HGRCPATH-nocap-branchmap $ cp $HGRCPATH-withcap $HGRCPATH Setup HTTP server control: $ remote=http://localhost:$HGPORT/ $ export remote $ tstart() { > echo '[web]' > $1/.hg/hgrc > echo 'push_ssl = false' >> $1/.hg/hgrc > echo 'allow_push = *' >> $1/.hg/hgrc > cp $HGRCPATH-nocap $HGRCPATH > hg serve -R $1 -p $HGPORT -d --pid-file=hg.pid -E errors.log > cat hg.pid >> $DAEMON_PIDS > } $ tstop() { > killdaemons.py > cp $HGRCPATH-withcap $HGRCPATH > } Both are empty: $ hg init empty1 $ hg init empty2 $ tstart empty2 $ hg incoming -R empty1 $remote comparing with http://localhost:$HGPORT/ no changes found [1] $ hg outgoing -R empty1 $remote comparing with http://localhost:$HGPORT/ no changes found [1] $ hg pull -R empty1 $remote pulling from http://localhost:$HGPORT/ no changes found $ hg push -R empty1 $remote pushing to http://localhost:$HGPORT/ no changes found [1] $ tstop Base repo: $ hg init main $ cd main $ hg debugbuilddag -mo '+2:tbase @name1 +3:thead1 A $ hg ci -Am A adding A $ cd .. 
$ hg clone rlocal rremote updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd rlocal $ echo B > B $ hg ci -Am B adding B $ cd .. $ tstart rremote $ cd rlocal $ hg incoming $remote comparing with http://localhost:$HGPORT/ searching for changes no changes found [1] $ hg outgoing $remote comparing with http://localhost:$HGPORT/ searching for changes 1 27547f69f254: B $ hg pull $remote pulling from http://localhost:$HGPORT/ searching for changes no changes found $ hg push $remote pushing to http://localhost:$HGPORT/ searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files $ hg outgoing $remote comparing with http://localhost:$HGPORT/ searching for changes no changes found [1] $ cd .. $ tstop mercurial-3.7.3/tests/test-docker-packaging.t0000644000175000017500000000157712676531525020636 0ustar mpmmpm00000000000000#require test-repo slow docker Ensure debuild doesn't run the testsuite, as that could get silly. $ DEB_BUILD_OPTIONS=nocheck $ export DEB_BUILD_OPTIONS $ OUTPUTDIR=`pwd` $ export OUTPUTDIR $ cd "$TESTDIR"/.. $ make docker-debian-jessie > $OUTPUTDIR/build.log 2>&1 $ cd $OUTPUTDIR $ ls *.deb mercurial-common_*.deb (glob) mercurial_*.deb (glob) We check debian package contents with portable tools so that when we're on non-debian machines we can still test the packages that are built using docker. 
main deb should have .so but no .py $ ar x mercurial_*.deb $ tar tf data.tar* | egrep '(localrepo|parsers)' ./usr/lib/python2.7/dist-packages/mercurial/parsers*.so (glob) mercurial-common should have .py but no .so or .pyc $ ar x mercurial-common_*.deb $ tar tf data.tar* | egrep '(localrepo|parsers)' ./usr/lib/python2.7/dist-packages/mercurial/localrepo.py mercurial-3.7.3/tests/test-demandimport.py.out0000644000175000017500000000123712676531525021114 0ustar mpmmpm00000000000000os = os.system = os = util = util.system = util = util.system = hgweb = hgweb_mod = hgweb = fred = re = fred = fred.sub = fred = re = re.stderr = ', mode 'w' at 0x?> re = node = mercurial-3.7.3/tests/test-tag.t0000644000175000017500000004532512676531525016217 0ustar mpmmpm00000000000000 $ hg init test $ cd test $ echo a > a $ hg add a $ hg commit -m "test" $ hg history changeset: 0:acb14030fe0a tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: test $ hg tag ' ' abort: tag names cannot consist entirely of whitespace [255] (this tests also that editor is not invoked, if '--edit' is not specified) $ HGEDITOR=cat hg tag "bleah" $ hg history changeset: 1:d4f0d2909abc tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Added tag bleah for changeset acb14030fe0a changeset: 0:acb14030fe0a tag: bleah user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: test $ echo foo >> .hgtags $ hg tag "bleah2" abort: working copy of .hgtags is changed (please commit .hgtags manually) [255] $ hg revert .hgtags $ hg tag -r 0 x y z y y z abort: tag names must be unique [255] $ hg tag tap nada dot tip abort: the name 'tip' is reserved [255] $ hg tag . abort: the name '.' 
is reserved [255] $ hg tag null abort: the name 'null' is reserved [255] $ hg tag "bleah" abort: tag 'bleah' already exists (use -f to force) [255] $ hg tag "blecch" "bleah" abort: tag 'bleah' already exists (use -f to force) [255] $ hg tag --remove "blecch" abort: tag 'blecch' does not exist [255] $ hg tag --remove "bleah" "blecch" "blough" abort: tag 'blecch' does not exist [255] $ hg tag -r 0 "bleah0" $ hg tag -l -r 1 "bleah1" $ hg tag gack gawk gorp $ hg tag -f gack $ hg tag --remove gack gorp $ hg tag "bleah " abort: tag 'bleah' already exists (use -f to force) [255] $ hg tag " bleah" abort: tag 'bleah' already exists (use -f to force) [255] $ hg tag " bleah" abort: tag 'bleah' already exists (use -f to force) [255] $ hg tag -r 0 " bleahbleah " $ hg tag -r 0 " bleah bleah " $ cat .hgtags acb14030fe0a21b60322c440ad2d20cf7685a376 bleah acb14030fe0a21b60322c440ad2d20cf7685a376 bleah0 336fccc858a4eb69609a291105009e484a6b6b8d gack 336fccc858a4eb69609a291105009e484a6b6b8d gawk 336fccc858a4eb69609a291105009e484a6b6b8d gorp 336fccc858a4eb69609a291105009e484a6b6b8d gack 799667b6f2d9b957f73fa644a918c2df22bab58f gack 799667b6f2d9b957f73fa644a918c2df22bab58f gack 0000000000000000000000000000000000000000 gack 336fccc858a4eb69609a291105009e484a6b6b8d gorp 0000000000000000000000000000000000000000 gorp acb14030fe0a21b60322c440ad2d20cf7685a376 bleahbleah acb14030fe0a21b60322c440ad2d20cf7685a376 bleah bleah $ cat .hg/localtags d4f0d2909abc9290e2773c08837d70c1794e3f5a bleah1 tagging on a non-head revision $ hg update 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg tag -l localblah $ hg tag "foobar" abort: not at a branch head (use -f to force) [255] $ hg tag -f "foobar" $ cat .hgtags acb14030fe0a21b60322c440ad2d20cf7685a376 foobar $ cat .hg/localtags d4f0d2909abc9290e2773c08837d70c1794e3f5a bleah1 acb14030fe0a21b60322c440ad2d20cf7685a376 localblah $ hg tag -l 'xx > newline' abort: '\n' cannot be used in a name [255] $ hg tag -l 'xx:xx' abort: ':' 
cannot be used in a name [255] cloning local tags $ cd .. $ hg -R test log -r0:5 changeset: 0:acb14030fe0a tag: bleah tag: bleah bleah tag: bleah0 tag: bleahbleah tag: foobar tag: localblah user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: test changeset: 1:d4f0d2909abc tag: bleah1 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Added tag bleah for changeset acb14030fe0a changeset: 2:336fccc858a4 tag: gawk user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Added tag bleah0 for changeset acb14030fe0a changeset: 3:799667b6f2d9 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Added tag gack, gawk, gorp for changeset 336fccc858a4 changeset: 4:154eeb7c0138 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Added tag gack for changeset 799667b6f2d9 changeset: 5:b4bb47aaff09 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Removed tag gack, gorp $ hg clone -q -rbleah1 test test1 $ hg -R test1 parents --style=compact 1[tip] d4f0d2909abc 1970-01-01 00:00 +0000 test Added tag bleah for changeset acb14030fe0a $ hg clone -q -r5 test#bleah1 test2 $ hg -R test2 parents --style=compact 5[tip] b4bb47aaff09 1970-01-01 00:00 +0000 test Removed tag gack, gorp $ hg clone -q -U test#bleah1 test3 $ hg -R test3 parents --style=compact $ cd test Issue601: hg tag doesn't do the right thing if .hgtags or localtags doesn't end with EOL $ python << EOF > f = file('.hg/localtags'); last = f.readlines()[-1][:-1]; f.close() > f = file('.hg/localtags', 'w'); f.write(last); f.close() > EOF $ cat .hg/localtags; echo acb14030fe0a21b60322c440ad2d20cf7685a376 localblah $ hg tag -l localnewline $ cat .hg/localtags; echo acb14030fe0a21b60322c440ad2d20cf7685a376 localblah c2899151f4e76890c602a2597a650a72666681bf localnewline $ python << EOF > f = file('.hgtags'); last = f.readlines()[-1][:-1]; f.close() > f = file('.hgtags', 'w'); f.write(last); f.close() > EOF $ hg ci -m'broken manual edit of .hgtags' $ cat .hgtags; echo 
acb14030fe0a21b60322c440ad2d20cf7685a376 foobar $ hg tag newline $ cat .hgtags; echo acb14030fe0a21b60322c440ad2d20cf7685a376 foobar a0eea09de1eeec777b46f2085260a373b2fbc293 newline tag and branch using same name $ hg branch tag-and-branch-same-name marked working directory as branch tag-and-branch-same-name (branches are permanent and global, did you want a bookmark?) $ hg ci -m"discouraged" $ hg tag tag-and-branch-same-name warning: tag tag-and-branch-same-name conflicts with existing branch name test custom commit messages $ cat > editor.sh << '__EOF__' > echo "==== before editing" > cat "$1" > echo "====" > echo "custom tag message" > "$1" > echo "second line" >> "$1" > __EOF__ at first, test saving last-message.txt (test that editor is not invoked before transaction starting) $ cat > .hg/hgrc << '__EOF__' > [hooks] > # this failure occurs before editor invocation > pretag.test-saving-lastmessage = false > __EOF__ $ rm -f .hg/last-message.txt $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg tag custom-tag -e abort: pretag.test-saving-lastmessage hook exited with status 1 [255] $ test -f .hg/last-message.txt [1] (test that editor is invoked and commit message is saved into "last-message.txt") $ cat >> .hg/hgrc << '__EOF__' > [hooks] > pretag.test-saving-lastmessage = > # this failure occurs after editor invocation > pretxncommit.unexpectedabort = false > __EOF__ (this tests also that editor is invoked, if '--edit' is specified, regardless of '--message') $ rm -f .hg/last-message.txt $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg tag custom-tag -e -m "foo bar" ==== before editing foo bar HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. HG: -- HG: user: test HG: branch 'tag-and-branch-same-name' HG: changed .hgtags ==== note: commit message saved in .hg/last-message.txt transaction abort! 
rollback completed abort: pretxncommit.unexpectedabort hook exited with status 1 [255] $ cat .hg/last-message.txt custom tag message second line $ cat >> .hg/hgrc << '__EOF__' > [hooks] > pretxncommit.unexpectedabort = > __EOF__ $ hg status .hgtags M .hgtags $ hg revert --no-backup -q .hgtags then, test custom commit message itself $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg tag custom-tag -e ==== before editing Added tag custom-tag for changeset 75a534207be6 HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. HG: -- HG: user: test HG: branch 'tag-and-branch-same-name' HG: changed .hgtags ==== $ hg log -l1 --template "{desc}\n" custom tag message second line local tag with .hgtags modified $ hg tag hgtags-modified $ hg rollback repository tip rolled back to revision 13 (undo commit) working directory now based on revision 13 $ hg st M .hgtags ? .hgtags.orig ? editor.sh $ hg tag --local baz $ hg revert --no-backup .hgtags tagging when at named-branch-head that's not a topo-head $ hg up default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge -t internal:local 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m 'merge named branch' $ hg up 13 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg tag new-topo-head tagging on null rev $ hg up null 0 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg tag nullrev abort: not at a branch head (use -f to force) [255] $ hg init empty $ hg tag -R empty nullrev abort: cannot tag null revision [255] $ hg tag -R empty -r 00000000000 -f nulltag abort: cannot tag null revision [255] $ cd .. 
tagging on an uncommitted merge (issue2542) $ hg init repo-tag-uncommitted-merge $ cd repo-tag-uncommitted-merge $ echo c1 > f1 $ hg ci -Am0 adding f1 $ echo c2 > f2 $ hg ci -Am1 adding f2 $ hg co -q 0 $ hg branch b1 marked working directory as branch b1 (branches are permanent and global, did you want a bookmark?) $ hg ci -m2 $ hg up default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge b1 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg tag t1 abort: uncommitted merge [255] $ hg status $ hg tag --rev 1 t2 abort: uncommitted merge [255] $ hg tag --rev 1 --local t3 $ hg tags -v tip 2:2a156e8887cc t3 1:c3adabd1a5f4 local $ cd .. commit hook on tag used to be run without write lock - issue3344 $ hg init repo-tag $ touch repo-tag/test $ hg -R repo-tag commit -A -m "test" adding test $ hg init repo-tag-target $ cat > "$TESTTMP/issue3344.sh" < hg push "$TESTTMP/repo-tag-target" > EOF $ hg -R repo-tag --config hooks.commit="sh ../issue3344.sh" tag tag pushing to $TESTTMP/repo-tag-target (glob) searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files automatically merge resolvable tag conflicts (i.e. tags that differ in rank) create two clones with some different tags as well as some common tags check that we can merge tags that differ in rank $ hg init repo-automatic-tag-merge $ cd repo-automatic-tag-merge $ echo c0 > f0 $ hg ci -A -m0 adding f0 $ hg tag tbase $ hg up -qr '.^' $ hg log -r 'wdir()' -T "{latesttagdistance}\n" 1 $ hg up -q $ hg log -r 'wdir()' -T "{latesttagdistance}\n" 2 $ cd .. 
$ hg clone repo-automatic-tag-merge repo-automatic-tag-merge-clone updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd repo-automatic-tag-merge-clone $ echo c1 > f1 $ hg ci -A -m1 adding f1 $ hg tag t1 t2 t3 $ hg tag --remove t2 $ hg tag t5 $ echo c2 > f2 $ hg ci -A -m2 adding f2 $ hg tag -f t3 $ cd ../repo-automatic-tag-merge $ echo c3 > f3 $ hg ci -A -m3 adding f3 $ hg tag -f t4 t5 t6 $ hg up -q '.^' $ hg log -r 'wdir()' -T "{changessincelatesttag} changes since {latesttag}\n" 1 changes since t4:t5:t6 $ hg log -r '.' -T "{changessincelatesttag} changes since {latesttag}\n" 0 changes since t4:t5:t6 $ echo c5 > f3 $ hg log -r 'wdir()' -T "{changessincelatesttag} changes since {latesttag}\n" 1 changes since t4:t5:t6 $ hg up -qC $ hg tag --remove t5 $ echo c4 > f4 $ hg log -r '.' -T "{changessincelatesttag} changes since {latesttag}\n" 2 changes since t4:t6 $ hg log -r '.' -T "{latesttag % '{latesttag}\n'}" t4 t6 $ hg log -r '.' -T "{latesttag('t4') % 'T: {tag}, C: {changes}, D: {distance}\n'}" T: t4, C: 2, D: 2 $ hg log -r '.' -T "{latesttag('re:\d') % 'T: {tag}, C: {changes}, D: {distance}\n'}" T: t4, C: 2, D: 2 T: t6, C: 2, D: 2 $ hg log -r . 
-T '{join(latesttag(), "*")}\n' t4*t6 $ hg ci -A -m4 adding f4 $ hg log -r 'wdir()' -T "{changessincelatesttag} changes since {latesttag}\n" 4 changes since t4:t6 $ hg tag t2 $ hg tag -f t6 $ cd ../repo-automatic-tag-merge-clone $ hg pull pulling from $TESTTMP/repo-automatic-tag-merge (glob) searching for changes adding changesets adding manifests adding file changes added 6 changesets with 6 changes to 3 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg merge --tool internal:tagmerge merging .hgtags 2 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg status M .hgtags M f3 M f4 $ hg resolve -l R .hgtags $ cat .hgtags 9aa4e1292a27a248f8d07339bed9931d54907be7 t4 9aa4e1292a27a248f8d07339bed9931d54907be7 t6 9aa4e1292a27a248f8d07339bed9931d54907be7 t6 09af2ce14077a94effef208b49a718f4836d4338 t6 6cee5c8f3e5b4ae1a3996d2f6489c3e08eb5aea7 tbase 4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t1 929bca7b18d067cbf3844c3896319a940059d748 t2 4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t2 4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t3 4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t2 0000000000000000000000000000000000000000 t2 875517b4806a848f942811a315a5bce30804ae85 t5 9aa4e1292a27a248f8d07339bed9931d54907be7 t5 9aa4e1292a27a248f8d07339bed9931d54907be7 t5 0000000000000000000000000000000000000000 t5 4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t3 79505d5360b07e3e79d1052e347e73c02b8afa5b t3 check that the merge tried to minimize the diff with the first merge parent $ hg diff --git -r 'p1()' .hgtags diff --git a/.hgtags b/.hgtags --- a/.hgtags +++ b/.hgtags @@ -1,9 +1,17 @@ +9aa4e1292a27a248f8d07339bed9931d54907be7 t4 +9aa4e1292a27a248f8d07339bed9931d54907be7 t6 +9aa4e1292a27a248f8d07339bed9931d54907be7 t6 +09af2ce14077a94effef208b49a718f4836d4338 t6 6cee5c8f3e5b4ae1a3996d2f6489c3e08eb5aea7 tbase 4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t1 +929bca7b18d067cbf3844c3896319a940059d748 t2 
4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t2 4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t3 4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t2 0000000000000000000000000000000000000000 t2 875517b4806a848f942811a315a5bce30804ae85 t5 +9aa4e1292a27a248f8d07339bed9931d54907be7 t5 +9aa4e1292a27a248f8d07339bed9931d54907be7 t5 +0000000000000000000000000000000000000000 t5 4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t3 79505d5360b07e3e79d1052e347e73c02b8afa5b t3 detect merge tag conflicts $ hg update -C -r tip 3 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg tag t7 $ hg update -C -r 'first(sort(head()))' 3 files updated, 0 files merged, 2 files removed, 0 files unresolved $ printf "%s %s\n" `hg log -r . --template "{node} t7"` >> .hgtags $ hg commit -m "manually add conflicting t7 tag" $ hg merge --tool internal:tagmerge merging .hgtags automatic .hgtags merge failed the following 1 tags are in conflict: t7 automatic tag merging of .hgtags failed! (use 'hg resolve --tool :merge' or another merge tool of your choice) 2 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ hg resolve -l U .hgtags $ cat .hgtags 6cee5c8f3e5b4ae1a3996d2f6489c3e08eb5aea7 tbase 4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t1 4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t2 4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t3 4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t2 0000000000000000000000000000000000000000 t2 875517b4806a848f942811a315a5bce30804ae85 t5 4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t3 79505d5360b07e3e79d1052e347e73c02b8afa5b t3 ea918d56be86a4afc5a95312e8b6750e1428d9d2 t7 $ cd .. 
handle the loss of tags $ hg clone repo-automatic-tag-merge-clone repo-merge-lost-tags updating to branch default 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd repo-merge-lost-tags $ echo c5 > f5 $ hg ci -A -m5 adding f5 $ hg tag -f t7 $ hg update -r 'p1(t7)' 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ printf '' > .hgtags $ hg commit -m 'delete all tags' created new head $ hg log -r 'max(t7::)' changeset: 17:ffe462b50880 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Added tag t7 for changeset fd3a9e394ce3 $ hg update -r 'max(t7::)' 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge -r tip --tool internal:tagmerge merging .hgtags 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg resolve -l R .hgtags $ cat .hgtags 6cee5c8f3e5b4ae1a3996d2f6489c3e08eb5aea7 tbase 0000000000000000000000000000000000000000 tbase 4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t1 0000000000000000000000000000000000000000 t1 4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t2 4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t3 4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t2 0000000000000000000000000000000000000000 t2 875517b4806a848f942811a315a5bce30804ae85 t5 0000000000000000000000000000000000000000 t5 4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t3 79505d5360b07e3e79d1052e347e73c02b8afa5b t3 0000000000000000000000000000000000000000 t3 ea918d56be86a4afc5a95312e8b6750e1428d9d2 t7 0000000000000000000000000000000000000000 t7 ea918d56be86a4afc5a95312e8b6750e1428d9d2 t7 fd3a9e394ce3afb354a496323bf68ac1755a30de t7 also check that we minimize the diff with the 1st merge parent $ hg diff --git -r 'p1()' .hgtags diff --git a/.hgtags b/.hgtags --- a/.hgtags +++ b/.hgtags @@ -1,12 +1,17 @@ 6cee5c8f3e5b4ae1a3996d2f6489c3e08eb5aea7 tbase +0000000000000000000000000000000000000000 tbase 4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t1 +0000000000000000000000000000000000000000 t1 
4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t2 4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t3 4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t2 0000000000000000000000000000000000000000 t2 875517b4806a848f942811a315a5bce30804ae85 t5 +0000000000000000000000000000000000000000 t5 4f3e9b90005b68b4d8a3f4355cedc302a8364f5c t3 79505d5360b07e3e79d1052e347e73c02b8afa5b t3 +0000000000000000000000000000000000000000 t3 ea918d56be86a4afc5a95312e8b6750e1428d9d2 t7 +0000000000000000000000000000000000000000 t7 ea918d56be86a4afc5a95312e8b6750e1428d9d2 t7 fd3a9e394ce3afb354a496323bf68ac1755a30de t7 mercurial-3.7.3/tests/test-url-rev.t0000644000175000017500000001651612676531525017040 0ustar mpmmpm00000000000000Test basic functionality of url#rev syntax $ hg init repo $ cd repo $ echo a > a $ hg ci -qAm 'add a' $ hg branch foo marked working directory as branch foo (branches are permanent and global, did you want a bookmark?) $ echo >> a $ hg ci -m 'change a' $ cd .. $ hg clone 'repo#foo' clone adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files updating to branch foo 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg --cwd clone heads changeset: 1:cd2a86ecc814 branch: foo tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: change a changeset: 0:1f0dee641bb7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add a $ hg --cwd clone parents changeset: 1:cd2a86ecc814 branch: foo tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: change a $ cat clone/.hg/hgrc # example repository config (see "hg help config" for more info) [paths] default = $TESTTMP/repo#foo (glob) # path aliases to other clones of this repo in URLs or filesystem paths # (see "hg help config.paths" for more info) # # default-push = ssh://jdoe@example.net/hg/jdoes-fork # my-fork = ssh://jdoe@example.net/hg/jdoes-fork # my-clone = /home/jdoe/jdoes-clone [ui] # name and email (local to this repository, optional), e.g. 
# username = Jane Doe Changing original repo: $ cd repo $ echo >> a $ hg ci -m 'new head of branch foo' $ hg up -qC default $ echo bar > bar $ hg ci -qAm 'add bar' $ hg log changeset: 3:4cd725637392 tag: tip parent: 0:1f0dee641bb7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add bar changeset: 2:faba9097cad4 branch: foo user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: new head of branch foo changeset: 1:cd2a86ecc814 branch: foo user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: change a changeset: 0:1f0dee641bb7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add a $ hg -q outgoing '../clone' 2:faba9097cad4 3:4cd725637392 $ hg summary --remote --config paths.default='../clone' parent: 3:4cd725637392 tip add bar branch: default commit: (clean) update: (current) phases: 4 draft remote: 2 outgoing $ hg -q outgoing '../clone#foo' 2:faba9097cad4 $ hg summary --remote --config paths.default='../clone#foo' parent: 3:4cd725637392 tip add bar branch: default commit: (clean) update: (current) phases: 4 draft remote: 1 outgoing $ hg -q --cwd ../clone incoming '../repo#foo' 2:faba9097cad4 $ hg --cwd ../clone summary --remote --config paths.default='../repo#foo' parent: 1:cd2a86ecc814 tip change a branch: foo commit: (clean) update: (current) remote: 1 or more incoming $ hg -q push '../clone#foo' $ hg --cwd ../clone heads changeset: 2:faba9097cad4 branch: foo tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: new head of branch foo changeset: 0:1f0dee641bb7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add a $ hg -q --cwd ../clone incoming '../repo#foo' [1] $ hg --cwd ../clone summary --remote --config paths.default='../repo#foo' parent: 1:cd2a86ecc814 change a branch: foo commit: (clean) update: 1 new changesets (update) remote: (synced) $ cd .. 
$ cd clone $ hg rollback repository tip rolled back to revision 1 (undo push) $ hg -q incoming 2:faba9097cad4 $ hg -q pull $ hg heads changeset: 2:faba9097cad4 branch: foo tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: new head of branch foo changeset: 0:1f0dee641bb7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add a Pull should not have updated: $ hg parents -q 1:cd2a86ecc814 Going back to the default branch: $ hg up -C 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg parents changeset: 0:1f0dee641bb7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add a No new revs, no update: $ hg pull -qu $ hg parents -q 0:1f0dee641bb7 $ hg rollback repository tip rolled back to revision 1 (undo pull) $ hg parents -q 0:1f0dee641bb7 Pull -u takes us back to branch foo: $ hg pull -qu $ hg parents changeset: 2:faba9097cad4 branch: foo tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: new head of branch foo $ hg rollback repository tip rolled back to revision 1 (undo pull) working directory now based on revision 0 $ hg up -C 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg parents -q 0:1f0dee641bb7 $ hg heads -q 1:cd2a86ecc814 0:1f0dee641bb7 $ hg pull -qur default default $ hg parents changeset: 3:4cd725637392 tag: tip parent: 0:1f0dee641bb7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add bar $ hg heads changeset: 3:4cd725637392 tag: tip parent: 0:1f0dee641bb7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add bar changeset: 2:faba9097cad4 branch: foo user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: new head of branch foo Test handling of invalid urls $ hg id http://foo/?bar abort: unsupported URL component: "bar" [255] $ cd .. 
Test handling common incoming revisions between "default" and "default-push" $ hg -R clone rollback repository tip rolled back to revision 1 (undo pull) working directory now based on revision 0 $ cd repo $ hg update -q -C default $ echo modified >> bar $ hg commit -m "new head to push current default head" $ hg -q push -r ".^1" '../clone' $ hg -q outgoing '../clone' 2:faba9097cad4 4:d515801a8f3d $ hg summary --remote --config paths.default='../clone#default' --config paths.default-push='../clone#foo' parent: 4:d515801a8f3d tip new head to push current default head branch: default commit: (clean) update: (current) phases: 1 draft remote: 1 outgoing $ hg summary --remote --config paths.default='../clone#foo' --config paths.default-push='../clone' parent: 4:d515801a8f3d tip new head to push current default head branch: default commit: (clean) update: (current) phases: 1 draft remote: 2 outgoing $ hg summary --remote --config paths.default='../clone' --config paths.default-push='../clone#foo' parent: 4:d515801a8f3d tip new head to push current default head branch: default commit: (clean) update: (current) phases: 1 draft remote: 1 outgoing $ hg clone -q -r 0 . ../another $ hg -q outgoing '../another#default' 3:4cd725637392 4:d515801a8f3d $ hg summary --remote --config paths.default='../another#default' --config paths.default-push='../clone#default' parent: 4:d515801a8f3d tip new head to push current default head branch: default commit: (clean) update: (current) phases: 1 draft remote: 1 outgoing $ cd .. mercurial-3.7.3/tests/test-up-local-change.t0000644000175000017500000001320612676531525020374 0ustar mpmmpm00000000000000 $ HGMERGE=true; export HGMERGE $ hg init r1 $ cd r1 $ echo a > a $ hg addremove adding a $ hg commit -m "1" $ hg clone . 
../r2 updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd ../r2 $ hg up 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo abc > a $ hg diff --nodates diff -r c19d34741b0a a --- a/a +++ b/a @@ -1,1 +1,1 @@ -a +abc $ cd ../r1 $ echo b > b $ echo a2 > a $ hg addremove adding b $ hg commit -m "2" $ cd ../r2 $ hg -q pull ../r1 $ hg status M a $ hg parents changeset: 0:c19d34741b0a user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1 $ hg --debug up searching for copies back to rev 1 unmatched files in other: b resolving manifests branchmerge: False, force: False, partial: False ancestor: c19d34741b0a, local: c19d34741b0a+, remote: 1e71731e6fbb preserving a for resolve of a b: remote created -> g getting b a: versions differ -> m (premerge) picked tool 'true' for a (binary False symlink False changedelete False) merging a my a@c19d34741b0a+ other a@1e71731e6fbb ancestor a@c19d34741b0a a: versions differ -> m (merge) picked tool 'true' for a (binary False symlink False changedelete False) my a@c19d34741b0a+ other a@1e71731e6fbb ancestor a@c19d34741b0a launching merge tool: true *$TESTTMP/r2/a* * * (glob) merge tool returned: 0 1 files updated, 1 files merged, 0 files removed, 0 files unresolved $ hg parents changeset: 1:1e71731e6fbb tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 $ hg --debug up 0 resolving manifests branchmerge: False, force: False, partial: False ancestor: 1e71731e6fbb, local: 1e71731e6fbb+, remote: c19d34741b0a preserving a for resolve of a b: other deleted -> r removing b a: versions differ -> m (premerge) picked tool 'true' for a (binary False symlink False changedelete False) merging a my a@1e71731e6fbb+ other a@c19d34741b0a ancestor a@1e71731e6fbb a: versions differ -> m (merge) picked tool 'true' for a (binary False symlink False changedelete False) my a@1e71731e6fbb+ other a@c19d34741b0a ancestor a@1e71731e6fbb launching merge tool: true 
*$TESTTMP/r2/a* * * (glob) merge tool returned: 0 0 files updated, 1 files merged, 1 files removed, 0 files unresolved $ hg parents changeset: 0:c19d34741b0a user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1 $ hg parents changeset: 0:c19d34741b0a user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1 $ hg --debug up searching for copies back to rev 1 unmatched files in other: b resolving manifests branchmerge: False, force: False, partial: False ancestor: c19d34741b0a, local: c19d34741b0a+, remote: 1e71731e6fbb preserving a for resolve of a b: remote created -> g getting b a: versions differ -> m (premerge) picked tool 'true' for a (binary False symlink False changedelete False) merging a my a@c19d34741b0a+ other a@1e71731e6fbb ancestor a@c19d34741b0a a: versions differ -> m (merge) picked tool 'true' for a (binary False symlink False changedelete False) my a@c19d34741b0a+ other a@1e71731e6fbb ancestor a@c19d34741b0a launching merge tool: true *$TESTTMP/r2/a* * * (glob) merge tool returned: 0 1 files updated, 1 files merged, 0 files removed, 0 files unresolved $ hg parents changeset: 1:1e71731e6fbb tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 $ hg -v history changeset: 1:1e71731e6fbb tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 files: a b description: 2 changeset: 0:c19d34741b0a user: test date: Thu Jan 01 00:00:00 1970 +0000 files: a description: 1 $ hg diff --nodates diff -r 1e71731e6fbb a --- a/a +++ b/a @@ -1,1 +1,1 @@ -a2 +abc create a second head $ cd ../r1 $ hg up 0 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo b2 > b $ echo a3 > a $ hg addremove adding b $ hg commit -m "3" created new head $ cd ../r2 $ hg -q pull ../r1 $ hg status M a $ hg parents changeset: 1:1e71731e6fbb user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 2 $ hg --debug up abort: uncommitted changes (commit and merge, or update --clean to discard changes) [255] test conflicting untracked files $ hg up -qC 0 
$ echo untracked > b $ hg st ? b $ hg up 1 b: untracked file differs abort: untracked files in working directory differ from files in requested revision [255] $ rm b test conflicting untracked ignored file $ hg up -qC 0 $ echo ignored > .hgignore $ hg add .hgignore $ hg ci -m 'add .hgignore' created new head $ echo ignored > ignored $ hg add ignored $ hg ci -m 'add ignored file' $ hg up -q 'desc("add .hgignore")' $ echo untracked > ignored $ hg st $ hg up 'desc("add ignored file")' ignored: untracked file differs abort: untracked files in working directory differ from files in requested revision [255] test a local add $ cd .. $ hg init a $ hg init b $ echo a > a/a $ echo a > b/a $ hg --cwd a commit -A -m a adding a $ cd b $ hg add a $ hg pull -u ../a pulling from ../a requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg st $ cd .. mercurial-3.7.3/tests/hghave.py0000644000175000017500000003233512676531525016113 0ustar mpmmpm00000000000000from __future__ import absolute_import import errno import os import re import socket import stat import subprocess import sys import tempfile tempprefix = 'hg-hghave-' checks = { "true": (lambda: True, "yak shaving"), "false": (lambda: False, "nail clipper"), } def check(name, desc): def decorator(func): checks[name] = (func, desc) return func return decorator def checkfeatures(features): result = { 'error': [], 'missing': [], 'skipped': [], } for feature in features: negate = feature.startswith('no-') if negate: feature = feature[3:] if feature not in checks: result['missing'].append(feature) continue check, desc = checks[feature] try: available = check() except Exception: result['error'].append('hghave check failed: %s' % feature) continue if not negate and not available: result['skipped'].append('missing feature: %s' % desc) elif negate and available: result['skipped'].append('system 
supports %s' % desc) return result def require(features): """Require that features are available, exiting if not.""" result = checkfeatures(features) for missing in result['missing']: sys.stderr.write('skipped: unknown feature: %s\n' % missing) for msg in result['skipped']: sys.stderr.write('skipped: %s\n' % msg) for msg in result['error']: sys.stderr.write('%s\n' % msg) if result['missing']: sys.exit(2) if result['skipped'] or result['error']: sys.exit(1) def matchoutput(cmd, regexp, ignorestatus=False): """Return the match object if cmd executes successfully and its output is matched by the supplied regular expression. """ r = re.compile(regexp) try: p = subprocess.Popen( cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) except OSError as e: if e.errno != errno.ENOENT: raise ret = -1 ret = p.wait() s = p.stdout.read() return (ignorestatus or not ret) and r.search(s) @check("baz", "GNU Arch baz client") def has_baz(): return matchoutput('baz --version 2>&1', r'baz Bazaar version') @check("bzr", "Canonical's Bazaar client") def has_bzr(): try: import bzrlib return bzrlib.__doc__ is not None except ImportError: return False @check("bzr114", "Canonical's Bazaar client >= 1.14") def has_bzr114(): try: import bzrlib return (bzrlib.__doc__ is not None and bzrlib.version_info[:2] >= (1, 14)) except ImportError: return False @check("cvs", "cvs client/server") def has_cvs(): re = r'Concurrent Versions System.*?server' return matchoutput('cvs --version 2>&1', re) and not has_msys() @check("cvs112", "cvs client/server >= 1.12") def has_cvs112(): re = r'Concurrent Versions System \(CVS\) 1.12.*?server' return matchoutput('cvs --version 2>&1', re) and not has_msys() @check("darcs", "darcs client") def has_darcs(): return matchoutput('darcs --version', r'2\.[2-9]', True) @check("mtn", "monotone client (>= 1.0)") def has_mtn(): return matchoutput('mtn --version', r'monotone', True) and not matchoutput( 'mtn --version', r'monotone 0\.', True) 
@check("eol-in-paths", "end-of-lines in paths") def has_eol_in_paths(): try: fd, path = tempfile.mkstemp(dir='.', prefix=tempprefix, suffix='\n\r') os.close(fd) os.remove(path) return True except (IOError, OSError): return False @check("execbit", "executable bit") def has_executablebit(): try: EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH fh, fn = tempfile.mkstemp(dir='.', prefix=tempprefix) try: os.close(fh) m = os.stat(fn).st_mode & 0o777 new_file_has_exec = m & EXECFLAGS os.chmod(fn, m ^ EXECFLAGS) exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0o777) == m) finally: os.unlink(fn) except (IOError, OSError): # we don't care, the user probably won't be able to commit anyway return False return not (new_file_has_exec or exec_flags_cannot_flip) @check("icasefs", "case insensitive file system") def has_icasefs(): # Stolen from mercurial.util fd, path = tempfile.mkstemp(dir='.', prefix=tempprefix) os.close(fd) try: s1 = os.stat(path) d, b = os.path.split(path) p2 = os.path.join(d, b.upper()) if path == p2: p2 = os.path.join(d, b.lower()) try: s2 = os.stat(p2) return s2 == s1 except OSError: return False finally: os.remove(path) @check("fifo", "named pipes") def has_fifo(): if getattr(os, "mkfifo", None) is None: return False name = tempfile.mktemp(dir='.', prefix=tempprefix) try: os.mkfifo(name) os.unlink(name) return True except OSError: return False @check("killdaemons", 'killdaemons.py support') def has_killdaemons(): return True @check("cacheable", "cacheable filesystem") def has_cacheable_fs(): from mercurial import util fd, path = tempfile.mkstemp(dir='.', prefix=tempprefix) os.close(fd) try: return util.cachestat(path).cacheable() finally: os.remove(path) @check("lsprof", "python lsprof module") def has_lsprof(): try: import _lsprof _lsprof.Profiler # silence unused import warning return True except ImportError: return False @check("gettext", "GNU Gettext (msgfmt)") def has_gettext(): return matchoutput('msgfmt --version', 'GNU gettext-tools') 
@check("git", "git command line client") def has_git(): return matchoutput('git --version 2>&1', r'^git version') @check("docutils", "Docutils text processing library") def has_docutils(): try: from docutils.core import publish_cmdline publish_cmdline # silence unused import return True except ImportError: return False def getsvnversion(): m = matchoutput('svn --version --quiet 2>&1', r'^(\d+)\.(\d+)') if not m: return (0, 0) return (int(m.group(1)), int(m.group(2))) @check("svn15", "subversion client and admin tools >= 1.5") def has_svn15(): return getsvnversion() >= (1, 5) @check("svn13", "subversion client and admin tools >= 1.3") def has_svn13(): return getsvnversion() >= (1, 3) @check("svn", "subversion client and admin tools") def has_svn(): return matchoutput('svn --version 2>&1', r'^svn, version') and \ matchoutput('svnadmin --version 2>&1', r'^svnadmin, version') @check("svn-bindings", "subversion python bindings") def has_svn_bindings(): try: import svn.core version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR if version < (1, 4): return False return True except ImportError: return False @check("p4", "Perforce server and client") def has_p4(): return (matchoutput('p4 -V', r'Rev\. P4/') and matchoutput('p4d -V', r'Rev\. 
P4D/')) @check("symlink", "symbolic links") def has_symlink(): if getattr(os, "symlink", None) is None: return False name = tempfile.mktemp(dir='.', prefix=tempprefix) try: os.symlink(".", name) os.unlink(name) return True except (OSError, AttributeError): return False @check("hardlink", "hardlinks") def has_hardlink(): from mercurial import util fh, fn = tempfile.mkstemp(dir='.', prefix=tempprefix) os.close(fh) name = tempfile.mktemp(dir='.', prefix=tempprefix) try: util.oslink(fn, name) os.unlink(name) return True except OSError: return False finally: os.unlink(fn) @check("tla", "GNU Arch tla client") def has_tla(): return matchoutput('tla --version 2>&1', r'The GNU Arch Revision') @check("gpg", "gpg client") def has_gpg(): return matchoutput('gpg --version 2>&1', r'GnuPG') @check("unix-permissions", "unix-style permissions") def has_unix_permissions(): d = tempfile.mkdtemp(dir='.', prefix=tempprefix) try: fname = os.path.join(d, 'foo') for umask in (0o77, 0o07, 0o22): os.umask(umask) f = open(fname, 'w') f.close() mode = os.stat(fname).st_mode os.unlink(fname) if mode & 0o777 != ~umask & 0o666: return False return True finally: os.rmdir(d) @check("unix-socket", "AF_UNIX socket family") def has_unix_socket(): return getattr(socket, 'AF_UNIX', None) is not None @check("root", "root permissions") def has_root(): return getattr(os, 'geteuid', None) and os.geteuid() == 0 @check("pyflakes", "Pyflakes python linter") def has_pyflakes(): return matchoutput("sh -c \"echo 'import re' 2>&1 | pyflakes\"", r":1: 're' imported but unused", True) @check("pygments", "Pygments source highlighting library") def has_pygments(): try: import pygments pygments.highlight # silence unused import warning return True except ImportError: return False @check("json", "some json module available") def has_json(): try: import json json.dumps return True except ImportError: try: import simplejson as json json.dumps return True except ImportError: pass return False @check("outer-repo", "outer 
repo") def has_outer_repo(): # failing for other reasons than 'no repo' imply that there is a repo return not matchoutput('hg root 2>&1', r'abort: no repository found', True) @check("ssl", ("(python >= 2.6 ssl module and python OpenSSL) " "OR python >= 2.7.9 ssl")) def has_ssl(): try: import ssl if getattr(ssl, 'create_default_context', False): return True import OpenSSL OpenSSL.SSL.Context return True except ImportError: return False @check("sslcontext", "python >= 2.7.9 ssl") def has_sslcontext(): try: import ssl ssl.SSLContext return True except (ImportError, AttributeError): return False @check("defaultcacerts", "can verify SSL certs by system's CA certs store") def has_defaultcacerts(): from mercurial import sslutil return sslutil._defaultcacerts() != '!' @check("windows", "Windows") def has_windows(): return os.name == 'nt' @check("system-sh", "system() uses sh") def has_system_sh(): return os.name != 'nt' @check("serve", "platform and python can manage 'hg serve -d'") def has_serve(): return os.name != 'nt' # gross approximation @check("test-repo", "running tests from repository") def has_test_repo(): t = os.environ["TESTDIR"] return os.path.isdir(os.path.join(t, "..", ".hg")) @check("tic", "terminfo compiler and curses module") def has_tic(): try: import curses curses.COLOR_BLUE return matchoutput('test -x "`which tic`"', '') except ImportError: return False @check("msys", "Windows with MSYS") def has_msys(): return os.getenv('MSYSTEM') @check("aix", "AIX") def has_aix(): return sys.platform.startswith("aix") @check("osx", "OS X") def has_osx(): return sys.platform == 'darwin' @check("docker", "docker support") def has_docker(): pat = r'A self-sufficient runtime for linux containers\.' 
if matchoutput('docker --help', pat): if 'linux' not in sys.platform: # TODO: in theory we should be able to test docker-based # package creation on non-linux using boot2docker, but in # practice that requires extra coordination to make sure # $TESTTEMP is going to be visible at the same path to the # boot2docker VM. If we figure out how to verify that, we # can use the following instead of just saying False: # return 'DOCKER_HOST' in os.environ return False return True return False @check("debhelper", "debian packaging tools") def has_debhelper(): dpkg = matchoutput('dpkg --version', "Debian `dpkg' package management program") dh = matchoutput('dh --help', 'dh is a part of debhelper.', ignorestatus=True) dh_py2 = matchoutput('dh_python2 --help', 'other supported Python versions') return dpkg and dh and dh_py2 @check("absimport", "absolute_import in __future__") def has_absimport(): import __future__ from mercurial import util return util.safehasattr(__future__, "absolute_import") @check("py3k", "running with Python 3.x") def has_py3k(): return 3 == sys.version_info[0] @check("pure", "running with pure Python code") def has_pure(): return any([ os.environ.get("HGMODULEPOLICY") == "py", os.environ.get("HGTEST_RUN_TESTS_PURE") == "--pure", ]) @check("slow", "allow slow tests") def has_slow(): return os.environ.get('HGTEST_SLOW') == 'slow' @check("hypothesis", "is Hypothesis installed") def has_hypothesis(): try: import hypothesis hypothesis.given return True except ImportError: return False mercurial-3.7.3/tests/test-mq-qrefresh.t0000644000175000017500000002114012676531525017663 0ustar mpmmpm00000000000000 $ cat <> $HGRCPATH > [extensions] > mq = > [diff] > nodates = 1 > EOF $ hg init a $ cd a $ mkdir 1 2 $ echo 'base' > 1/base $ echo 'base' > 2/base $ hg ci -Ambase adding 1/base adding 2/base $ hg qnew -mmqbase mqbase $ echo 'patched' > 1/base $ echo 'patched' > 2/base $ hg qrefresh $ hg qdiff diff -r e7af5904b465 1/base --- a/1/base +++ b/1/base @@ -1,1 +1,1 @@ 
-base +patched diff -r e7af5904b465 2/base --- a/2/base +++ b/2/base @@ -1,1 +1,1 @@ -base +patched $ hg qdiff . diff -r e7af5904b465 1/base --- a/1/base +++ b/1/base @@ -1,1 +1,1 @@ -base +patched diff -r e7af5904b465 2/base --- a/2/base +++ b/2/base @@ -1,1 +1,1 @@ -base +patched $ cat .hg/patches/mqbase # HG changeset patch # Parent e7af5904b465cd1f4f3cf6b26fe14e8db6f63eaa mqbase diff -r e7af5904b465 1/base --- a/1/base +++ b/1/base @@ -1,1 +1,1 @@ -base +patched diff -r e7af5904b465 2/base --- a/2/base +++ b/2/base @@ -1,1 +1,1 @@ -base +patched $ echo 'patched again' > base $ hg qrefresh 1 $ hg qdiff diff -r e7af5904b465 1/base --- a/1/base +++ b/1/base @@ -1,1 +1,1 @@ -base +patched diff -r e7af5904b465 2/base --- a/2/base +++ b/2/base @@ -1,1 +1,1 @@ -base +patched $ hg qdiff . diff -r e7af5904b465 1/base --- a/1/base +++ b/1/base @@ -1,1 +1,1 @@ -base +patched diff -r e7af5904b465 2/base --- a/2/base +++ b/2/base @@ -1,1 +1,1 @@ -base +patched $ cat .hg/patches/mqbase # HG changeset patch # Parent e7af5904b465cd1f4f3cf6b26fe14e8db6f63eaa mqbase diff -r e7af5904b465 1/base --- a/1/base +++ b/1/base @@ -1,1 +1,1 @@ -base +patched qrefresh . in subdir: $ ( cd 1 ; hg qrefresh . ) $ hg qdiff diff -r e7af5904b465 1/base --- a/1/base +++ b/1/base @@ -1,1 +1,1 @@ -base +patched diff -r e7af5904b465 2/base --- a/2/base +++ b/2/base @@ -1,1 +1,1 @@ -base +patched $ hg qdiff . diff -r e7af5904b465 1/base --- a/1/base +++ b/1/base @@ -1,1 +1,1 @@ -base +patched diff -r e7af5904b465 2/base --- a/2/base +++ b/2/base @@ -1,1 +1,1 @@ -base +patched $ cat .hg/patches/mqbase # HG changeset patch # Parent e7af5904b465cd1f4f3cf6b26fe14e8db6f63eaa mqbase diff -r e7af5904b465 1/base --- a/1/base +++ b/1/base @@ -1,1 +1,1 @@ -base +patched qrefresh in hg-root again: $ hg qrefresh $ hg qdiff diff -r e7af5904b465 1/base --- a/1/base +++ b/1/base @@ -1,1 +1,1 @@ -base +patched diff -r e7af5904b465 2/base --- a/2/base +++ b/2/base @@ -1,1 +1,1 @@ -base +patched $ hg qdiff . 
diff -r e7af5904b465 1/base --- a/1/base +++ b/1/base @@ -1,1 +1,1 @@ -base +patched diff -r e7af5904b465 2/base --- a/2/base +++ b/2/base @@ -1,1 +1,1 @@ -base +patched $ cat .hg/patches/mqbase # HG changeset patch # Parent e7af5904b465cd1f4f3cf6b26fe14e8db6f63eaa mqbase diff -r e7af5904b465 1/base --- a/1/base +++ b/1/base @@ -1,1 +1,1 @@ -base +patched diff -r e7af5904b465 2/base --- a/2/base +++ b/2/base @@ -1,1 +1,1 @@ -base +patched qrefresh --short tests: $ echo 'orphan' > orphanchild $ hg add orphanchild $ hg qrefresh nonexistentfilename # clear patch nonexistentfilename: * (glob) $ hg diff -c qtip $ hg qrefresh --short 1/base $ hg qrefresh --short 2/base $ hg qdiff diff -r e7af5904b465 1/base --- a/1/base +++ b/1/base @@ -1,1 +1,1 @@ -base +patched diff -r e7af5904b465 2/base --- a/2/base +++ b/2/base @@ -1,1 +1,1 @@ -base +patched diff -r e7af5904b465 orphanchild --- /dev/null +++ b/orphanchild @@ -0,0 +1,1 @@ +orphan $ cat .hg/patches/mqbase # HG changeset patch # Parent e7af5904b465cd1f4f3cf6b26fe14e8db6f63eaa mqbase diff -r e7af5904b465 1/base --- a/1/base +++ b/1/base @@ -1,1 +1,1 @@ -base +patched diff -r e7af5904b465 2/base --- a/2/base +++ b/2/base @@ -1,1 +1,1 @@ -base +patched $ hg st A orphanchild ? base diff shows what is not in patch: $ hg diff diff -r ???????????? orphanchild (glob) --- /dev/null +++ b/orphanchild @@ -0,0 +1,1 @@ +orphan Before starting exclusive tests: $ cat .hg/patches/mqbase # HG changeset patch # Parent e7af5904b465cd1f4f3cf6b26fe14e8db6f63eaa mqbase diff -r e7af5904b465 1/base --- a/1/base +++ b/1/base @@ -1,1 +1,1 @@ -base +patched diff -r e7af5904b465 2/base --- a/2/base +++ b/2/base @@ -1,1 +1,1 @@ -base +patched Exclude 2/base: $ hg qref -s -X 2/base $ cat .hg/patches/mqbase # HG changeset patch # Parent e7af5904b465cd1f4f3cf6b26fe14e8db6f63eaa mqbase diff -r e7af5904b465 1/base --- a/1/base +++ b/1/base @@ -1,1 +1,1 @@ -base +patched status shows 2/base as dirty: $ hg status M 2/base A orphanchild ? 
base Remove 1/base and add 2/base again but not orphanchild: $ hg qref -s -X orphanchild -X 1/base 2/base orphanchild $ cat .hg/patches/mqbase # HG changeset patch # Parent e7af5904b465cd1f4f3cf6b26fe14e8db6f63eaa mqbase diff -r e7af5904b465 2/base --- a/2/base +++ b/2/base @@ -1,1 +1,1 @@ -base +patched Add 1/base with include filter - and thus remove 2/base from patch: $ hg qref -s -I 1/ o* */* $ cat .hg/patches/mqbase # HG changeset patch # Parent e7af5904b465cd1f4f3cf6b26fe14e8db6f63eaa mqbase diff -r e7af5904b465 1/base --- a/1/base +++ b/1/base @@ -1,1 +1,1 @@ -base +patched $ cd .. Test qrefresh --git losing copy metadata: $ hg init repo $ cd repo $ echo "[diff]" >> .hg/hgrc $ echo "git=True" >> .hg/hgrc $ echo a > a $ hg ci -Am adda adding a $ hg copy a ab $ echo b >> ab $ hg copy a ac $ echo c >> ac Capture changes: $ hg qnew -f p1 $ hg qdiff diff --git a/a b/ab copy from a copy to ab --- a/a +++ b/ab @@ -1,1 +1,2 @@ a +b diff --git a/a b/ac copy from a copy to ac --- a/a +++ b/ac @@ -1,1 +1,2 @@ a +c Refresh and check changes again: $ hg qrefresh $ hg qdiff diff --git a/a b/ab copy from a copy to ab --- a/a +++ b/ab @@ -1,1 +1,2 @@ a +b diff --git a/a b/ac copy from a copy to ac --- a/a +++ b/ac @@ -1,1 +1,2 @@ a +c $ cd .. Issue1441: qrefresh confused after hg rename: $ hg init repo-1441 $ cd repo-1441 $ echo a > a $ hg add a $ hg qnew -f p $ hg mv a b $ hg qrefresh $ hg qdiff diff -r 000000000000 b --- /dev/null +++ b/b @@ -0,0 +1,1 @@ +a $ cd .. Issue2025: qrefresh does not honor filtering options when tip != qtip: $ hg init repo-2025 $ cd repo-2025 $ echo a > a $ echo b > b $ hg ci -qAm addab $ echo a >> a $ echo b >> b $ hg qnew -f patch $ hg up -qC 0 $ echo c > c $ hg ci -qAm addc $ hg up -qC 1 refresh with tip != qtip: $ hg --config diff.nodates=1 qrefresh -I b $ hg st M a $ cat b b b $ cat .hg/patches/patch # HG changeset patch # Parent 1a60229be7ac3e4a7f647508e99b87bef1f03593 diff -r 1a60229be7ac b --- a/b +++ b/b @@ -1,1 +1,2 @@ b +b $ cd .. 
Issue1441 with git patches: $ hg init repo-1441-git $ cd repo-1441-git $ echo "[diff]" >> .hg/hgrc $ echo "git=True" >> .hg/hgrc $ echo a > a $ hg add a $ hg qnew -f p $ hg mv a b $ hg qrefresh $ hg qdiff --nodates diff --git a/b b/b new file mode 100644 --- /dev/null +++ b/b @@ -0,0 +1,1 @@ +a $ cd .. Refresh with bad usernames. Mercurial used to abort on bad usernames, but only after writing the bad name into the patch. $ hg init bad-usernames $ cd bad-usernames $ touch a $ hg add a $ hg qnew a $ hg qrefresh -u 'foo > bar' transaction abort! rollback completed qrefresh interrupted while patch was popped! (revert --all, qpush to recover) abort: username 'foo\nbar' contains a newline! [255] $ rm a $ cat .hg/patches/a # HG changeset patch # Parent 0000000000000000000000000000000000000000 diff --git a/a b/a new file mode 100644 $ hg qpush applying a now at: a $ hg qrefresh -u ' ' transaction abort! rollback completed qrefresh interrupted while patch was popped! (revert --all, qpush to recover) abort: empty username! [255] $ cat .hg/patches/a # HG changeset patch # Parent 0000000000000000000000000000000000000000 diff --git a/a b/a new file mode 100644 $ cd .. Refresh with phase data: $ cd repo $ echo 'babar' >> a $ hg qnew -m 'update a' p2.diff $ hg phase p2.diff 2: draft $ echo 'beber' >> a $ hg qref $ hg phase p2.diff 2: draft $ hg phase --force --secret p2.diff $ echo 'bibir' >> a $ hg qref $ hg phase p2.diff 2: secret $ cd .. 
mercurial-3.7.3/tests/test-diff-color.t0000644000175000017500000001061312676531525017460 0ustar mpmmpm00000000000000Setup $ cat <> $HGRCPATH > [color] > mode = ansi > [extensions] > color = > EOF $ hg init repo $ cd repo $ cat > a < c > c > a > a > b > a > a > c > c > EOF $ hg ci -Am adda adding a $ cat > a < c > c > a > a > dd > a > a > c > c > EOF default context $ hg diff --nodates --color=always \x1b[0;1mdiff -r cf9f4ba66af2 a\x1b[0m (esc) \x1b[0;31;1m--- a/a\x1b[0m (esc) \x1b[0;32;1m+++ b/a\x1b[0m (esc) \x1b[0;35m@@ -2,7 +2,7 @@\x1b[0m (esc) c a a \x1b[0;31m-b\x1b[0m (esc) \x1b[0;32m+dd\x1b[0m (esc) a a c --unified=2 $ hg diff --nodates -U 2 --color=always \x1b[0;1mdiff -r cf9f4ba66af2 a\x1b[0m (esc) \x1b[0;31;1m--- a/a\x1b[0m (esc) \x1b[0;32;1m+++ b/a\x1b[0m (esc) \x1b[0;35m@@ -3,5 +3,5 @@\x1b[0m (esc) a a \x1b[0;31m-b\x1b[0m (esc) \x1b[0;32m+dd\x1b[0m (esc) a a diffstat $ hg diff --stat --color=always a | 2 \x1b[0;32m+\x1b[0m\x1b[0;31m-\x1b[0m (esc) 1 files changed, 1 insertions(+), 1 deletions(-) $ cat <> $HGRCPATH > record = > [ui] > interactive = true > [diff] > git = True > EOF #if execbit record $ chmod +x a $ hg record --color=always -m moda a < y > y > EOF \x1b[0;1mdiff --git a/a b/a\x1b[0m (esc) \x1b[0;36;1mold mode 100644\x1b[0m (esc) \x1b[0;36;1mnew mode 100755\x1b[0m (esc) 1 hunks, 1 lines changed \x1b[0;33mexamine changes to 'a'? [Ynesfdaq?]\x1b[0m y (esc) \x1b[0;35m@@ -2,7 +2,7 @@ c\x1b[0m (esc) c a a \x1b[0;31m-b\x1b[0m (esc) \x1b[0;32m+dd\x1b[0m (esc) a a c \x1b[0;33mrecord this change to 'a'? [Ynesfdaq?]\x1b[0m y (esc) $ echo "[extensions]" >> $HGRCPATH $ echo "mq=" >> $HGRCPATH $ hg rollback repository tip rolled back to revision 0 (undo commit) working directory now based on revision 0 qrecord $ hg qrecord --color=always -m moda patch < y > y > EOF \x1b[0;1mdiff --git a/a b/a\x1b[0m (esc) \x1b[0;36;1mold mode 100644\x1b[0m (esc) \x1b[0;36;1mnew mode 100755\x1b[0m (esc) 1 hunks, 1 lines changed \x1b[0;33mexamine changes to 'a'? 
[Ynesfdaq?]\x1b[0m y (esc) \x1b[0;35m@@ -2,7 +2,7 @@ c\x1b[0m (esc) c a a \x1b[0;31m-b\x1b[0m (esc) \x1b[0;32m+dd\x1b[0m (esc) a a c \x1b[0;33mrecord this change to 'a'? [Ynesfdaq?]\x1b[0m y (esc) $ hg qpop -a popping patch patch queue now empty #endif issue3712: test colorization of subrepo diff $ hg init sub $ echo b > sub/b $ hg -R sub commit -Am 'create sub' adding b $ echo 'sub = sub' > .hgsub $ hg add .hgsub $ hg commit -m 'add subrepo sub' $ echo aa >> a $ echo bb >> sub/b $ hg diff --color=always -S \x1b[0;1mdiff --git a/a b/a\x1b[0m (esc) \x1b[0;31;1m--- a/a\x1b[0m (esc) \x1b[0;32;1m+++ b/a\x1b[0m (esc) \x1b[0;35m@@ -7,3 +7,4 @@\x1b[0m (esc) a c c \x1b[0;32m+aa\x1b[0m (esc) \x1b[0;1mdiff --git a/sub/b b/sub/b\x1b[0m (esc) \x1b[0;31;1m--- a/sub/b\x1b[0m (esc) \x1b[0;32;1m+++ b/sub/b\x1b[0m (esc) \x1b[0;35m@@ -1,1 +1,2 @@\x1b[0m (esc) b \x1b[0;32m+bb\x1b[0m (esc) test tabs $ cat >> a < one tab > two tabs > end tab > mid tab > all tabs > EOF $ hg diff --nodates --color=always \x1b[0;1mdiff --git a/a b/a\x1b[0m (esc) \x1b[0;31;1m--- a/a\x1b[0m (esc) \x1b[0;32;1m+++ b/a\x1b[0m (esc) \x1b[0;35m@@ -7,3 +7,9 @@\x1b[0m (esc) a c c \x1b[0;32m+aa\x1b[0m (esc) \x1b[0;32m+\x1b[0m \x1b[0;32mone tab\x1b[0m (esc) \x1b[0;32m+\x1b[0m \x1b[0;32mtwo tabs\x1b[0m (esc) \x1b[0;32m+end tab\x1b[0m\x1b[0;1;41m \x1b[0m (esc) \x1b[0;32m+mid\x1b[0m \x1b[0;32mtab\x1b[0m (esc) \x1b[0;32m+\x1b[0m \x1b[0;32mall\x1b[0m \x1b[0;32mtabs\x1b[0m\x1b[0;1;41m \x1b[0m (esc) $ echo "[color]" >> $HGRCPATH $ echo "diff.tab = bold magenta" >> $HGRCPATH $ hg diff --nodates --color=always \x1b[0;1mdiff --git a/a b/a\x1b[0m (esc) \x1b[0;31;1m--- a/a\x1b[0m (esc) \x1b[0;32;1m+++ b/a\x1b[0m (esc) \x1b[0;35m@@ -7,3 +7,9 @@\x1b[0m (esc) a c c \x1b[0;32m+aa\x1b[0m (esc) \x1b[0;32m+\x1b[0m\x1b[0;1;35m \x1b[0m\x1b[0;32mone tab\x1b[0m (esc) \x1b[0;32m+\x1b[0m\x1b[0;1;35m \x1b[0m\x1b[0;32mtwo tabs\x1b[0m (esc) \x1b[0;32m+end tab\x1b[0m\x1b[0;1;41m \x1b[0m (esc) \x1b[0;32m+mid\x1b[0m\x1b[0;1;35m 
\x1b[0m\x1b[0;32mtab\x1b[0m (esc) \x1b[0;32m+\x1b[0m\x1b[0;1;35m \x1b[0m\x1b[0;32mall\x1b[0m\x1b[0;1;35m \x1b[0m\x1b[0;32mtabs\x1b[0m\x1b[0;1;41m \x1b[0m (esc) $ cd .. mercurial-3.7.3/tests/bundles/0000755000175000017500000000000012676531544015726 5ustar mpmmpm00000000000000mercurial-3.7.3/tests/bundles/issue4438-r1.hg0000644000175000017500000001006712676531525020244 0ustar mpmmpm00000000000000HG10BZh91AY&SY\¤9òÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿà“î>}Â@´€ÁI€`& i0L™ &˜M¢bišhja0˜ÀÔÉêb6‚`É Œ ¦õ 4˜$‘¢d4ÐÄz#Ljz›F‰êf“eOõ=2=O yOª™ 4„ÐÄÓ@dhÓM !“M4Á2h4L@ˆ“&T‘†ô¨Ó&IM¡iê@@ 4A¡ 4F€ 4€ € 4£A‘ hÐ šmL™4zЉú’)䦀€ T‘DhÄÑ£F¤Éµ @`˜M©“)äÍSeY¥Ó"ìHÙ"ÝS×ã3ê¹48S€ÂwœáTUÎ F#1ì L7§ÅÎbyÿž{E ý)Õo7.ð %` EñäŒ|(I€B€Eñ%‰¥^úp®=Z|‹{N! 7F¢jJ¸•|Jœºc䙌4ˆÊׂk39´&—¼àÊgL@q&A"sËŸÉ¥6Œò¶¨F©RDD r×a­MžbY‹R”¢exEm¸cV¹ÁzáŒq  @À@|ŒbXýN¦ÑÿFœ„X!^‚VàfG >ãÍyLJ¢£ZŽ«îºûî+­yõH ®¼8®>ÖÁ"Ž@j:ú«ë…ï„ï‚â…cÁðîàñsœ;ñ sœáØ÷ÊÕè™çqÀUÛe¸u ÐÚ£]„:´¯IôÚƒ Qò‘ŒŽñ]·‚Õ+ª–æ¦Ý”ÂXï»JØ»¦Éá¹tVgú}+úKn ¦sËE²©¦ÁÃnÓ+òúÅ/‹Ó4Î9KO,n:u]\.—¥6étºC‡8páÝÈÄ’I$’I$’I$’I$’I$’I$’I$’I$’I$’I$’I'`Œ¦·èÜ!½Cè:Lry­xʲàã?7ÀÛ£f©,ŠM”]ÚdrE×I#ØVµo“LûÐ(I–ui]iVˆ}¹x¬‹Ü¢  ðŧÕ¦XؔӶÁ¿äï9 óˆŸpY7]˜k•ÅÑQ¨ÃDDX‘ÊrA‰^ÙilQ]¤’ÆÌ££ª¯aü7éÕEKEÅÜ_"ÊÃ=ÇæñáÄ;¯QB eѬ"†Ãµï oFF{AÞYX…iUTÜ?R…ê,mK›w›E…ÒQ¥yDº k–öT'Êô›´kÊj”tªmTs\tÚ¢Ë1JÒ¤kŸÊÂÓ8ncDÉ1*£Ó‚ A//2‰‚ A‰}Ë‚ð“aÛÅ-æ·Í[bü¥‘8º2MJ!gmŸKëgÔÆÙ(Ý/ ,èHÕ! 
–B@¢!ABL¡8h_R3¯]ÿ_íÝuÙDª.Œ†â9O¡‘uÝ5åç3 +ì}O™ö"^S¢×%+|Ò(Í»(_TF¹¡T•Š¿U˜aÛw ™c¡× »(¾3ûŠYض…ÅX•f}%û)‰÷î0«3Âræ¡¥rXU™{HHHºÊû:†‚`TÕé¼ó) 1ÓdÇž¹¬ÓÊ·Àöjù—Åù—hJ†HÒK}µ” poÐ €pÄb1€Œ@F ‰…cÄc¸Dï'™:¯$š½z@9ƺ'>Sm53½/w¶ifžiìÓM¤Õ!†'§³0º@$æ@ç̹ÝÈÅ iìÑ4öU ¤zhÞ“Zµ\}FTш¢ÙÐ÷ ©–j(¢‹4ÒÍUïzLšyî‰G8îÎÀ„À E<ÕkË5‹NÍQ¤‰ìÌØâà¹%ÏÞj÷Jœ %/±5‹4Y¦Ô­Zä2jÌ8ɺ ¬¹0Y¦•V¯5‹34p´I²FÜÉAÆ4®~æ˜`Ì!¹SDôµ4ÑLìÏ{%#&úLѲŽÔˆ;G0ÄF@·:h‡Ëj{jhµmmlÒ(£|‰3fÊÀ닊€~ÅðÇ£b °4m»^öS«lA ò½!TaWyk•U9´|€…²`×dsžþë`ðž‹Í=†óqˆˆ„7U–t–ÖA S²·»m[³µÎ*ä¡0ÅI‡j[Uô¨ÎJ­µNËpòýøô DDV™åùLnÔ:¿ìå)Mãü5¶AÓX…=ŠC_ä·ïÑ’`Wµ¡ly¸Á1 •qü?–5_yîãb¦*u<‚E8ª·‰U]Ö·¡WQ±¹ÇyPpÞcÏC‡õ»Ìó6õ’ª›ÓQÕ7ðß(”IUéØlXº ÊEþV¤kÁyjºa©j›vI"¯4ê¨÷VTTajh^Ú}¾F/évqó%ÝFÕ‰s•Ü)OÑÖq{bœD8 Ó¸øÍmK´Ì"YkK5 J°¸®ÍS†«¥”v©ü1Ž»ÕZÚWðú’ÒØ·¾ã,ÏicRˆÈÊê‚üQ,{àM¦«vØ;cb¸«. B‹Ï[Ö§Ú˜_œ¦û¦¥QAx(‡Hjœ%ÊT^Ò†Ÿ}«»X¨½'’ôn‚Mâ?u•$­´.*Œ÷Ÿñxj`œc×ÕEV+ö±íºª{ðZÞË‚‹‹ß&Ú$5ÉvV—ûz,+šÝ(´\öŸ«Îy.b..=ÈDÄËÁ í&å1‰(ÝòHPŸ ŠŠ*5ZŠŠŠŠñŒc–Ôkx‰½GÞ|P=É­r ‚YçŽÀ¬ëË®¨&·ÂªíaBoSOoßO=YN§M²êK~õ×9¬aF©ÏyôI5WX–Ýyhçø¥ZF ©‹K)ômlç#^¤‡Àey4Ë¢¢¨-’y¾XÖuÅÆRœ45ùÛ`WtCÃgžbМO)I),<†¡$ÜÅõe}ìûáK§]7´[¤' ‚nQ1?ËÎhÝe•YjÚ5aVJ6HM ”ú™T„†­Ãlæe_[Tû"™}1@fÙw¾‘dà‚[e"‘óª•ï>äã~µ®ÊªìõþÆÍLr^{Ì›¨§+ÍÒiÃaòÜ™K¿Ã"¿¬Œkþ5ì3Âð¿ºÈì+ â¶÷Xg Ã6m”ç, •µBM1ÛïX[¶y¿lÚF¹X~o]ݨD Ù¡;(P{ Oò{+»˜ç§Y×a²FÁy  hÐhPAVB(D úô!–§Àãµìëžì³”ºd,øç¨ ƒNÑ© æ3)åEꮫãÖ½51 ÇPÝÖaþ[6I<¾1 }?œí1+»¨Å]ŸÕÿ’)'õwÚ&Õ@qYÕUy}H¥)ºPîʦ¾gžò––E¶~ ³ÐUÖ÷iô%evÌ3Š’|Šëü2jâ¤ú¨Ú.Ž3÷[fõH¤u‡¼fV¦ ñ¿òìµÐè­â µ(éõõÈ]T5Ñ•\ÓMê Ø9I§e¼qå-³xèåÉ_ÔEšw]å­++›ZÙ´û\ײŠJEž`]&é]c§”¬®©Š>•½xÏ í/.ûX´5 C–Ó¤khYÖÍ6ðªÊ!Õ}-"Y‡QÙ52™(ê¾7¢Ú6. 
uhF°ì#4•w\…í(Ÿl–T£ªÔ9ª].’»+‹òü£¬è¸¸¶ BM¯MÊâ·ª ,£cgÓkSþ`aB¿,o­:±#Q­Ãx¼G^—ªÐß3o%‹mW×Ηg¦ ÒÚß:oÙ$ª2 z1§RU¿läP¸ÍòÞ–wV…R›b™UŠ %[µuÜI¢žóÂ1ËšB+œô”…á´¿V9N`UfÍú>vrT$4ÛÖs–„ƒânY¼hÖwÄ¥R¥Xg‚Œ]Ï…©oŸùMdnzA[qå}ó<Œy ÓÉ|«³À1 ËÊ1Œš5 •Y–'¢––švQ12Hˆ»^̯ 󢵫é…õ c˜¦í¼c„ËxÛ3­BqŽt[vù))Hkžûœ¯¸ï­‰dŸÂÞæ:N‹ £¯ŽÚ’ì>§Mñ>åEcT¤û<Ý¡9ŠËF÷Y&-eX——¼‹eç•âRÓªëÄ8 à¯"ÖÖ9ã4L./ÙƒVZ5©qXV jV_ºiü´¯å¸^_£·/®BލºO ܶ)§ ë4jëÈ­ôÔ4³®Æ³.‚êܨt;ú""ºÄ"bvVyËÚ}|GAEk‘±±ìÚuî£WçyÏO3OAଊj:;¹©*ÓjÕ²¬‹–®*ks¶§´O‘QuWTS¾í, ÓÎ^äõís*™kV„ûˆ¬(»Ò[Ý6…ýD´„ëÏi]÷YOxïuêªLã–°«Í£òcÇP|ÌêŒÌ° ú “rÖÆb\Ü…Èfä»ÓG;ª29’]Õ%DWßšõUö1|nå¼{ªõ{×h›øš Ö ªGˆ×4)õÏcÆQÛK£ThùJÞ)Æ{M¢Êè¾ÇMÙfÛ÷ö{ ¢Lݪo½ÛwVçö\“JÚ´¨;ëÏ­jrΗ_ßBà·¸ê2qQK­ÍÂà±­®"Ò¿ê»]нg ëºéÄêÜí¾×Ö³*…ÈcSΊ¶å"6iDRh_ ÃU\%¹ã0 ë,ñ”DúÜÒ°Š»Ë`_kñs•GY²^—¦Ñó»¯x­Â…bP]§)ÌQ[àà/NôÏ8ìJþË8í{„“Y&]·^“.£¶«¹ìž¨ˆˆ}ï]žD¹k«¨å¿5ÍŸgÚ^÷‘ EIg\£hk»”À§Y…@b”õ=²fU…û{  –H/jóP²¤ÌŠÞì+I6¦ffÚL4 ¢M­{btÒKb]Ör•g¸À£–öÁaº¤(ŠÓò[˜¦ñEñ:®«ùgÞÛ~¡¿Tã°öÑ“û¼Öô*¯¡Ÿ~Ž+ä© J湩q=–‘Ù}m2*+Hâ$”q зŠÿ®ÊmGH%[êê³õ~.£²¿2Ëú“LÁ.*ØL¢ÝÄ\]Mô¸m›ãXÞJ..šÓ²éµÇk^’=‰q•†Á¯v×'1µHÈÖÕ”LM‘rqØ5‰¡kq|G%[f4“Œõ%ɵz‹ŠeŒpšqxq–ɯyj+ÊÒè ²R'lººOÁŠŠÅ*+Kœº9©6]{L8Ée¥Üd×פÓ~p¡cÙæMk×A<¥¾WqšrV•‰ÿÅÜ‘N$i@mercurial-3.7.3/tests/bundles/tampered.hg0000644000175000017500000000222312676531525020045 0ustar mpmmpm00000000000000HG10BZh91AY&SYgA¦>ÿÿÞÿhï¿ÿýj¿ûû?ïýýgù½ŒoÝÎÔ>·Š·÷;_õÐo%p2%)µž(ÓMQ¦©êÑ£MMШѠ442yG¤hÁ ”Aê€õ=Fž§¤ 7ª44h  ¦@h &š`ši‚&ŒƒFd0†™Hjjz¡4z€€4 „ƒ@À€@i€É¦€˜&š`€É£ Ñ„ !¦@’”dÕ=Lž£Ò4ô†Òz& ÚžQå 4 †¨¨z  4Ð@MF2oTxæ¥kãV 1wî+ÀY¦ #äõ2ߪØFzûªÜà ÖãE‰]_^àH'"ðãaBDÈH®Æ2mîñêÞ³ÑBËø$ÄŸ±!Db ŠÑêÕ+åCdîÙ¦€Ê•U °çY7k¨­µ´BqWòÁ¤”.ÁT5ÈW¡ˆÑ¹ÍTFÞ”y/”€ãö†ršyx‡õ`Nþ‰8pä l,oZo;zXÇ+Êfv° §ÜjœyHÁÏàd¦ûp*°bªŠÒ«÷g“{ C*††â(Ê4 Cx1ÄЯr¹GJ5®=Dº\ÑÊÄÂp¢@I2*¶L\öºU¤!!|!R@ÕPú£T$%0MpˆÍER%C³Að%Ì•ycŠ-ê]¥*5I2À5À6TK ½1‚ |àñ AJˆ(YEP-5DxA‡$RP¥W(©CÀJF©cše”"N‰°Øƒ\Ê‚ !݇ˆ™Ÿ!J2 r8Aª tÈéçL¶Ä¨8U¾…W+vCÚ³•Ÿ`¶D„YÁE},X_TM†¯îÀù…Ú2öÓõrê‘YÄuòï¯s˜ p¥úoBºÐ&Uó™˜È’Û#I’Ó#I’Û×Hs¬{œVW`$—' 4ý]Šç(ÂBPJ¿q§¢ÑE!4Äޚʧ…ŠJ „°±¦sVó°µ­ðdTà 5 !;¬[ì+pƒôµ¿ÝÒ…Ù£:ºD,íC¡¤1BÁ^³« iœ„­!ä2ª”ìsÙ%uøP4,èAÙÍ>E ®4¹|ü@O9§iÀBÐÔXÑM(dBÎdÈän›`‚Ù·¨C9yû¼íû…ÒÍÃ]]à‚}²‡3žG^¡V…îø>h>·–·! 
è?Cʪg˜5ŒÔèzÃ9|…ÿ0ïÖLÿÄ%ÈŒÛT&*IOä…tp¹ 9špÇÉCvŽË hA U—uã¾Üè7Xè<ØÒ.BŠ"#¾—ke“ äuÃû)ŠêÂ.Qza0?Á/nÂ뜇CñRdøJ»Í"ER)‚uÐtíÂ)²t(Õ—¨(#¡ÔÅÜ‘N$Ðimercurial-3.7.3/tests/bundles/hgweb+obs.hg0000644000175000017500000002467612676531525020137 0ustar mpmmpm00000000000000HG10BZh91AY&SYÔ…\Âÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿà%‹Â}n^”÷»Õö WÙ…O|*öĶÄ%KfFÁ›®á@põBBûìéyô¯Cä¡Ça¡Jh3î_UR’>ÁÝ’%B¢U(P JDÄšb›SÐɦ˜M §¦U?M4ž¦4h 4Ó$§à4˜h„ò§´4Ó!¦††E=“FšO@Ñ0Ѥ̆Èš4ž˜ h5OÓF‚y4iOPif¦¦FSö£ÔÁ253Jx ”óÕ=3OQ…OMM¢d21=G¨dòž£ÔõЦÔh Èzž§¨ @MPÓ@hõê¨z€£@ESöƒCBˆ“#Ò€ Ó@ 4@@hhi *~@H„S z€@Ѐ@'ª§ªz#CCTz4š !£L€‡¨ÄšM&A@hõdÐÔh †ƒ!£F€¦€õh¨ ¥(Mjz2z&šmšdÉOÓi¤Å=SðšiM2§â4bžM4Ó&M4i“AO=&˜L2 ¦M=*lɦ&Ó&Ó&#ÔOF)OFJ0 uüŸ-Jž¯¹Æt¢¿ùènÔÑšl¦â&¶&IÙÿ? ÍŠ©þ(VeØá«|‘ >-y7Œmfç"&L¯oK+ù‘«½Íö¢§M`ÙêœTVÕÂf̧9Îsœç9ÎUèÊR(è%)F¢R>|¥ ¡ùÊRŸS)H(ãå)ÛB¦Ó35¦Rë·wX KW³ DîAGéÌ1jú¾5=v}zÚJz8×JÂrš±ÓõM%ZÙħÐã1aoÓ@Ä© gŸHbE¼ys²eIgÁÁD 1+FS$­žÝ)1-+7)OF,x³:ä:iE¢ôE€?Dûhi†æDQcœ$Or€*4:H¯íM`½ÝÐÒÐÓW:³‡lò%u¼(‘çœ?ª¥yadþÎ+(QôoQ@ícÆNÖÝ‹hð#Ç«‰°0²1 € ]YºJƒt¹âEi¥U$¡\8ì1ëÞmëiéÎp*ªDþþÐHÃèF.,/òd0°+o/-L˜š«ÍS+²µ‹GgW¬õă&IˆB~B„Ú]´UÎIbÆœ¡B^ü!*èHP”4Û¤6‡®Âm%ÉCm%)58T&'D&âP¡&”DI"«$¤Fp» T€§ÈDPH©‡ç(‡@)˜øþ9€VÍÕŒæœí8Ä X/BCéÅŒÒ!wåçvLAó2$‘LFÇ•óJÄ®T§¢cm f×éLÌ]Ž_“ÛWx~§Ç¤à[¯Üm*ßf–t¬ÿ“>ÙKržêo/ 1þiM‘€DDø…— :ËhZꦿõ ô®‚¸° SÌ’5‘j•‚­ÚÌò3|šn>Zù±úóÕ'î{ç£øg©‘trytZ£j7ÿ§À`Š|AåÅ>3î¡áÉù«֗ø»FŽ8 ƒ{y}×E9¬…‰Ë$t²™£)IɨlD+˨ղXíJæE(¡M‚å›oœd‡\„…Á…‚ws8÷¼ "â›W«î^%S’#fd  ݤ N eN"ÜÂW ²dã/•ÆÞâ¸Éç8ƒÆóª°Â1…­³¢”‘̂׉վªÇ ç:7%¸è­Ðyœ¹ÓÝUr‡Eö^UäDFÐð'ìfXÒèiþÆñ¦º‰¹I$’I$ã´š †<“HLg:òuv´®L¤?@³>[t?2²ƒuëë "mµIÑWñVIã÷ÑYvÖ ö¢•àõŒW#Þ<ò§Qþi ¢hÖmixŽ5]Jjù>±ûÎ#çú ¹>´-ÜkÒ *Á£ç§w ¯¾¥2OS†Ó[ÒPóÛÚ‡{Óu©Nm[Cˆm7ØHˆÊȃ/¸ºS_ÆÍìtïÓpªyåëó×6ô“?ÚVúz]‰®4G§ÀïŒÙÄþÙà  Õ\bWĆ®A4YÍd(¦ÊÄ|•ïÒ%§ºò¿õx~Y»à•¾›ßð=NQÒÏ`§"dÉ’kŽß?µ(´ø.V²€¤ß&gÎA§2R˜Ù™«ñÕ—½c½óA&†Ûî}¨ÿe¬õýKFMT°Ù*3FÏ8YôwòÜS¸ÜhK€Éí~ÿKÐÜi,c,gMOd1§m+áo4¥SÙ0߬œŠÇÊZf»ÈÎ@36J†giŸï³'n°ár¿d.¦?*{¯VÍpÕù—s;ðúæ¸Ô²ŠîzƈøŽªÆ@»׺ÞÃË%"ý! 
—c)ÒDG‹"ÅÅõè5\0qe½€b?Äþ¿Oø‘¡»—žóîÁk˜3øüg# WRa±„¦¯ª÷¦'þ¾yJJ–^E¯Ð|bqŒ¢""€»÷šÝõ_!ãpy´Íí»ß_ºHKæí¢ òx¶p#êÙãè.Q¯E÷Í}BlAKd+÷ŒPDE¾kTPo´ õaÛ ¼Û«ËfKDº„DA͸;Sïr<“vn?åq@^;Ð%‘±,SÆ2õr–ÅN^{†|‘©özàŽfšОTkŒðJ„B$9c:ƒ }C#»ê²Â8–ÆTŇ5¾¹¸Y/œ°–š› ?7´íÏjëy0q„b½ça¤ 4áJ±µ6¢'H5UÛéÉhî˪L‡ŒY7}ÕX–ô›Ì/›Sc:Ö „¢•#cÌTÒŽð(ªÞAFv‹$ÜîË™1dÂÞrÈØ“G#ž´Èΰ±a@ÉsÜ”,¦!lQ0B8 ×É€[€éøåáÿO¶Eë€=üeQ LM.š>ûæ_OÈìøòo{2úfa³'o_ðæÙ™gÓ­›’ï’~*Ø|nŽ4„Ä5EÜsãï‹HÚ¹æ¬7 |L¾/&ÍYh,Ò¹3G$ŠHåc‘„rd£(Þ!‡G}9‹MrƵ5­2†¹ÉhŒË›¦‡WRªs8âñu‘–eˆjT„CjÚ ¸WIKPIÛÖ€&lgv[§Û¨piªUÞÃy¹³qK„=,ånýFùÀí$î—a£’mè˜h5gò«¶qm™è̵|¾&/€ÌBX‚ÊÉ&¡0o5Ùê$HN&1‘,ÕÏ#*‚òÜŸÇâ:Ñ\y²¨T‰ëi•–m:&Ž(”:@:dᛜþ¥«öª(¼»G©Lƒ ‰³Xóq¯„+‚Ÿ`663Æ"ƒw¶ü#ÆÓ‹JÓ¢V”D'˜fâªëš„ode‘vìàîzAm>uTÁ›7È; 1o -»?ü̶ Kf”Ë&„fS¦—h⃘éºÍ飿µ¬¡&Ì(93F¡`!|R-‘Îxo`ã§ Ó<2:à¯–Ž®Žzn€©%ØÚ¹Lâ×ÐdxëmM²;²" æ3s¶b­|Û/ƒ¸ŠLéôÊ&öÊЃ/‡—‡Û@?»òŸ˜»w¯ÓyèaåDH«Äózò€þAöüÀ3áÆ;½–çÞAðtï§Ww®mœ½Åûí=ôÒhK…ÄÆŒÍ¾@}óe-8J&Í%­´6|à‡1Ä´6†ar•ëlaå "²Rd0ÐÞuÅ´‚36™6 âeã†h3ÞéN«Ÿ6xݬüÝ/¦‘Òä}ûZΪçUŽ¥'p'”µUÐ)\¬XÖxÕ‰ñŒR"I^™åôäÒ£õ{«b'IÍ/¹1Š¹ÌŸ:ZÄÓº3ÓÚl¸¨ òizFl.:T[_-£QT IP­ÔgZ›ÀàV½w‚ùR*<–õ øßEõ¿sLì{Ï™ç@8` ø¦e“Ïœº^u¹Þ¤w)AQDÔCÄi££økBíꊓô >LﮕÁ‹žÈO9*‰š@‘ë¶-d^ŒÇLæP]ø!X½èÚ Á»bÏÏ_2Æ&oÎÛ™}5Èö‚ÑvU8zŠ¢ªfîKκ^¥ÇÙ´ÊqˆazÍ4®kAŠ­Þ…¤´»Ë(IêøÚõ2ƨ¤ðy+ÖéD±ÌžÙ:›e›c˜µéÃÈ€{ô÷¿sUZG8N í€#'ã¯mÌÜ^MN;}mœÜ*S Ùd¥ůd¾,9S é*ÊáRé4X¦A²\¯šÁÜ•4Ä>WÛTªÊ§ e‹ªÛ]´ÙƒO»o8åÚK,“´Ž”I Þ.›7‹ë˜€ÅÂ-n(g ÏmÎp! ä¦.ö¢>a ánÃ…#–5»¸³bÀªè«aÅ®P"U²É)‡ÝƒèïŒE(<Òƒ ätÁÒH€åã¤q0S[ØWKVhpR½Êùªá•œ,ÓXªH&sÑSVefc Sœè4È!ŒlŽ&h‚…ušÙo±ëYáõjo¶Ô®3m.—†ë‰çÐ:¹Æi9¥’MrWL?ÝÚçšË’oMÂÙ­ïJRQ&¸ éBÙÌfMršÛЦ“ iu”>þÕ x‘:–œnÖr„¨LTª2o6…¥Ÿ™ ¨ã;Nˆ5¹/·žæ¦Óùhl6ÐRÛÿµÛ³-§sàre';ZÚÏ}o’‘æj.ÛŒ„ìiëjà kïù>t>÷âðµ¸û^⸧4€_IÓF·º0³‘TcÈÐn &U24Œ$æh§b[Ýyu­Äµ¡Ô0P1:ð guEKX[ˆpk"ÖYžusÔÌŸ C›:ÄÃT!ñ²Ç&@3wιVurfrÂï%kÜž. 
µcn[Ô%³™|nÙHÞÊ(‰@÷â`+.ßGÀNÝ'{€ °ù/YE^Þ5ÓßDæ0ÌI9µîuÀÞ8‹R0än¦ýÃ"¬6JP¡&1¼Z²F¥'},&S€5èãm6j.–C4ñáË µ÷'ø_†ðkžon³³©zÇÄi:ªéeoý±iÜoà~íâ «»Ä Š„.0Yï?ušƒEÐ2º´ŽjˆQfÁ«bÃ-š På´:ìí8‹Ež;Ǫ%é(8#+ hÅ‘õ,ö¢É•]Êêƒ5¹»;½‚õ±³_n1DÚ,È&i²ÕŒ¦CwªM¾q;%Ân¬jÿaË—._¨õxÐÕ!ÔuaãP½Oή\úŽ _6Yˆé(ë4LÂQyR08WWg®¼õ‘Ì4ž77Ýó"8Ðé݃­/©œLžþ,Ì}Ò÷× ïÂM»1Í„ Žû³Ï—ü`ô?ÒòŸ¶ùÎAË—(ßkŽ8ìíuº»¿ƒ¨Õ[ï€;ÞÚw1žç ‹5B÷0h<_c›÷C¾<í²dÊ pêmžñÀI*ã+b=åÍ8mhö³¦Õ˜ØÆ1!¶6WŒ]ÛñÀ›‹GAèMµ\޶ hŠùDÓ¹›C“ã ‚Â6(ǘ^38Ñ‚*Õë3t¢5é–ºŽÍSÆÓTìé§k4Ëk‹ñ´¦–n€kd¤.%whÂ¥‚÷C0: ÌÖä)¨Rp1¿aE€QRÞ#8UU9õµŠ‚g·»§„HGúÚýôÐJŸŠ¡•z¨À<-ˈ–Azå ÓqË9‹ëæ¦}Šé`¦'WYÞ €)D *¹ÿM·*0Ì2ƒ•ú±UÎDDµŒ.Íçï­ 0ZéÂ’sçh3òð]Ĥ·2;dz K,ƒèl=(pø¿ÁÌéÐÑΓw+ZHüÊRShjtzš›Þiè´;â|m‚a€Êâã¦hx<ƒ$ÇgYÌ¥èv•*d–ÀÇÐxÖ®ÙqnÝüU§Ë™½†«æ¸¢Qïóø«d>þíöA†s˜]°uôîïÔŸŒîa¸ãwžý{ÞÁ‘’íwÃZåHz„DO—1ð3(¼þT¸#cK÷Þ“ª €x’Œ 8;qBUh¬£dwýꄟš§é£ÄãrhEé""+_úi Ïtö÷œÛA¾¶|§Ão뻫D_ïæü¬>iôS/…ª‡°¯MY qÒ÷ÂÖtž;£¨@<¥ ¹Àæ#gúu_A"šBvŠà 1<%c76‚‹ ¡•ÎÞf—ápP6Q±¬_/Þ¦ûýý#·Yæk·†ù ®‹Íb°?¾Ç:ËĬÙ^¡lcxñ\†‹„ º¨o\¨drað ßãFW¾›‡ §òWÃ8Õb-ÿ0³û¡©{³«à؉5Sfå†ä›9=\ìN—ßSó š S/4,á˜Ü0ì+ïèV£€u±V¼3­¢âvûçÁþ^õ Ür{OgÔê¬ú§¿îž!ë”&lò^›oÁ¾tÒµJÝ.GYÒyý#]^#JïÕmW× œ ¯¥—Jñ_ÙïWu,ª i»Zÿd}ü\–¨ ª8K©y°DEcE¯DSà9ÀU°Þ½T☞NÓ¬¸xõ‚”ÿëçeŽžˆ¾ÇË“›8ijMRó£84&Èâ_ ݳƒ^&¢kRMÓÏ~As'ËL0;ùK¬T[€{‚ <=Ù€¦_|ê^{µjC°»ÚîwsæŸèN•’¹åÀ`!"÷ɰ: LPžÖ«"æ‹kƒ¼ŠÔèSvZj»Ží»Û:2½Rßš6­†Åb…³mZT R2˜!RÞ°v¦UÅ(yÅ—2K2~’k,…3LèrfSgÚw-‹ZÀÜY{ÝÕî¿xPšøݯ§µF¶›ñGˆâ´— áyoz¾ó`ÅÜRUðØUF¥kép÷b€¬VÙÖÓcÒƒJ Ésy/GZ˜Í³—ÍpGňw.–tnæ3§„LpVÂM’û°og¤Ô¸m³ìxãõ€>kÊ&j ³}eÏr[€¦FBéª-]Iv¯çæ½€±ø`¨ÚÜ’Ób'øñqûç?ˆSú˜·µ—ø¶xë¦,§\ró˜ê÷Bc˜ _+¯ü±ÌKeìlÑÐqŒDXu—$E‚$Ž)K®©m¡mÀÂ̶e,ßZ—?¤õÓ‡ v IÚ‹œˆ×yëÑ©ÝÄØ³Üs Nlù¨ÃÛOÆbÕÝBoE)éG"`¹ù´ØˆÛ_aÞÃ{Ç-´!Øjd  ÿé ø‡®èkü~ͧÉ0ÓŠ(¢fph䈾¾öa{AíQ(Ü`ØH©.šîÀ~öù6¾O§|Ô¹˜ïҠƵjÕ¢µjÕŠÕ«VnXgÈöoëňW¼¤•Ó0.‰¿ âEӢȆ„;ê’çàf7óÁ){àþV…o“—~ìX–K¬Â°:àï•‚ hg¼Kîϰ4øOË’qÎé;S4}…4ˆŠœD|G3jÄý¨¯™ˆ€NJÅŸ§æ#цóÂ*—a ™½ö?Øn¿¦¾Ôk}[±X†²fs©Áí_óäPÒù¤½yY ‰/¹þQ¼¶”˜#XIèž®ë:äÙN'¯—⣶îoA¥=_:±ü"ßC À ]¬9H×tËþßp}ZÆ ïzQ5±³êý³¸ððžù@rØßd!ˆ=ý©ÏûÑ`/(ˆŒæPŠ hrµ=À‰Z¶FƒÁî<ÓÜFާž¹ýü>á<xn ÕЯë±~yIîúÇwJˆˆ¸wÎÖ6iî¶aöÕ\ƒ‹Æ•á2Ïöî0§M‡?çH}f׳Ÿþ/'æNÏuÕ#•Î]ì!öÊkºîvŒ¶ë¡15·¢æ›~Z"#u<LaŽU~÷¨ÈòÏOòNæë…̪è9—{—ìZ‰ì¤4¶ú—¡øª¾qbÊZýã oò„xxýÁ±5Ê() 
5jØ;ˆU†õô1iº…’‰ª÷ÌÒÖ‰eˆLÚˆ£ÏTblØ‚¸A] þ6ÿL?;5í»&W¥ë÷‚òGJà¯öƒ/%§³Òâ˜@ÇóGЕ æ;㢷êœö ¦?Yδ*Ð[*ºkª¥¬&Ï8ñŠ3òÿgƒ[ãzù“Á»b©›þܽ’¾N+BŽˆÒóoî+³Š×K-I-þ«-G^p!ÐÞ(ËÌáMÕÕ:¼•ÂïeŽÒ$(r{KÓw‹ë8×ÅL[e(ˆ‰Õßµ ’“ ýáþ!{ íðQ¥¶Ðµn3>8zæmVܶÌ'4ÑAÞ¤2îÁ}»é¶Ž>ûT÷|8a?•á>ç‡PàÒ¦ÿ¢ Æ,bMj·5ÁýSk}döÒ¨vaotè”%l”,E!íxµâ% ZÃå­šêr„¢6Sú½}LðÑ QËO.E%Ƈõ†8¨ó4Ü=aŒ0¶äsnÿð1 ÍyS669Ë1qõ8åó`Qàbë¡* ¬'˜·«ußÚ÷IÚÖZá§%õsÞ,W*ÌVqÞ”Á—9²ÞA˜‡*°y§N\y÷ËcÓï~:¶u"NwÉÉ}S‰•)|ÌDOtçrcå÷ò~7Å›3ô& o÷ßž ¯ÖÛ[Ì’±÷“º{]æ€þÑ›w¨g¢ðøž¤ w}›“~ˆ  5ºËû!¶DU;™W€F`B3-§sò«H ×Ï”ÈN¸U$ÞR\,+2k÷­dÀvÐínªÄrL;Q³möt…ç{K¤/™Ž©ÏÑV¡n×™½™`>£ÎÀ}îþrÉÌÉëc¯«^7gí!¹<ÿ&VRf{C†­áùZ:Zi—µgË@^˜W>¸2û‰ÀtA˜FÉâ¯_‹UHokè—F–ä ý¯Ú¾§$>Çì?æÄ¹~ŒŸ·æŸ›˜*ERm’­ŒÓBê9Ýû[ðèìÓ¹ ëv·Žã“ä2ëKAßìbºjå³ycŸË¬~„ÙS -u_ö¬\½ Cô ˜d-¿eµÚHw^ Øk3ˆMØ÷r|aK;¾ƒ½(á4ÕòÜ£Õñ1±MŒÿë|ûâÛÔ7üG?4µä/Öv°ü¾ŽA¦æ©;­úœgÔ9]À‰€xÝÊ®K“œKlÜõE²Ä »ÀÄŒ_¡L!LȺ6!ÐDD§Ö黎ŷºÓ³ù²pöŽìÊy¥ï«\y–\Çß ~°ÚKN|ɤߨ€L—Lõ¢Ää(;“ fð÷$DE.´î>3AªåwR¤bND~囥ÜËeÍçK £Ñ˜íW,è÷öãºÎ)ÃLgµ¤ì^+g§Hf%æyf–Ás<£‡öu>–Â\õeÚ”þ·Óî&)‡C]ÄΔ7=†Š¿ùI_æn‰x—çu§‹¨Òcƒ}¸mŒdv™ý)ý>^¯*ñW©q¸ìèr÷ÎÕý'ݼæ.«DD{'U^šíäÂS C^zÝßÙ­0úèÂN™òTšãÑ D¶D­ýìݶ»@?< ¤™cáaÁsïa­†‘º¾ëöûÛQ ìôYFÍ@bþª9³?b¥šÿ†ÜÏn»ÏÎéÿjÙµø0>Þ6ËævÕúö´y[®$*õËXU3ö»Ù@´Ù·ð¡ÙCáØýAð„QAQ¾ì͈÷¹Ô+´èÆ`—Ù‡:ën‘ oìºïˆK1eÚC2ð—ä9 W#8ó]ÖúàpìÒvàë‡) gIPj܆½¸(0¡ÂkL1¢qAò—§˜|pμjÀ”™EAýlÂz£~RÚRþz†#¤5œ Ù4^kš{ _yÛz°Ãœý³eôr<Ú/¶áÈá ñ{NãˆoÑ޾݅٬E”Á„çacaÀaÄ_>áb- LJäOcix÷.ÛŸÒCб÷$¬'L,2x Ôsk’§Œ™£øµ4éRÚ`ó4[£¨çe–îq9E+Ü/sŒQ9Òxˆˆ‰º‰îwG{ªÚ•8®R’Ûk.ò¦w¤¹/š7b×)‹ºQ,pq@®ÓìY„}„-(svÏYœ4ö^G µÆjD ²é—T©lÃz•ê(5h Úö^d"w=æBx6Ô ·î¬g°Ÿ’V±9<‰…·ì¾N]÷}f ®ÌÝ·–ñ ØÍQØ{>˜ðs …½ü)^?:ÔÜÉçûÃ,žòZÁËß„û”z5W«—‘ ÑTþå7ô9›øê×ÐÞ‹^V”­à«ºƒ‰TäœÍ…JÅCHïÁeôXdvy è‚ÝûxK„ìòtöB]X“–ÙëÂPDDƒôgÓ?‡×ç9A¸_%ÂØ «vªøˆy/×7uê¦Ó=W~¤C¥&86ÇwìmUÀDD]>ªr"#0ˆˆúV£ò(ÍéšØŸÕV[CÕ® ö¦ºÝ¡T@^1‰x\Ñ’ÍI†º9}Uòd>ŽôáÛÒo 8>Ä”>#eª‰vŠO˜Z£Ôdkdd_éhd5IÞ¦CP~x¶ˆ‚ïŸn¾Í½"h¶ špýC> ¼àÇÌç‡[²¨þS4uWç¦\ŠÀ—€bŽ%Á…Æy»¤ù6.ˆˆ«Ô‹¯ƒÕ:;}RœˆˆÙ¼vSý¯ÑïÙ du䟹‚VÕ•Þ…ðµY÷Ã_¢±æz5׊M'#7{×[ùö]Ç¿òMuöÝ䥞öõ,ëª4 ݶËêÔe–þ3/V[~ãv:"uÍW¹Ê_õ©ÉùPó”5…rJ­[¢h]G­`O*þ…;£+R†^±Þ±Ò‡Ì4ëî2P}K‡.¿Á‚ðÈì~¡ó¥ëÈ?Ò÷x`ÐõÙaíåÇ""$“\¸ˆ‰D)+¾ 
΂"GòqÖ0K9Þ´q‚r”"‚´8.ÊÛ¦%¨´?ø]ÉáB@;Rpmercurial-3.7.3/tests/bundles/test-no-symlinks.hg0000644000175000017500000000105612676531525021507 0ustar mpmmpm00000000000000HG10BZh91AY&SYLüÿþ»Ð3U ÿõlfÄrÿÿÞ˜@ûZ¬ Á–“A áˆÈ"°yŒ ¯T†‰´ÈÒ hõ=&КA£C@¡M 2=Fš4ò=M=OMB$Ð24Ðh‚(ÏP™††ƒ&š`Aˆ “†L€>Áõ 3ýÝØ6áÀ@9f Hˆp0sQþD"JªÔ@€¹Œs¼pØ’¾I½þ؉ø®xüêAè˰¥éÕÅQÈ!ú r*º†93“¯‚#Ãô¯'Û(/hcº~Cƒ%´Ï"l4Ëí|мg–Å¥„ªxKkÂCì| èÊ8)ˆq…2É 3ªŠ\'0$ê)LjV¨h*‰Á"% €‹5)*÷ÚõºlüŽMzééç¶–äp&h<¡%€öõ3^Êé÷Îv"y’•tnÙD†ŒLXP‘@‘Pð$Þpø£ÊÒ!G81#bòé”ZNK~`W’&ö˜pàâÆA‘E A hg ci -Am A echo B > B hg ci -Am B echo C > C hg ci -Am C echo D > D hg ci -Am D hg up -q -C 0 echo E > E hg ci -Am E hg up -q -C 0 echo F > F hg ci -Am F hg merge -r 4 hg ci -m G hg up -q -C 5 echo H > H hg ci -Am H hg bundle -a ../rebase.hg cd .. rm -Rf rebase mercurial-3.7.3/tests/bundles/issue4438-r2.hg0000644000175000017500000000743012676531525020245 0ustar mpmmpm00000000000000HG10BZh91AY&SYøóÉÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿÿà s܈5€€‚ L0ÐFÒ`¦¦LÒ`4Ð`i€Œè™LÀa4À0&F™0Ò0‰@#OM4h˜ÒmM4ÀžFƒCLÔÓG¡¨†“ €˜“L!“!„ Ó# FÉ„i€À¨ƒ7ª†5 €Ô€ 4€Òd4ÓÕ"1Q¡21¡dôLŒ€M4ÄÍ# 4™¤ôš`ŒÀŒ&ÈÀ˜MLŒ#0` è “4ba4`@*©4˜Œ4& “& €Ôɦd4iƒHž “ÄÄdSF¡€)²f‘2˜Â™0 11i6ša4ш16¦¦ÓjmMM” vÖCbj7¶Ì?sˆŸ(!¬µÞr`êÀ ©ÛNMö~¿ñ1µšÀBÖ´A_ ãIö„3/úÈ›ª»$l+h'r\°c‰*aB_”†RSo÷®c;*­.dX: ´úNÑd§é“2®Ë"Q’’L™–ðdÉ5uãFôe¤é~uB(¤¡K¥T¸Ž0±ËÃLrA$ĸ8×$@’ÃG‘åP&G­*”S$@‹BK!"åØ×Í‚2aP#3,¹Q 3 ,`Ae0Ñ ‚Á€0`ÁúÒ™3/%’¦™3)u}”óï×ùýý;þA ÀôÀ>ó‡'žQhÐÂû.Q(žŠ÷? ÏGN‰çF…Î…ÑâB#? 
/¨ðÂã?z÷¬û R‘‰mR.á¥UØõe÷8,z¬È0+°“jfÝ o\fu@Á&Ø´X¬WDR0O•sS×õ=Á| 3VÀ>Õ¡œw*VÕÏlÛ–ePl“*%Ñy‚¥ J…D(Š’ ‘B^@à!mÐ2¯%½hÔŠ'WD•G8.3fÈ(ŸšðÝ<ç¨Ý>µø¢—wâö&µºP¥^òAzH½†yWWëÜæQˆo“‡YæÁye›ÇíX¦yô¬ÊÛ6––ýÔ÷›xË:mÄ[š6…Å`ÖtúNNUD°³èTVuõù« ˜P¹Íå8f©¸hº›ßgbZ5zÜ&º›$\tl|QE-1ö¡’— L2È L2ÍE¹ËoÈÆòÝ|-óéu˜¡O¨Ô&fj2‘#\¡BI/+ñÄMA¤òì.Ã-·SråÒ¨T’ ˜Á€Àâ 0î ㇠°ËM°ËHl¨ª µP‚ýûÐÂQ…TFðCÕ™e–Xi¢áUUB HÁ$„ẌÀì<×põòë,»2£¥I'P!„p|Aµô¡Ãpãbœ(ñeÖe¦[n®9v£´„$7a0”€L `‚=¡Êïñêë,¹.Ü•Bè]…D(‹‡ÐKÅ+©…Za²Ûm6ÁUR„(PyÂŽ‚S0˜I(T®¡ØÛ‰ ÆŒÿ,ý–Ú„//.2:6$HøÿíJ tÖ¶M†dÔk+HÛµï=µvÈ ¿,v*ÃîXûÛMªM£ÜVü([7„ïÇŠ³5îÅã¸ìÛÏ}üêb!ÇÏeߊÜÀ# Ó[ÔcÅQ=·Ô‰ õQ‡{-»ãSuyìºk¾ŽIá×ç~Ý#ë_Ät»ñbš¤Gäa’ËãŽ^¦ºÎƒ ¾¯Ñl^[”â¼ê:Ìbq…u[AW«Ð$Ô++xª«Ž…± q!!œj›„ŸaÑÛ§öô˜æ-WcRÊ»¨ùR25eÕû6.2ê•y ÒìÄ,¯]ì7/1*±5·èô–„l¯ìµ¨Y—=Òo”¥ñ°lÓ®sl“E9Š¢VVMNtO©Ål^šæ|PuZUQrvÉÖƒV–Åi¨Z•÷x¹µO½^v)v©ü±Bmz©ÕµZþT.ÒÛ-jŠ$K-ø"›&™F³:MÓÛ~î[Üw €”¥íºØúSËÓÐ(ÓÉÄÚ1Xó¡·éˆ?ÓèI9 ³2Q8+£”«6ŒSÏ~j7h–N=GÖ¦(^b´°½Æ…å¯K*½^ë¡Ã¨8Œ‹ço™&…pwJÓRõ˜¶ Ê| ÊVWƱ«®½U* ƒÀ\4óØÔÌ“iÔÂÕ-Â’ÖmM(*€pà8™<ç€pà8Àö2MqO=°[GìõÜtSSÍjäÖ­ ³þ*«#ØjËÞ… Í»x,ïªâÔTWY„i]ó²kEe¥lž«änÞ‚NOý¶ —~Úº ‚ìà/(¶aé§|/Uæ €‚Ôý’ÒÛdÓb¡}M ~´¦ÝÃ%ó¦v®3ÂO¨,ÏÑSXWç à´ b毲+# ïÓÌcá`UšÎ·~êâugP°LS÷Z'ÊX×e¹J_U"ÈøY×Ê׺­Zº 6Í2^ŽÊ0jJK½¥*¿EÊ_BÇ.²Ô)eÂC2Ë2†IH¡AqO¡7ˆP^ІM™t_[ZË9j6Vf£ øøÙg-¸´iMšìœê8ËÛÑq“N;ÈoT ’ìG²¯)³pœeé‰~.åÔb—F9Tš¹°Oü•L>×]·E8mj}»F)¯‘ý]5qâªM‹Ãs™GÀã®\w£rªÕpðZRïi:²øŒ¤]·x ´¹8ŽšÚÜ7ɉŽsúÛÖaltÚÇþH·Ê¢Ô•{ëÛíz+ªäÊ®L*¨×¹,ønßZø§´«š˜Ü(I$ͺ¯úų«s;Aí=Æ]0”znºuä¥×utîÖ[.¼}.ŃuÚå¥þœ–¥)^Bô›7¢ª¤Ä7Iø¦˜t£ÏSéõA®c^ó€Ú,é‚fUEÙOt%‘Dè9Šm6YqWוå(ž‚$H‹â”É»×¾·0Mâ1”`R2='š´¾+Ü(W”ótê¬É‹x¹­­kzÚ»´‚¶—míª¾ÅØÖ7z¶:Õ¿L&” r=§Sƒ<ß/­”º ½[Õ§UkMöIF|v­,lZiñ¬eÐ0, î’‘¼Æ10º$Ö×ðÆ£ãûý_{¯y¤P?WÉ<—)Lnåõò(Z”¼¿`«‹‹JÔ?ó¢ L9k‚Àúœ°M8Ìb}wwkóܱ,›Å r9ç,ë+œ››À¨ÑQRèp÷-s*º'\Õ©bXצyŽbß#xñÕåá¼n¦ŸcŸ£rÞªU)†­BæAdZÜFí‰y®»Ì}î3òxI‡~þ”÷Iñ?F©ôªëJ¬¤gŸ’œz­&Å»U¡>Т=Åx®Ô‘ˆbÅܬ¨‹ƒÈwM°òÝšäÁ+î²Úî–gmWF×i]vÕy~¬<×*ü•¤6Ùí¸O™iÚÁÌG$j'ŽË¹k’•Jðáα(¨¯å‡VØG|ç»ç-、„…1špT§øuœ§ —M,Éòâ–W[GQè2nBqV[UIî*nzâŒr*Eñ/í{Úz+£\íçÒŠQ1‹sB¼ªêÍgÔuߪ¢ò™÷ª3k À÷\†%ÁLµìÒMyX.*ÐϬKkzиo9\bÒNª˜ø^Zàª$R)Æ™÷1¯ tnK´nÙŒúvwÑ'=ø¾Õ5X+ îûØ”ë"œG¤äª6 
#øz-{³r›×1û¼öñªc×'ªøN¦ê0cá^#®ža™µ¹SZÓòÚ.­(ϪŽÕpmÝôvîËu\ŠÅ鍯…ÁQ4.ÝÔpZÆYØI]XØ8®cTûPÛ&]"¦Ò«JËçvË‹vô9oieqูø*ÏÕ±]×vɵR5÷ ò1FûRMˆ߮ê%ü¸LS*ÿª‘NO2ͲêñÕ#²rß6š‡Mö2*4Sã¹nÝŸdZK’M<·[©NåÇ_'˜±¢ØuIRkÙWdŸÝÂ…}e²UwX^³³J¾ÆAotUé¦eV†F³4,¢ Õ².jy.Ê( ã+ŒóD’vÍ~Âr­HPì¹oxmÒ—[ñ\×Yé3Œú-í±ðª¶,Óó} C†¥¤Ý²VIBé=÷¨¡pȶUä´ËûvýÙ²Q2ܰi´Óìr—†IyKÓïkZ=wŽ¢::öÚ»–¹§X]Ò$I–I(Ôµ &ÐVÞ»TþVÇÏIÉ×]’**Ö߯K8Ó#£·®ºÈ¾tÃæxŠ£·{p\Ø‡Èæ3Žñ¾k¥Õ$Á!»„ÌÎÅq~¡$ý•Gܸ9 kûXßBmgR1éö%¢ü¡Bñ™‡”ñš–iø>¦±HóœWÿrE8Pøómercurial-3.7.3/tests/bundles/darcs1.hg0000644000175000017500000000244212676531525017424 0ustar mpmmpm00000000000000HG10GZxœ½UOlUŸ³Z½@`b«–J·3;óff• m©+”•†-"í¶ðþÍì´ó/3³•–…‹ $zQoĪLÑp)ƽpŒz‘cÂM<ˆ|ófv©–66n6ï›ßûþ¾ß÷½A耟o–­ ÇÿÚøñ³¯wdqá…â¥Ò%á>kûvtÈX1 ‰12 D4 ɦ‰št€LE5e¦†:È•a‹eˆg ãˆ»ÜÀÝQ‡z6ÎcßÝ“ `ÿ¢fˆ²!IRŽÀGrÿ1.ûmo–z±Îýg?©õÃz\óÃ{ªíÁЦѽ•~h{Ö=U®“ ;ŠmöæærRJĺ§¶Ÿ]»|î§™íO~Bë_«¿½SoÉWVáBŒ/¤K†¤JTaŒ+¬ †&› (CT Ø  EDZy»¤ ° ) ¹%)Te¬™XÀ0 ƒ©SÁk²+@‚tE–@‰˜¸¨D–P±h BA©˜ÕDu6«BG:Öä¢ ©¡›ÆE@Y%¦Y…D‡P‘õUû% C"Y1t`v`J  ±ʪ QÑ„†Fd‚Vm¬`t*kDŤéP!!…ÉÔ DŒК€û÷Ⱥ¹eµf ™Ãï/=¸áíˆqîÌö£›/Ýx{÷Wí™!áé HÌÅ4Š÷$Krëz{ ’,IÅ‚"ªŒI±Yä¶5FÜÇ¿_‚>pñ£­þ7‡ßyk± ÜÜÕ¦:;ÿUa¶)®9{º‹Kòú?Îßüö¥ë}pîÉá‰G·©í)¨ãB—8˜dšMÛ¡bH-z"ˆžËUógu31úr)Ö|²–Æ“5 &×ì}1Ó·Û?ZJÍàpêgsÛÒ>n§h¬…šãÀsÓŒeŽPŠSd¥¨”¢ E£!7HÄ`y4nZw9­;†!O1pˆ[θå8üHüDó60¾G˜N}ö§>‘Ÿ¬•ƒÉ “e€ÇM KqSÊ)nÆA9ãÀNCìKcÐ4ï'‚ø8{qÃÑðŽ‚9ŽF2´Öäf/a}/Z:Ù€ï^|óöµ÷~9ìüºiq~áñö ʺ«lPxžƒR³»ùÚ‡|?Ný»O|©æ±Kºs]bo~Æçî‘Cê=‹ié"š 5aÝa;ÃzDŸFçD×¶j1sDTñC =1¡90¶}/CȬBfÊ34ô¨#º>©³øyæTeÙ’ü3ÍXg|ž»s=Sþ¬‰npl–†Q¬§›m¥Š¡W*)b¶³‘my,uwnªšïJõ‡†2ýÎÙV¨hÖkú ù0Û® 5-Og;ißZ–p†ãÀç)´ùØ„t:³ž"V Žù D¯™¢<¶¬ÜêI³V=µl+ŸìôÙdù©v&úf«ä½•c•˜o Úñ¥ —3Á˜µh…ÆÍŠƒ¹ ìO.UìÀ(b5cµº‹"6Ö(‹ÊÞª¾«Æ”YB\£Ëbcß3m«šïq|«Å0®G;8Ë„á‘Ì¿'†VÔ(U˜²+«|bª’«Õ<‹ò Õã£él°ëÕˆü†Ãþ0‹—PÞS7ès¡í´îÒÖU?´‚pürç­ëFý+·ÑÅMû¿¸sæúÏí¹I­ßŠü­Ï´ ßO?öÃÕ'Fž·n|¹¥rëÑSŸžý±Mù7@±³ÉÁíýÿ—SøgS*Õmercurial-3.7.3/tests/bundles/test-keyword.hg0000644000175000017500000000045612676531525020713 0ustar mpmmpm00000000000000HG10BZh91AY&SYNÎ ÿÿìÑÙ€t'ã€?w(!¢™€B ˆÁ!0Ù˜5P iA¦4 ™4~”¨ÓÔØA6 Ð= 4Ô(ÙG¤hÐÐi¦MM€4‘¨Is€)$À¡%" Dì|åU«-×öHˆ)ªià[ ³­¡Çä2ê.ÀòW ec!LoA1xè* ‡D: X¦<V`Cn³H2Z'e ¬#Mþ 
g{ï01ÓMËïm#±Ó¾)‰¦¬Êê^ypÂÚn‘Ô;'ä£ÀœAagÎfžÀuô.¶Ìˆ€Õ §H¤É¢ $½À¾Ý1ø?âîH§ IÙÂ@mercurial-3.7.3/tests/bundles/test-manifest.hg0000644000175000017500000000112412676531525021026 0ustar mpmmpm00000000000000HG10BZh91AY&SY$6î$ÿÿÿÓÆƒ^éÿåÉF猿ÿÖÚ¡¤kr š¢ä˜ƒ˜'@¹mغ„¥Pi¡é ššˆ`ô€ÈУLšÐ4ÐÍ@Ðjž¦QêjlÕ=&é4=!¡µ hÔ€@"ÉOOJ Œ€€hh©SÒ©é @ŒF€ €hG’†M”ÃQe…<4˜½$šSdBøÈ©•N»8Òp–@ò@̃@ÈRÃAŒÀjtr‚I£BàÖ/$2Ð[¹¾< å‰)ø–Ç@’yÎnÑEw+qe(*"ˆjx‰ÔQò± …R˜2Å^Ä)Äû&·0Dl`#,&½qpù£ª™1PË,è‚ ø)è2í©Ž¼…+)rÎ:©@€É/‡aLˆ/¶ –ˆ#[ð\Bmʼn´Hlá17%ŒTGnèˆ*Cônê˜(< Z±¥4§Ï!K™ÓXtÏõ§°n0ŽÝtvŽHÔm)H±^qCœ(`™ƒ á™[•މ4¤%¬®Ç)ð~-N “‚z³ÐpõZN… à + f±‡¢Ž”ýq — GŠÕ3µzœM¡ì¤zÓ“ }¢¡'ÔEÃÑ#›°1Á¸l/{ƒ[@z%1;ÅŕˀYIôª8`ÑbyoûØzþ3KaƒV<  ¸HïR<Ê!We@{ŽÆm`ä4bh ša2i„hÓ˜a†@ÄȘ™04 Q’jfzj2h€ =CÔi 4 @"hh x”`†0ÓLÐi À &Â0ƒC4Ó& $¤’iâ›&£jz€Éêzž z›S#M= hÓA G¨ÚŒê êmOP4ÍAèÔô' ÔÓÉ=ØL˜Ðˆ‚Ÿr­Ü€Jd‡h @ó,ÄX€âÇ`X¶$q’$ %QÁˆcƒù&«‹e¡ LóÐV ƒ^4`›– Ï,‘…Åñ¸ŠØÆ1˜h€&Ôss ÁZ<#‹b”¡…h8#Äj VY¨›Ô’0© ¡óÊœGQLUGÊahVÙ+âv;GaFV3‚„²+¬êŠ"Ç{ý°qv†ë‹kY8 i È &” Òe“Í™[|Û W>'l»˜/C"mº.¤s ²ñeÐ$'ð™8a’H¦™Á½þS¶C޹2„Ñ‘Uãšk·_=šVó¶ôT4OE’·¥³s³p€óŒµ›”37&k $?IRESwÜ@„whÏŸÁIæ)\2  k$ej: Ô ¾éVØ`›´jÎ`V!ž•½1‘.yª6ˆð·UË7ˆÄÁŠjL̲1’© ÐÒ¹w5 ddhv½ø8#¶äúQÉEQ’iQtŒ4_#çË·èDSÓGÜVxÚ›c¦:vffmo½ ôb]' zƒ´ÏOç;Ë{Îo/§²qÀ+LiƒqÆÞGXPCæŽÔ fLqT*¨TU6`ÃXŒ€’¢Rh•R\ Ÿ\úì´P×™‡'åÅÂèØ œç)⬑ÊTqH´Nó‹ŒøçÃØ«Yh‹œÂ†ˆŽÎèZ,ÛAPzåÊÀ`ÁRÍY§ØéÊâ  ÔÒS¤K¥‹4¥¿ÓÆ&%Î]d½%=­¨-hV.LR¦Ií–H0ðs¡5+˜œ ª$·’(@  UCBƒNî,etôDBY„8J…E"–†È’ƒ ‚1A‚žÔ…,Ï3Kù«†’WòEM'A”ŠcAE$' ¬jD4\.„.;¨(„‘°Æ2é˜v’1pW ì¸BSÌ"L$Ä,’',*(…‘é:"ñ¤Æl½ñ˜÷^XÇÐå}á’´QÄIG"ÊYúæ“lŒJ<¨pŸX àªEîC!<£Û ú=«<柴F„–&5X"Ô£~‰CÑÂÎ…©SSª ¿ ³q󯈞 ñzˆ¸ÇgÍ+ÅHd$„F5êÍ=sR¾O8­§´!BÝedlÓ–´9ÆÔS£Ûœü`‘±·pÑðéȼ°ÜÄwQ„¤l`.¿}Ì ^%àö%Bòá20ŒF-áízQž¦³g¤"ŒH‹ Òe‚HjéjZµ¹f[é©síŽZО i sÏ ù9ôÒÀ³*ƒœâHi!T‰Ý½LÖÞ8ö2¤²a)ŽYH6—È÷¡|Àá¡- Kdrªx%!2NtÊ¢ä:½CŽ4ã6_ª×!A›yC õ¥ŠÇà h [¤ÔÛO(\´M›ÎËß ¤‹ò~Ye’c– ¥zÙu \K9Oá‹2®QvˆÁá&ˆ§.˜J¯8üMÄzAêG†ýÝ|¥´EH±™Jqi¡ŠºT¹*'T¢ïF]vƒÐ”ª—‚zTRDÂ>Ô›qp‘LUŸºÙ†ðŸš¸ e®GʦKôþ½é€,$>Ò^¿Zò +IPZ €a¯ÀZt`ÏUk §NûëC×)õ û°Wš9È©.ŒTfá¼l4BúŒâСþ€dN-D0ã…PãZ3¥Ë*À`)ñÌܬ¦]zr#Qm€~„ŽrQìÒ!”.0³§ÀŒstˆ…ªÖ)h};g½jGÓ(*j¡~Ó²3Ñ£b¥ØOŠb?9~‘.£³03QeB¯¬¯Û|ÈÑ%ˆÔrŽº1‰rÛþ Pò:Tmw—)Ž@ØŒR?PÉç­gøŸ ÐgŽ+ÊÂE¨J « àîð2‰ŒUÇ,îÚS­"’;dp 
€±Üõ”kÏ©"ÉÕ7#ú Ghí­ÑÌ<Ĩ­ƒ6øU^†ëžK3lÂ$?ü]ÉáB@.9pmercurial-3.7.3/tests/bundles/rename.sh0000755000175000017500000000053212676531525017533 0ustar mpmmpm00000000000000#!/bin/sh # @ 3: 'move2' # | # o 2: 'move1' # | # | o 1: 'change' # |/ # o 0: 'add' hg init copies cd copies echo a > a echo b > b echo c > c hg ci -Am add echo a >> a echo b >> b echo c >> c hg ci -m change hg up -qC 0 hg cp a d hg mv b e hg mv c f hg ci -m move1 hg mv e g hg mv f c hg ci -m move2 hg bundle -a ../renames.hg cd .. mercurial-3.7.3/tests/bundles/rebase.hg0000644000175000017500000000317712676531525017516 0ustar mpmmpm00000000000000HG10BZh91AY&SYèÿõ#]ÿþ¿ßöÿÿ÷ÿ÷ÑÝÿÿïÿÿîÿþ½ÿÿ«ÿßïþç{÷ÐûyE€`À€h4Ñ“FŒC@d†h@4C@¤ÒOTój0e6‘Â1 ™©£M2iê7¨Ò='êi¤Ä14 š 2bd23Ôƒ#A£!£@ÓÄh4ÈÄ€ÔhÐSI4ÐC@ÐÔ4Ð € F€A @@ hÐ4h£&†€È Ð4 €4h†€24ª)é꟩¤dÚhhPý! 4Ó#Ñ3DÉäM¤2i“53"™©èM¨=5=M4b=G¨zjm'¨õ¦ÓDyOPÓLÔòši²‡¤yAéŠi“6TrÎn¾G+šñ|ÞFï ñ¹ÏŒ Ø,ë~µ)ÿâ•ãÇeéwþ˜Ö`¹r0!2k`ƒƒƒ‰ë¸ÕÇžyJrÔöLó)ìÓovº8€¤”%¤–.²$L>.#%PSÃHÁÆB+ †ÅE0¡‹ŒÌj__¡´¶XY¨íËÁAA°Íé ®Â .€:˜%¦! &¹¹®]˜^½å¤Ú3e^ŒcoaJpÊJç²ô4á¶Ð­ºV³×ž—½‘IŠHAÌT2ù£ìsknœT£i^U¤HÁŸ­\@jOÀk#¦îf¢¢¯âM/{Vžu2sšúŽŒKØ'tZ¸uåv¶Ø“hÔXž%—MªÐé—´ˆ¿¬ "À5RK+þSt€…Ðjë”$Ð šf‘L(z3?K6ŠáƒUh ŽˆG¬"TØe,,4Ò‰…T†aop‹`ä±.`bdžÁôKÍô…#ÃQíøõ'°Ã#)©§ÈNÔUXf¬¬h.XRWK-°§-V_„ÉT|. ‘UÔŒTÄÆúÙª$É×›gŽyŸnÓ{Se\AqÇ¢åg>k°µ â;åA*¹zˆù¡)оa/RRPƒŒ8b›…”·Ý›˜¦ZÌÁjÍ ò««32„«Té5é*ÓQ>ëÄ%slÑ섦ufÌl!qïУ'|„HtnÅ…é~³ÙJbç6èAÍ¢^µÍË´Z ÌšÄtªßwÊ? 
J É.³œBÚÛÖ Ñ!c ¤Ú0Ã4©wKI°Ä \*³¸_,áUo>>ܾ[3ÕsIϳ”kF¨űæ dsœá® wèKÆÇY,,}e ÌÆ¾…hú2c4|z-†Ý'Sá©I½Ö»…1¢ nwÿ’Ò˜W²šú…>yºåÀI-õè œ!Þ©nŠ& ´p-K‘¡Û‘bÁ±cè«$É ±@ÄE‚¶sk÷.?8“·—[z­oMÙ•Xýíoy&ùIп `u)®"qk ö<b¨¡'‘ImjŽ ”%>šÎ±Þz`Ѧ›Bª°R—ˆ¬1¤Æ× ¹s£d´E°`5/õK°y9˜•6I¨ `z‰,c-2‹I÷Qwñ9 îCj×êŽ*´ë[ ¯UI,”¶œ½}€ŽGíì­5IÍ“zz(e¨(,ükRºÚRJVÅ1ty`³ {I³ÙcwX2¸÷“ýÂ68+h¹Oà=h3†\hxy ª‡iÆ¿Muÿ-[1ˆæmÖí+µATOɆ›¢Œ`Çh!>»€Â„Ð^öv£LW:á{ùô·4a—¹[¡ãm×,obd+W4áÚÙÞý÷±IO‚QyQ{wä—Žµ$ÂÐ?Ë{tf{v`6ÍóìÈcä^àÚ¥C”LšX°ïÒž^"bt¯<6æ¦*eN]l8á2²˜}êPšõCgeÁÏcš ²ü4G*´¹xgjÎá¥i¸I»-Š˜Òç¹i—ùNc¼.£’W%êfú&5± §ú“¸½‹ÌN@R¥<™Üô†Ò§ÓKœaÌH}7ÀÞ΄‡ï ¸FàX7‚t‰ñã‡ý¶ä÷~÷*™àž ààþis¤Ñ=Áem-:1²×d„‡ÝÑÎ$wã/“5«0åW"èÐ^gf¶]âYÔäÝ|Š`¢u ¹ñi$€|¦ñ}¡òëÌîå­)Óƒ˜ÿÅÜ‘N$:?ýHÀmercurial-3.7.3/tests/bundles/remote.sh0000755000175000017500000000101012676531525017547 0ustar mpmmpm00000000000000#!/usr/bin/env bash hg init remote cd remote echo "0" >> afile hg add afile hg commit -m "0.0" echo "1" >> afile hg commit -m "0.1" echo "2" >> afile hg commit -m "0.2" echo "3" >> afile hg commit -m "0.3" hg update -C 0 echo "1" >> afile hg commit -m "1.1" echo "2" >> afile hg commit -m "1.2" echo "a line" > fred echo "3" >> afile hg add fred hg commit -m "1.3" hg mv afile adifferentfile hg commit -m "1.3m" hg update -C 3 hg mv afile anotherfile hg commit -m "0.3m" hg bundle -a ../remote.hg cd .. 
rm -Rf remote mercurial-3.7.3/tests/bundles/renames.hg0000644000175000017500000000310012676531525017671 0ustar mpmmpm00000000000000HG10BZh91AY&SYk—ÀkIÿÿ¿ýSþ?ÿÿ÷ÿÿÿ¿ÿÿúõý~ÿû¿{½¶}»ÿóÐø¨€ Ø RI¨ÓQ¼¤Ù¨ô'©Tõ=OjiƒF)úš˜ÐÈOS#&&h= 2hž€&O@4Ú2˜ô¦Ð#z§µ=#M4e=©ˆe?T@@hMA¡ ¦€È2 €È†¦š F@¤4L&Šd hššdêÔÈ@h¥24 €@Ð44€L€#@ €Ó@Ðhhi 2 †F 2¡ i¦€h€d‘À¸Iç8^!G³3>QjÉ .ËêQ澫çá´ÚœÔ ç:’´©AeÚ’ŽO•;Œ Í”¶¡â@¸ý²Ì²Â’QäRŽS¥HÒi̶g•¯±"ÀD@„%ÁFBH’*"‚ 4$CDM¾dP‰ $ØÁ‰€1)$Ù‰ •Ü€ÒEŒœySg‘£¶ô"ôW ÃˆØÍ׌ô+n²•7@ò[ t•$zdÌ LV'â+!AÐꊭ,J—H’mpGtæ¢Y\ œ º¾•lu Àój7h±”‰ýøzIÙMzÞ´•pŸ¥Ü&#Y¤xEH1*»¶¾¤Âí2 `OZ?4‰¾uPf †˜§ƒfÕàfyÊ¡ Õç´½õ™HÙ)6ÐIi}að—(Aé¤'?¸NT²¡‘}§1bDdh2¨9öÃÃJ·YP©¡âa´=¦Qu&$BŠ£1 ”VÒP­H¹âs‰Æp ÂPNª+–r6:%…‰2,ZF«oÒ„f›H¼ZbÑ‚á•P¨P ”ÕÐû, @Áà‘>‘À&o ¨:J¤Ew4,L(-©ÀÙ["(€|ñLMÓPPpc„ikƒ’ÒUf(ZÚ°šƒh ¬-‹&Ôd•*'!Qi„Ž‘˜I05% òuk QW˜&ñÑaîö ‚ªâ¡"¸[ ºC„h) 9а&±7-&~¤X$>pEÇYŠÛ:ªPi §Vq9¼ØL`¥"RsÕIg N„ jû‘NGäf0—*CŒÔ\µ©ÚÆ ‹N«Æ>š6¦W†²V^ŸÁ–ãÛúÀ• “÷Y” tWT§ß->l¤c4 F•E¥‘=—JrÕïVÂëÎÄ"1û} ëJŽŸ~hPqÚ’·ª ÂpÒéE PÍÀX.j§DÅ.¬Š-¤ì C“ˆ¢Ã•rUFPzP÷<£}(PpmXuCÒÖ±¼^€„_€=.:‘]tám‰À«™ˆ(b+‚@œÃD]@Du…šs kJ°´Ò clA\‹{Ä Â+`¢ððÅ- [UΖñY"álÊ r!;…áA®ÓúâÀC7P~fdU€@‘BDgVY1¼fŠ0 ÂhJUbØÒdrI¥9 ›‚CÊ*Í%¬¤bTvx¤DTh, eÄ—ØöB»nh Si\9Ü$6ŸïFL£&ÞI ¶±}vJßEäûðËl!‹ÅêõU610 >ó|c~¢•ѱÇ%Ñ}EHP)N=ÿ¹íYØWcÌ!Jˆ5“sbJôØ‹!`ÍÖl&„`ÝèpRôÚÕ}$ŽF Svˆ—ûkŸ"”×Å\tTHí©L$ŸL¦Q³†³CšºkQè³Uýóú CXÔîÚÔ–°-¬/`EE±BJÝÒrC_6h¥ç|…š$©2šX_°oÛ’àܘT½ë‹á{ùßÒóÉR³yd(±ëŠ2p¬šª„.èµÒÑù¡8Uçò•jv”{/¹M¶«Xà„^7p,8t°“ÚË¿mcqeEÔ=6àðè~ÖžÒææ÷Óë¿l+Ýr·ýTç ·)±•>Šqå…úÑì…cÿ¾â*:ÄxáÓ% ¿vÌÁÏ8Ì(b!æH`d\'Ý‹Ôe”5Èþ.äŠp¡ ×/€Ömercurial-3.7.3/tests/bundles/issue4041.hg0000644000175000017500000000425012676531525017707 0ustar mpmmpm00000000000000HG10BZh91AY&SYª©3hÿÿÿÿÿÿÿÿÿÿÿÿûÿÿÿÿÿÿwßÿwßgÿoÿÿÿÿôoàñ@ Ù`P¤€EU=$ž£É4Ñêm#LÊb=O)êz¡é¦“OBbCµ44õ‘µB2cQ€4˜dŒL ”Ú#OIê ÊfÓõCF‡¦M6ŒSÈ µD 4h@44 4#&€h€È“Q™ @¡hd1¤4¨Ð€ƒC@ h †€Ð h ÈÉ @@¡ 4C@@22h’%0©ì™I“ƒ4ÔÄiúB6¦É=FjSM6“@É¡šš Ój ©‰„zõýSjdØHÑêb0™ ˜F@ÓM“F›S&€ý(Ó4q.“9ÑùÿžÏs²D|¾Q>ñ–bœ§1¬XìÔ©•‚)Œ‡›ÁF"+¹bmºòÀð =^[þ^8ymåkY Ðò08&Ì©çÖ-“QNÐOÏÐ^äT‘†L̘;Œ–Q‡:®C‰ BòFfűfªðÈ‘Á27£€”ÂCØ ¤ÉnÄÀŒ’-רRÛÍ“JµƒÌÕruÑ p’~¹¸ß-D.–ƒ%‡®æy¼µþxƤ'9² 
€Á˜3æ8šñ³÷èÒºÙ´Óo+6¦!·ÅV„Ÿ%Í¡ÁAGÈœ‘bÂÛŒ¿5Ê3Bv§íýýaw32t…Öø&ɈúEãþ¢Äe‘Õ£ ²ñM¹²“ › ¦2yð‘aƒGØ,èA¤/é«×¨=岤°¦Ä8¦PaöÁ¡ñ!kNcììâMZ9™™:ÈtÓ¢EO€‚ÁCBO&1Ë\§µ"úÊTyˆ˜Q—YZÑö!¿VÈ䋬GŠX‡ z銴$‹°;¶Dà øï‰jVÁ qÏŠ˜]Ï %J{3œ9œ†s‡ 3º\ÞôÃHƒUt–O’:&Íã©i¡¹¡3´r8d–D & y¦-šðj™#‹³ÏÌøŒxˆ‚… l[jeR™@4Ê”L€‘#b€‡6jcÂì3È3A™Õ "!q÷o0Ñ3fºÚi RŠàú’!%5­JXbM¶ü²ÀÓ´)„'5 L(DSÎéÅÛâzBäÓ™â+¥Ñ„Ó^•ÀYpYR2˜©0&:)œMqML´<ì²ËVPŠXD£hTCZµ‰˜¶¬Y ,ÇNˆ·jp—ˆ ¢ièÉŽ cÚ&ÒÅT'\¯"€÷…Îå$ð²Ï +h[ì®%Pj”ÃS\pSôGX¹eÞÑTú3†UÈ‹¢©Å*eÉ7‹ÑŠ™jôÄ!ÁB€a¶=K—ÜYPÉRŠŸDó-H šB´=ÌdäQ0 íu+Rë Räãm.däM‹|ZÙN´ÁfÔ…‘E"SÚ²˜{e"J¦zšœÉ$b›ÕTú¡ ©)RPp˜R•¶¨N³x\¦™S-‚€Ð—fAn%¹€<ÒúA`ücÂèŠøy#æXè1 Á°£ÃFÆdƒ¥{Íx!N<Äÿ7l›î%6s"˜Ð¹×à⬈Exbø~î©syIn‰rÉÊsNûs|«‡æ„Þ Èä­hÜ2ådN5±m‘á'B(W"öȹ#ÁKGs’ !‡üèâÚÿ]ŠÄ¶a~Óפ^ŒÞs‡—Žƒ!2NA󯃳Œ1,=·9iÖñF<~TâÉBjMë„g‹–t¸+ÎÙ¥èjSgIä´©†d[³>å°Ùz …\Ù?'5% ¼Vkƒè%CŽEgÿàÊNËGX’“0-U“Þ:G‚Æl|N°hï—pÿU‘BY–Ôg/ü¦ï…¥±U,PP ° ÛPý0Ë„YŸÄÖpS¯oŽ3ES®J4Æú°o¼B\Ä{¯%Àñ‹?¿ÙjÃIcrâR¦(/äpZb‚îχnê\)¼6…CšKdÎhK»saÚ}BÕšUt´k®É“¢Æ"b.ÊãZÙÍÒ5ëC‚Ê& ëffå “à6d´V#m QG>ãŠETЧ—Ø+Ú(Þvdfÿ;ª,øØd½Ó€5»§7CgŠ \eYÚN7)†A­*½ÿ7rÌa臠œõ²:-;޲úÆ~ÛØ<œËIgµ¼ÿÖØoõ¯D";œ®5«œt?¥s›b3BnÝs¥Ô ; éLˆ`Ê·=¡J¬“˜áî ›®® f¸Ï–áFãB6¬ë`ïYáˆÇŸø»’)„…P5I˜mercurial-3.7.3/tests/bundles/test-merge-symlinks.hg0000644000175000017500000000166312676531525022176 0ustar mpmmpm00000000000000HG10BZh91AY&SYn¿pU.ÿÿÿ™ã¿^½çZOßÿÿÿþ;ýÁ§]¾ö¯×Øãù®3ûbÀ»©ÁG2ƒRi  Fš= h 4  €h2=!¡ÄÆ‚bÔÍ@4 Ð 4€Ð¦@!4Ñêi C@h$¤‰=A¦j2lšLP4zi SMµhõ=M ÓODhÄÓj21=L˜ƒMš R{½¡ÉV8òéKêBw½mׯŠ!I@AL('@‘t¹µµL[u$Þ]¿ÓöÉûöÓnDœ š@8af˜–ŒLŽÖÚTAÄ1w€€‘R0!¸„ P©ÔHCO©]_<©Èr8Ä@î?¹_]O Þ2!# $Àª3‘EM[G&«ìQçÞ‹ºç“Ù÷µpVòù”fbe¼íý;I¹^ ê&'ÐZO¹G¥d·QÀ$T®-qïðª ýxÄWˆ)Ví Ø©Â/‘ì½¼æúS¼‘e%žBùäl Xp\²Ï<â3J›«Ò•瘹 ‚HøØæcaJâÉ2ÓQ+ÄÚÑÃ6 c@Í€šd£\V4a1–xÏ<ƒéCÊ£°8)ìb¦C†B`¡\2˜â0© ¡<Ì!Ķë #CÈ—YePÊ« ¶½Ù X‚ ¿•õ²ŠÂÃÇSšãŠ H’Ÿb5aŠÒÔ±pÑû=û(¼QBž¸+ŠnÅD¤9ht80«ª.µu”©j]Ø• ÖHZÊØHà@¸ê:hÖFj•¡×wW¼èÈ·§—éD6”Î-)`FP AIL(!FPg@?Qí·8Rhè"œìÑ1àesî4À¹ØAZ£q±&€Açfb]ŸŠôðl·“§êš?ôy¸ÉÓ)’'[ì€ÞÖ œÅU° nd( ð«™`€Š^0› +„QäáB‚cF¡å-hà**¥JoÂ'YÚŒ5ÊÃC?Œ¢Áˆ‹õ ê2ðõžÑ竲‹ï.Y vcb%•û½@°f\/â“äÒ-b/òkMú;A¢Pç£i¨> Wªœ° aúøIp,ËÓ˘#" ÞÇ 
öÀ±ñƒ˜6PCD…(+°Ë„BXûœ‡Âé©§nôÚ-ƒ$]ÉáBAºýÁTmercurial-3.7.3/tests/bundles/rebase-revset.hg0000644000175000017500000000346412676531525021023 0ustar mpmmpm00000000000000HG10BZh91AY&SYJw_ÆlÿÿÿÛÿÿÿÿÿÿÿÿÿÿÿÿÿÛÿßË÷ÿÿÿýÿþîsµÐ»ÃÙ€Œ•!%H“M¨m!§¨ÐÈ4™ dhFÉ õ44hÉ£C@ ƒ@ôM€É 1†M 2=DÉ hÁ h €h £(SP44ÓMG¨4“CFšÐÉ  4 ÓMƒ@€ @  š€ ¡¡ Ñ €ÐÁ%IOzžSA4bS@ÔÈ@hé@441ÐÐh4h€Ó@Ôƒ&‡˜æ3ºÞoG–ËîÎéÚN{Ú÷´3Å/Íškw¦ãÜä¦8]¿)žÐy€›•ÀÂg€†Ft- CFÀ±‹&N°ÃV°µ"`žÈH=zöC , P jîÚCÅ:fÓ,À:®@BÖbS, (V_ŸÙlå—ö‰2Â@³lŒ ¼TI²»dZ É0X†â#ØÐŽÃ¦z‘- Si”興Š|®.EË™lIRe–­Zµk©z}«°ôáÙÿ9áku\‘Ì"`‡ÖÓÆ‡ ª—û¢RÎ"tL –ÅTnKê9Šœ*o[ÑŠL‘žC½€X̪Ÿ™^ý(\ñ-Ñ={f,Š‚“mk«À·bíqÂÌS”2ægáW&i…BxÈ‚EÈUÅ®3‚¹a ‰òìüL÷à'ÝŠrIªÊÎ ¬ŒCP.7W[ý™(Æ­iEò­;»y…@XEÆdÂM­öÜÓÍu(ÈLí5IQ ;ž-¬DNO"[óF0ý²)'¦VÒWÇ£KÔJAbUºÛ} ñŠÉU0ù*dïz‰=Ùk@sºÆcJǺu>XšEŠÕ  êd£•Ë]‘ÍséáZßz oÀ»g[äB0diJY$¹ƒ¬ƒÔOSøÔéµH{™À0ÇÊ ô <­šoÚz²Œ­ÌsT)x¢kÍ‹SzôÞùX½+XÀ0ò‰ÐiŸðlNsr0Ö@¨Õ÷0¢µªòâä䟜eï̲íÇÃü˜%A•°¯ÍËd*Áæ[CpÂõû›žUƒ¤EB‰ºûôs—Õ3ÄSš46‰E*ÑÙˆºÚ ×ãÐÀè b'Kth›O¢¬/̃![yC`opbáE³sC†Z"m1ku-#ËîC¡@©?/J”‰ênœYpsGý2 È’ˆxdƒŸõDÒ8H‡¼·ÊM«A8M ÜÂÃí2xM¶±n»%Xúœ’qwÒaÊ;H6匨A‹Á+K»•%ûÎ;/Äp}ᡲ̰NîÐáŸEPM¬4Y™î1Ñ6+íAƒÕþ9pMAÊmÑÙE@,ôA€0Tx‹ÁêÎÞÄâòLî 'z/dXy‘4/ ?-{ˆuHkEN ƒ|¢CBˆFÑA 耗"¶(¸ø3*¦¡(ô8%ä¶F¢x?ʈ[Q\ÅC ¬ L?]C<*¤C*£H%X“N£ûÂcâ(àÏ̘«>%à—Ä:#é(‚ÚE¿Ð´Ê¢ˆ5V’[h/ñÁÐFdê?¬ñn¿âîH§  NëøÀmercurial-3.7.3/tests/test-obsolete-tag-cache.t0000644000175000017500000000735112676531525021067 0ustar mpmmpm00000000000000 $ cat >> $HGRCPATH << EOF > [extensions] > blackbox= > rebase= > mock=$TESTDIR/mockblackbox.py > > [experimental] > evolution = createmarkers > EOF Create a repo with some tags $ hg init repo $ cd repo $ echo initial > foo $ hg -q commit -A -m initial $ hg tag -m 'test tag' test1 $ echo first > first $ hg -q commit -A -m first $ hg tag -m 'test2 tag' test2 $ hg -q up -r 0 $ echo newhead > newhead $ hg commit -A -m newhead adding newhead created new head $ hg tag -m 'test head 2 tag' head2 $ hg log -G -T '{rev}:{node|short} {tags} {desc}\n' @ 5:2942a772f72a tip test head 2 tag | o 4:042eb6bfcc49 head2 newhead | | o 3:c3cb30f2d2cd test2 tag | | | o 2:d75775ffbc6b test2 first | | | o 
1:5f97d42da03f test tag |/ o 0:55482a6fb4b1 test1 initial Trigger tags cache population by doing something that accesses tags info $ hg tags tip 5:2942a772f72a head2 4:042eb6bfcc49 test2 2:d75775ffbc6b test1 0:55482a6fb4b1 $ cat .hg/cache/tags2-visible 5 2942a772f72a444bef4bef13874d515f50fa27b6 042eb6bfcc4909bad84a1cbf6eb1ddf0ab587d41 head2 55482a6fb4b1881fa8f746fd52cf6f096bb21c89 test1 d75775ffbc6bca1794d300f5571272879bd280da test2 Hiding a non-tip changeset should change filtered hash and cause tags recompute $ hg debugobsolete -d '0 0' c3cb30f2d2cd0aae008cc91a07876e3c5131fd22 -u dummyuser $ hg tags tip 5:2942a772f72a head2 4:042eb6bfcc49 test1 0:55482a6fb4b1 $ cat .hg/cache/tags2-visible 5 2942a772f72a444bef4bef13874d515f50fa27b6 f34fbc9a9769ba9eff5aff3d008a6b49f85c08b1 042eb6bfcc4909bad84a1cbf6eb1ddf0ab587d41 head2 55482a6fb4b1881fa8f746fd52cf6f096bb21c89 test1 $ hg blackbox -l 4 1970/01/01 00:00:00 bob (*)> tags (glob) 1970/01/01 00:00:00 bob (*)> 2/2 cache hits/lookups in * seconds (glob) 1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 2 tags (glob) 1970/01/01 00:00:00 bob (*)> tags exited 0 after * seconds (glob) Hiding another changeset should cause the filtered hash to change $ hg debugobsolete -d '0 0' d75775ffbc6bca1794d300f5571272879bd280da -u dummyuser $ hg debugobsolete -d '0 0' 5f97d42da03fd56f3b228b03dfe48af5c0adf75b -u dummyuser $ hg tags tip 5:2942a772f72a head2 4:042eb6bfcc49 $ cat .hg/cache/tags2-visible 5 2942a772f72a444bef4bef13874d515f50fa27b6 2fce1eec33263d08a4d04293960fc73a555230e4 042eb6bfcc4909bad84a1cbf6eb1ddf0ab587d41 head2 $ hg blackbox -l 4 1970/01/01 00:00:00 bob (*)> tags (glob) 1970/01/01 00:00:00 bob (*)> 1/1 cache hits/lookups in * seconds (glob) 1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2-visible with 1 tags (glob) 1970/01/01 00:00:00 bob (*)> tags exited 0 after * seconds (glob) Resolving tags on an unfiltered repo writes a separate tags cache $ hg --hidden tags tip 5:2942a772f72a head2 4:042eb6bfcc49 
test2 2:d75775ffbc6b test1 0:55482a6fb4b1 $ cat .hg/cache/tags2 5 2942a772f72a444bef4bef13874d515f50fa27b6 042eb6bfcc4909bad84a1cbf6eb1ddf0ab587d41 head2 55482a6fb4b1881fa8f746fd52cf6f096bb21c89 test1 d75775ffbc6bca1794d300f5571272879bd280da test2 $ hg blackbox -l 4 1970/01/01 00:00:00 bob (*)> --hidden tags (glob) 1970/01/01 00:00:00 bob (*)> 2/2 cache hits/lookups in * seconds (glob) 1970/01/01 00:00:00 bob (*)> writing .hg/cache/tags2 with 3 tags (glob) 1970/01/01 00:00:00 bob (*)> --hidden tags exited 0 after * seconds (glob) mercurial-3.7.3/tests/test-filebranch.t0000644000175000017500000000772312676531525017541 0ustar mpmmpm00000000000000This test makes sure that we don't mark a file as merged with its ancestor when we do a merge. $ cat < merge > import sys, os > print "merging for", os.path.basename(sys.argv[1]) > EOF $ HGMERGE="python ../merge"; export HGMERGE Creating base: $ hg init a $ cd a $ echo 1 > foo $ echo 1 > bar $ echo 1 > baz $ echo 1 > quux $ hg add foo bar baz quux $ hg commit -m "base" $ cd .. $ hg clone a b updating to branch default 4 files updated, 0 files merged, 0 files removed, 0 files unresolved Creating branch a: $ cd a $ echo 2a > foo $ echo 2a > bar $ hg commit -m "branch a" Creating branch b: $ cd .. $ cd b $ echo 2b > foo $ echo 2b > baz $ hg commit -m "branch b" We shouldn't have anything but n state here: $ hg debugstate --nodates | grep -v "^n" [1] Merging: $ hg pull ../a pulling from ../a searching for changes adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg merge -v resolving manifests getting bar merging foo merging for foo 1 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ echo 2m > foo $ echo 2b > baz $ echo new > quux $ hg ci -m "merge" main: we should have a merge here: $ hg debugindex --changelog rev offset length ..... 
linkrev nodeid p1 p2 (re) 0 0 73 ..... 0 cdca01651b96 000000000000 000000000000 (re) 1 73 68 ..... 1 f6718a9cb7f3 cdca01651b96 000000000000 (re) 2 141 68 ..... 2 bdd988058d16 cdca01651b96 000000000000 (re) 3 209 66 ..... 3 d8a521142a3c f6718a9cb7f3 bdd988058d16 (re) log should show foo and quux changed: $ hg log -v -r tip changeset: 3:d8a521142a3c tag: tip parent: 1:f6718a9cb7f3 parent: 2:bdd988058d16 user: test date: Thu Jan 01 00:00:00 1970 +0000 files: foo quux description: merge foo: we should have a merge here: $ hg debugindex foo rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 3 ..... 0 b8e02f643373 000000000000 000000000000 (re) 1 3 4 ..... 1 2ffeddde1b65 b8e02f643373 000000000000 (re) 2 7 4 ..... 2 33d1fb69067a b8e02f643373 000000000000 (re) 3 11 4 ..... 3 aa27919ee430 2ffeddde1b65 33d1fb69067a (re) bar: we should not have a merge here: $ hg debugindex bar rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 3 ..... 0 b8e02f643373 000000000000 000000000000 (re) 1 3 4 ..... 2 33d1fb69067a b8e02f643373 000000000000 (re) baz: we should not have a merge here: $ hg debugindex baz rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 3 ..... 0 b8e02f643373 000000000000 000000000000 (re) 1 3 4 ..... 1 2ffeddde1b65 b8e02f643373 000000000000 (re) quux: we should not have a merge here: $ hg debugindex quux rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 3 ..... 0 b8e02f643373 000000000000 000000000000 (re) 1 3 5 ..... 3 6128c0f33108 b8e02f643373 000000000000 (re) Manifest entries should match tips of all files: $ hg manifest --debug 33d1fb69067a0139622a3fa3b7ba1cdb1367972e 644 bar 2ffeddde1b65b4827f6746174a145474129fa2ce 644 baz aa27919ee4303cfd575e1fb932dd64d75aa08be4 644 foo 6128c0f33108e8cfbb4e0824d13ae48b466d7280 644 quux Everything should be clean now: $ hg status $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 4 files, 4 changesets, 10 total revisions $ cd .. 
mercurial-3.7.3/tests/README0000644000175000017500000000021512676531525015147 0ustar mpmmpm00000000000000To run the tests, do: cd tests/ python run-tests.py See https://mercurial-scm.org/wiki/WritingTests for more information on writing tests. mercurial-3.7.3/tests/test-hgweb-bundle.t0000644000175000017500000000141412676531525017776 0ustar mpmmpm00000000000000#require serve $ hg init server $ cd server $ cat >> .hg/hgrc << EOF > [extensions] > strip= > EOF $ echo 1 > foo $ hg commit -A -m 'first' adding foo $ echo 2 > bar $ hg commit -A -m 'second' adding bar Produce a bundle to use $ hg strip -r 1 0 files updated, 0 files merged, 1 files removed, 0 files unresolved saved backup bundle to $TESTTMP/server/.hg/strip-backup/ed602e697e0f-cc9fff6a-backup.hg (glob) Serve from a bundle file $ hg serve -R .hg/strip-backup/ed602e697e0f-cc9fff6a-backup.hg -d -p $HGPORT --pid-file=hg.pid $ cat hg.pid >> $DAEMON_PIDS Ensure we're serving from the bundle $ (get-with-headers.py localhost:$HGPORT 'file/tip/?style=raw') 200 Script output follows -rw-r--r-- 2 bar -rw-r--r-- 2 foo mercurial-3.7.3/tests/test-http-proxy.t0000644000175000017500000001643112676531525017576 0ustar mpmmpm00000000000000#require serve $ cat << EOF >> $HGRCPATH > [experimental] > # drop me once bundle2 is the default, > # added to get test change early. > bundle2-exp = True > EOF $ hg init a $ cd a $ echo a > a $ hg ci -Ama -d '1123456789 0' adding a $ hg --config server.uncompressed=True serve -p $HGPORT -d --pid-file=hg.pid $ cat hg.pid >> $DAEMON_PIDS $ cd .. 
$ tinyproxy.py $HGPORT1 localhost >proxy.log 2>&1 > $DAEMON_PIDS url for proxy, stream $ http_proxy=http://localhost:$HGPORT1/ hg --config http_proxy.always=True clone --uncompressed http://localhost:$HGPORT/ b streaming all changes 3 files to transfer, 303 bytes of data transferred * bytes in * seconds (*/sec) (glob) searching for changes no changes found updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd b $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 1 changesets, 1 total revisions $ cd .. url for proxy, pull $ http_proxy=http://localhost:$HGPORT1/ hg --config http_proxy.always=True clone http://localhost:$HGPORT/ b-pull requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd b-pull $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 1 changesets, 1 total revisions $ cd .. 
host:port for proxy $ http_proxy=localhost:$HGPORT1 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ c requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved proxy url with user name and password $ http_proxy=http://user:passwd@localhost:$HGPORT1 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ d requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved url with user name and password $ http_proxy=http://user:passwd@localhost:$HGPORT1 hg clone --config http_proxy.always=True http://user:passwd@localhost:$HGPORT/ e requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved bad host:port for proxy $ http_proxy=localhost:$HGPORT2 hg clone --config http_proxy.always=True http://localhost:$HGPORT/ f abort: error: Connection refused [255] do not use the proxy if it is in the no list $ http_proxy=localhost:$HGPORT1 hg clone --config http_proxy.no=localhost http://localhost:$HGPORT/ g requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat proxy.log * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=branchmap HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=stream_out HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - 
x-hgarg-1:cmds=heads+%3Bknown+nodes%3D83180e7845de420a1bb46896fd5fe05294f8d629 (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=83180e7845de420a1bb46896fd5fe05294f8d629&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=phase%2Cbookmarks (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=phase%2Cbookmarks (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - 
x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=phase%2Cbookmarks (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=phase%2Cbookmarks (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - x-hgarg-1:namespace=phases (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=capabilities HTTP/1.1" - - (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=batch HTTP/1.1" - - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=getbundle HTTP/1.1" - - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=83180e7845de420a1bb46896fd5fe05294f8d629&listkeys=phase%2Cbookmarks (glob) * - - [*] "GET http://localhost:$HGPORT/?cmd=listkeys HTTP/1.1" - - 
x-hgarg-1:namespace=phases (glob) mercurial-3.7.3/tests/test-minirst.py.out0000644000175000017500000005066012676531525020122 0ustar mpmmpm00000000000000== paragraphs == 60 column format: ---------------------------------------------------------------------- This is some text in the first paragraph. A small indented paragraph. It is followed by some lines containing random whitespace. The third and final paragraph. ---------------------------------------------------------------------- 30 column format: ---------------------------------------------------------------------- This is some text in the first paragraph. A small indented paragraph. It is followed by some lines containing random whitespace. The third and final paragraph. ---------------------------------------------------------------------- html format: ----------------------------------------------------------------------

                        This is some text in the first paragraph.

                        A small indented paragraph. It is followed by some lines containing random whitespace.

                        The third and final paragraph.

                        ---------------------------------------------------------------------- == definitions == 60 column format: ---------------------------------------------------------------------- A Term Definition. The indented lines make up the definition. Another Term Another definition. The final line in the definition determines the indentation, so this will be indented with four spaces. A Nested/Indented Term Definition. ---------------------------------------------------------------------- 30 column format: ---------------------------------------------------------------------- A Term Definition. The indented lines make up the definition. Another Term Another definition. The final line in the definition determines the indentation, so this will be indented with four spaces. A Nested/Indented Term Definition. ---------------------------------------------------------------------- html format: ----------------------------------------------------------------------
                        A Term
                        Definition. The indented lines make up the definition.
                        Another Term
                        Another definition. The final line in the definition determines the indentation, so this will be indented with four spaces.
                        A Nested/Indented Term
                        Definition.
                        ---------------------------------------------------------------------- == literals == 60 column format: ---------------------------------------------------------------------- The fully minimized form is the most convenient form: Hello literal world In the partially minimized form a paragraph simply ends with space-double-colon. //////////////////////////////////////// long un-wrapped line in a literal block \\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\ This literal block is started with '::', the so-called expanded form. The paragraph with '::' disappears in the final output. ---------------------------------------------------------------------- 30 column format: ---------------------------------------------------------------------- The fully minimized form is the most convenient form: Hello literal world In the partially minimized form a paragraph simply ends with space-double-colon. //////////////////////////////////////// long un-wrapped line in a literal block \\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\ This literal block is started with '::', the so-called expanded form. The paragraph with '::' disappears in the final output. ---------------------------------------------------------------------- html format: ----------------------------------------------------------------------

                        The fully minimized form is the most convenient form:

                        Hello
                          literal
                            world
                        

                        In the partially minimized form a paragraph simply ends with space-double-colon.

                        ////////////////////////////////////////
                        long un-wrapped line in a literal block
                        \\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\
                        
                        This literal block is started with '::',
                          the so-called expanded form. The paragraph
                            with '::' disappears in the final output.
                        
                        ---------------------------------------------------------------------- == lists == 60 column format: ---------------------------------------------------------------------- - This is the first list item. Second paragraph in the first list item. - List items need not be separated by a blank line. - And will be rendered without one in any case. We can have indented lists: - This is an indented list item - Another indented list item: - A literal block in the middle of an indented list. (The above is not a list item since we are in the literal block.) Literal block with no indentation (apart from the two spaces added to all literal blocks). 1. This is an enumerated list (first item). 2. Continuing with the second item. (1) foo (2) bar 1) Another 2) List Line blocks are also a form of list: This is the first line. The line continues here. This is the second line. ---------------------------------------------------------------------- 30 column format: ---------------------------------------------------------------------- - This is the first list item. Second paragraph in the first list item. - List items need not be separated by a blank line. - And will be rendered without one in any case. We can have indented lists: - This is an indented list item - Another indented list item: - A literal block in the middle of an indented list. (The above is not a list item since we are in the literal block.) Literal block with no indentation (apart from the two spaces added to all literal blocks). 1. This is an enumerated list (first item). 2. Continuing with the second item. (1) foo (2) bar 1) Another 2) List Line blocks are also a form of list: This is the first line. The line continues here. This is the second line. ---------------------------------------------------------------------- html format: ----------------------------------------------------------------------
                        • This is the first list item.

                          Second paragraph in the first list item.

                        • List items need not be separated by a blank line.
                        • And will be rendered without one in any case.

                        We can have indented lists:

                        • This is an indented list item
                        • Another indented list item:
                          - A literal block in the middle
                                of an indented list.
                          
                          (The above is not a list item since we are in the literal block.)
                          
                        Literal block with no indentation (apart from
                        the two spaces added to all literal blocks).
                        
                        1. This is an enumerated list (first item).
                        2. Continuing with the second item.
                        3. foo
                        4. bar
                        5. Another
                        6. List

                        Line blocks are also a form of list:

                        1. This is the first line. The line continues here.
                        2. This is the second line.
                        ---------------------------------------------------------------------- == options == 60 column format: ---------------------------------------------------------------------- There is support for simple option lists, but only with long options: -X --exclude filter an option with a short and long option with an argument -I --include an option with both a short option and a long option --all Output all. --both Output both (this description is quite long). --long Output all day long. --par This option has two paragraphs in its description. This is the first. This is the second. Blank lines may be omitted between options (as above) or left in (as here). The next paragraph looks like an option list, but lacks the two-space marker after the option. It is treated as a normal paragraph: --foo bar baz ---------------------------------------------------------------------- 30 column format: ---------------------------------------------------------------------- There is support for simple option lists, but only with long options: -X --exclude filter an option with a short and long option with an argumen t -I --include an option with both a short option and a long option --all Output all. --both Output both (this d escript ion is quite long). --long Output all day long. --par This option has two paragra phs in its des criptio n. This is the first. This is the second. Blank lines may be omitted between options (as above) or left in (as here). The next paragraph looks like an option list, but lacks the two-space marker after the option. It is treated as a normal paragraph: --foo bar baz ---------------------------------------------------------------------- html format: ----------------------------------------------------------------------

                        There is support for simple option lists, but only with long options:

                        -X --exclude filter
                        an option with a short and long option with an argument
                        -I --include
                        an option with both a short option and a long option
                        --all
                        Output all.
                        --both
                        Output both (this description is quite long).
                        --long
                        Output all day long.
                        --par
                        This option has two paragraphs in its description. This is the first.

                        This is the second. Blank lines may be omitted between options (as above) or left in (as here).

                        The next paragraph looks like an option list, but lacks the two-space marker after the option. It is treated as a normal paragraph:

                        --foo bar baz

                        ---------------------------------------------------------------------- == fields == 60 column format: ---------------------------------------------------------------------- a First item. ab Second item. Indentation and wrapping is handled automatically. Next list: small The larger key below triggers full indentation here. much too large This key is big enough to get its own line. ---------------------------------------------------------------------- 30 column format: ---------------------------------------------------------------------- a First item. ab Second item. Indentation and wrapping is handled automatically. Next list: small The larger key below triggers full indentation here. much too large This key is big enough to get its own line. ---------------------------------------------------------------------- html format: ----------------------------------------------------------------------
                        a
                        First item.
                        ab
                        Second item. Indentation and wrapping is handled automatically.

                        Next list:

                        small
                        The larger key below triggers full indentation here.
                        much too large
                        This key is big enough to get its own line.
                        ---------------------------------------------------------------------- == containers (normal) == 60 column format: ---------------------------------------------------------------------- Normal output. ---------------------------------------------------------------------- 30 column format: ---------------------------------------------------------------------- Normal output. ---------------------------------------------------------------------- html format: ----------------------------------------------------------------------

                        Normal output.

                        ---------------------------------------------------------------------- == containers (verbose) == 60 column format: ---------------------------------------------------------------------- Normal output. Verbose output. ---------------------------------------------------------------------- ['debug', 'debug'] ---------------------------------------------------------------------- 30 column format: ---------------------------------------------------------------------- Normal output. Verbose output. ---------------------------------------------------------------------- ['debug', 'debug'] ---------------------------------------------------------------------- html format: ----------------------------------------------------------------------

                        Normal output.

                        Verbose output.

                        ---------------------------------------------------------------------- ['debug', 'debug'] ---------------------------------------------------------------------- == containers (debug) == 60 column format: ---------------------------------------------------------------------- Normal output. Initial debug output. ---------------------------------------------------------------------- ['verbose'] ---------------------------------------------------------------------- 30 column format: ---------------------------------------------------------------------- Normal output. Initial debug output. ---------------------------------------------------------------------- ['verbose'] ---------------------------------------------------------------------- html format: ----------------------------------------------------------------------

                        Normal output.

                        Initial debug output.

                        ---------------------------------------------------------------------- ['verbose'] ---------------------------------------------------------------------- == containers (verbose debug) == 60 column format: ---------------------------------------------------------------------- Normal output. Initial debug output. Verbose output. Debug output. ---------------------------------------------------------------------- [] ---------------------------------------------------------------------- 30 column format: ---------------------------------------------------------------------- Normal output. Initial debug output. Verbose output. Debug output. ---------------------------------------------------------------------- [] ---------------------------------------------------------------------- html format: ----------------------------------------------------------------------

                        Normal output.

                        Initial debug output.

                        Verbose output.

                        Debug output.

                        ---------------------------------------------------------------------- [] ---------------------------------------------------------------------- == roles == 60 column format: ---------------------------------------------------------------------- Please see 'hg add'. ---------------------------------------------------------------------- 30 column format: ---------------------------------------------------------------------- Please see 'hg add'. ---------------------------------------------------------------------- html format: ----------------------------------------------------------------------

                        Please see 'hg add'.

                        ---------------------------------------------------------------------- == sections == 60 column format: ---------------------------------------------------------------------- Title ===== Section ------- Subsection '''''''''' Markup: "foo" and 'hg help' --------------------------- ---------------------------------------------------------------------- 30 column format: ---------------------------------------------------------------------- Title ===== Section ------- Subsection '''''''''' Markup: "foo" and 'hg help' --------------------------- ---------------------------------------------------------------------- html format: ----------------------------------------------------------------------

                        Title

                        Section

                        Subsection

                        Markup: "foo" and 'hg help'

                        ---------------------------------------------------------------------- == admonitions == 60 column format: ---------------------------------------------------------------------- Note: This is a note - Bullet 1 - Bullet 2 Warning! This is a warning Second input line of warning !Danger! This is danger ---------------------------------------------------------------------- 30 column format: ---------------------------------------------------------------------- Note: This is a note - Bullet 1 - Bullet 2 Warning! This is a warning Second input line of warning !Danger! This is danger ---------------------------------------------------------------------- html format: ----------------------------------------------------------------------

                        Note:

                        This is a note

                        • Bullet 1
                        • Bullet 2

                        Warning! This is a warning Second input line of warning

                        !Danger! This is danger

                        ---------------------------------------------------------------------- == comments == 60 column format: ---------------------------------------------------------------------- Some text. Some indented text. Empty comment above ---------------------------------------------------------------------- 30 column format: ---------------------------------------------------------------------- Some text. Some indented text. Empty comment above ---------------------------------------------------------------------- html format: ----------------------------------------------------------------------

                        Some text.

                        Some indented text.

                        Empty comment above

                        ---------------------------------------------------------------------- === === ======================================== a b c === === ======================================== 1 2 3 foo bar baz this list is very very very long man === === ======================================== == table == 60 column format: ---------------------------------------------------------------------- a b c ------------------------------------------------ 1 2 3 foo bar baz this list is very very very long man ---------------------------------------------------------------------- 30 column format: ---------------------------------------------------------------------- a b c ------------------------------ 1 2 3 foo bar baz this list is very very very long man ---------------------------------------------------------------------- html format: ----------------------------------------------------------------------
                        a b c
                        1 2 3
                        foo bar baz this list is very very very long man
                        ---------------------------------------------------------------------- = ==== ====================================== s long line goes on here xy tried to fix here by indenting = ==== ====================================== == table+nl == 60 column format: ---------------------------------------------------------------------- s long line goes on here xy tried to fix here by indenting ---------------------------------------------------------------------- 30 column format: ---------------------------------------------------------------------- s long line goes on here xy tried to fix here by indenting ---------------------------------------------------------------------- html format: ----------------------------------------------------------------------
                        s long line goes on here
                        xy tried to fix here by indenting
                        ---------------------------------------------------------------------- mercurial-3.7.3/tests/test-rebuildstate.t0000644000175000017500000000554512676531525020133 0ustar mpmmpm00000000000000 $ cat > adddrop.py < from mercurial import cmdutil > cmdtable = {} > command = cmdutil.command(cmdtable) > @command('debugadddrop', > [('', 'drop', False, 'drop file from dirstate', 'FILE'), > ('', 'normal-lookup', False, 'add file to dirstate', 'FILE')], > 'hg debugadddrop') > def debugadddrop(ui, repo, *pats, **opts): > '''Add or drop unnamed arguments to or from the dirstate''' > drop = opts.get('drop') > nl = opts.get('normal_lookup') > if nl and drop: > raise error.Abort('drop and normal-lookup are mutually exclusive') > wlock = repo.wlock() > try: > for file in pats: > if opts.get('normal_lookup'): > repo.dirstate.normallookup(file) > else: > repo.dirstate.drop(file) > > repo.dirstate.write(repo.currenttransaction()) > finally: > wlock.release() > EOF $ echo "[extensions]" >> $HGRCPATH $ echo "debugadddrop=`pwd`/adddrop.py" >> $HGRCPATH basic test for hg debugrebuildstate $ hg init repo $ cd repo $ touch foo bar $ hg ci -Am 'add foo bar' adding bar adding foo $ touch baz $ hg add baz $ hg rm bar $ hg debugrebuildstate state dump after $ hg debugstate --nodates | sort n 644 -1 set bar n 644 -1 set foo $ hg debugadddrop --normal-lookup file1 file2 $ hg debugadddrop --drop bar $ hg debugadddrop --drop $ hg debugstate --nodates n 0 -1 unset file1 n 0 -1 unset file2 n 644 -1 set foo $ hg debugrebuildstate status $ hg st -A ! bar ? baz C foo Test debugdirstate --minimal where a file is not in parent manifest but in the dirstate $ touch foo bar qux $ hg add qux $ hg remove bar $ hg status -A A qux R bar ? 
baz C foo $ hg debugadddrop --normal-lookup baz $ hg debugdirstate --nodates r 0 0 * bar (glob) n 0 -1 * baz (glob) n 644 0 * foo (glob) a 0 -1 * qux (glob) $ hg debugrebuilddirstate --minimal $ hg debugdirstate --nodates r 0 0 * bar (glob) n 644 0 * foo (glob) a 0 -1 * qux (glob) $ hg status -A A qux R bar ? baz C foo Test debugdirstate --minimal where file is in the parent manifest but not the dirstate $ hg manifest bar foo $ hg status -A A qux R bar ? baz C foo $ hg debugdirstate --nodates r 0 0 * bar (glob) n 644 0 * foo (glob) a 0 -1 * qux (glob) $ hg debugadddrop --drop foo $ hg debugdirstate --nodates r 0 0 * bar (glob) a 0 -1 * qux (glob) $ hg debugrebuilddirstate --minimal $ hg debugdirstate --nodates r 0 0 * bar (glob) n 644 -1 * foo (glob) a 0 -1 * qux (glob) $ hg status -A A qux R bar ? baz C foo mercurial-3.7.3/tests/test-requires.t0000644000175000017500000000436712676531525017304 0ustar mpmmpm00000000000000 $ hg init t $ cd t $ echo a > a $ hg add a $ hg commit -m test $ rm .hg/requires $ hg tip abort: index 00changelog.i unknown format 2! [255] $ echo indoor-pool > .hg/requires $ hg tip abort: repository requires features unknown to this Mercurial: indoor-pool! (see https://mercurial-scm.org/wiki/MissingRequirement for more information) [255] $ echo outdoor-pool >> .hg/requires $ hg tip abort: repository requires features unknown to this Mercurial: indoor-pool outdoor-pool! (see https://mercurial-scm.org/wiki/MissingRequirement for more information) [255] $ cd .. 
Test checking between features supported locally and ones required in another repository of push/pull/clone on localhost: $ mkdir supported-locally $ cd supported-locally $ hg init supported $ echo a > supported/a $ hg -R supported commit -Am '#0 at supported' adding a $ echo 'featuresetup-test' >> supported/.hg/requires $ cat > $TESTTMP/supported-locally/supportlocally.py < from mercurial import localrepo, extensions > def featuresetup(ui, supported): > for name, module in extensions.extensions(ui): > if __name__ == module.__name__: > # support specific feature locally > supported |= set(['featuresetup-test']) > return > def uisetup(ui): > localrepo.localrepository.featuresetupfuncs.add(featuresetup) > EOF $ cat > supported/.hg/hgrc < [extensions] > # enable extension locally > supportlocally = $TESTTMP/supported-locally/supportlocally.py > EOF $ hg -R supported status $ hg init push-dst $ hg -R supported push push-dst pushing to push-dst abort: required features are not supported in the destination: featuresetup-test [255] $ hg init pull-src $ hg -R pull-src pull supported pulling from supported abort: required features are not supported in the destination: featuresetup-test [255] $ hg clone supported clone-dst abort: repository requires features unknown to this Mercurial: featuresetup-test! (see https://mercurial-scm.org/wiki/MissingRequirement for more information) [255] $ hg clone --pull supported clone-dst abort: required features are not supported in the destination: featuresetup-test [255] $ cd .. 
mercurial-3.7.3/tests/test-diff-upgrade.t0000644000175000017500000001421412676531525017772 0ustar mpmmpm00000000000000#require execbit $ cat <> $HGRCPATH > [extensions] > autodiff = $TESTDIR/autodiff.py > [diff] > nodates = 1 > EOF $ hg init repo $ cd repo make a combination of new, changed and deleted file $ echo regular > regular $ echo rmregular > rmregular $ $PYTHON -c "file('bintoregular', 'wb').write('\0')" $ touch rmempty $ echo exec > exec $ chmod +x exec $ echo rmexec > rmexec $ chmod +x rmexec $ echo setexec > setexec $ echo unsetexec > unsetexec $ chmod +x unsetexec $ echo binary > binary $ $PYTHON -c "file('rmbinary', 'wb').write('\0')" $ hg ci -Am addfiles adding binary adding bintoregular adding exec adding regular adding rmbinary adding rmempty adding rmexec adding rmregular adding setexec adding unsetexec $ echo regular >> regular $ echo newregular >> newregular $ rm rmempty $ touch newempty $ rm rmregular $ echo exec >> exec $ echo newexec > newexec $ echo bintoregular > bintoregular $ chmod +x newexec $ rm rmexec $ chmod +x setexec $ chmod -x unsetexec $ $PYTHON -c "file('binary', 'wb').write('\0\0')" $ $PYTHON -c "file('newbinary', 'wb').write('\0')" $ rm rmbinary $ hg addremove -s 0 adding newbinary adding newempty adding newexec adding newregular removing rmbinary removing rmempty removing rmexec removing rmregular git=no: regular diff for all files $ hg autodiff --git=no diff -r a66d19b9302d binary Binary file binary has changed diff -r a66d19b9302d bintoregular Binary file bintoregular has changed diff -r a66d19b9302d exec --- a/exec +++ b/exec @@ -1,1 +1,2 @@ exec +exec diff -r a66d19b9302d newbinary Binary file newbinary has changed diff -r a66d19b9302d newexec --- /dev/null +++ b/newexec @@ -0,0 +1,1 @@ +newexec diff -r a66d19b9302d newregular --- /dev/null +++ b/newregular @@ -0,0 +1,1 @@ +newregular diff -r a66d19b9302d regular --- a/regular +++ b/regular @@ -1,1 +1,2 @@ regular +regular diff -r a66d19b9302d rmbinary Binary file rmbinary 
has changed diff -r a66d19b9302d rmexec --- a/rmexec +++ /dev/null @@ -1,1 +0,0 @@ -rmexec diff -r a66d19b9302d rmregular --- a/rmregular +++ /dev/null @@ -1,1 +0,0 @@ -rmregular git=yes: git diff for single regular file $ hg autodiff --git=yes regular diff --git a/regular b/regular --- a/regular +++ b/regular @@ -1,1 +1,2 @@ regular +regular git=auto: regular diff for regular files and non-binary removals $ hg autodiff --git=auto regular newregular rmregular rmexec diff -r a66d19b9302d newregular --- /dev/null +++ b/newregular @@ -0,0 +1,1 @@ +newregular diff -r a66d19b9302d regular --- a/regular +++ b/regular @@ -1,1 +1,2 @@ regular +regular diff -r a66d19b9302d rmexec --- a/rmexec +++ /dev/null @@ -1,1 +0,0 @@ -rmexec diff -r a66d19b9302d rmregular --- a/rmregular +++ /dev/null @@ -1,1 +0,0 @@ -rmregular $ for f in exec newexec setexec unsetexec binary newbinary newempty rmempty rmbinary bintoregular; do > echo > echo '% git=auto: git diff for' $f > hg autodiff --git=auto $f > done % git=auto: git diff for exec diff -r a66d19b9302d exec --- a/exec +++ b/exec @@ -1,1 +1,2 @@ exec +exec % git=auto: git diff for newexec diff --git a/newexec b/newexec new file mode 100755 --- /dev/null +++ b/newexec @@ -0,0 +1,1 @@ +newexec % git=auto: git diff for setexec diff --git a/setexec b/setexec old mode 100644 new mode 100755 % git=auto: git diff for unsetexec diff --git a/unsetexec b/unsetexec old mode 100755 new mode 100644 % git=auto: git diff for binary diff --git a/binary b/binary index a9128c283485202893f5af379dd9beccb6e79486..09f370e38f498a462e1ca0faa724559b6630c04f GIT binary patch literal 2 Jc${Nk0000200961 % git=auto: git diff for newbinary diff --git a/newbinary b/newbinary new file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..f76dd238ade08917e6712764a16a22005a50573d GIT binary patch literal 1 Ic${MZ000310RR91 % git=auto: git diff for newempty diff --git a/newempty b/newempty new file mode 100644 % git=auto: git diff for rmempty diff --git 
a/rmempty b/rmempty deleted file mode 100644 % git=auto: git diff for rmbinary diff --git a/rmbinary b/rmbinary deleted file mode 100644 index f76dd238ade08917e6712764a16a22005a50573d..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 GIT binary patch literal 0 Hc$@ xargs "$check_code" --warnings --per-file=0 || false Skipping hgext/zeroconf/Zeroconf.py it has no-che?k-code (glob) Skipping i18n/polib.py it has no-che?k-code (glob) Skipping mercurial/httpclient/__init__.py it has no-che?k-code (glob) Skipping mercurial/httpclient/_readers.py it has no-che?k-code (glob) Skipping mercurial/httpclient/socketutil.py it has no-che?k-code (glob) mercurial-3.7.3/tests/test-duplicateoptions.py0000644000175000017500000000170412676531525021210 0ustar mpmmpm00000000000000import os from mercurial import ui, commands, extensions ignore = set(['highlight', 'win32text', 'factotum']) if os.name != 'nt': ignore.add('win32mbcs') disabled = [ext for ext in extensions.disabled().keys() if ext not in ignore] hgrc = open(os.environ["HGRCPATH"], 'w') hgrc.write('[extensions]\n') for ext in disabled: hgrc.write(ext + '=\n') hgrc.close() u = ui.ui() extensions.loadall(u) globalshort = set() globallong = set() for option in commands.globalopts: option[0] and globalshort.add(option[0]) option[1] and globallong.add(option[1]) for cmd, entry in commands.table.iteritems(): seenshort = globalshort.copy() seenlong = globallong.copy() for option in entry[1]: if (option[0] and option[0] in seenshort) or \ (option[1] and option[1] in seenlong): print "command '" + cmd + "' has duplicate option " + str(option) seenshort.add(option[0]) seenlong.add(option[1]) mercurial-3.7.3/tests/test-bad-pull.t0000644000175000017500000000066512676531525017142 0ustar mpmmpm00000000000000#require serve killdaemons #if windows $ hg clone http://localhost:$HGPORT/ copy abort: * (glob) [255] #else $ hg clone http://localhost:$HGPORT/ copy abort: error: Connection refused [255] #endif $ test -d copy [1] $ python 
"$TESTDIR/dumbhttp.py" -p $HGPORT --pid dumb.pid $ cat dumb.pid >> $DAEMON_PIDS $ hg clone http://localhost:$HGPORT/foo copy2 abort: HTTP Error 404: * (glob) [255] $ killdaemons.py mercurial-3.7.3/tests/test-newbranch.t0000644000175000017500000001722312676531525017407 0ustar mpmmpm00000000000000 $ branchcache=.hg/cache/branch2 $ listbranchcaches() { > for f in .hg/cache/branch2*; > do echo === $f ===; > cat $f; > done; > } $ purgebranchcaches() { > rm .hg/cache/branch2* > } $ hg init t $ cd t $ hg branches $ echo foo > a $ hg add a $ hg ci -m "initial" $ hg branch foo marked working directory as branch foo (branches are permanent and global, did you want a bookmark?) $ hg branch foo $ hg ci -m "add branch name" $ hg branch bar marked working directory as branch bar $ hg ci -m "change branch name" Branch shadowing: $ hg branch default abort: a branch of the same name already exists (use 'hg update' to switch to it) [255] $ hg branch -f default marked working directory as branch default $ hg ci -m "clear branch name" created new head There should be only one default branch head $ hg heads . 
changeset: 3:1c28f494dae6 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: clear branch name Merging and branches $ hg co foo 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg branch foo set existing branch name fails unless force - setting existing parent branch works without force: $ hg branch bar abort: a branch of the same name already exists (use 'hg update' to switch to it) [255] $ hg branch -f bar marked working directory as branch bar $ hg branch foo marked working directory as branch foo $ echo bleah > a $ hg ci -m "modify a branch" $ hg merge default 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg branch foo set existing branch name where branch head is ancestor: $ hg branch bar abort: a branch of the same name already exists (use 'hg update' to switch to it) [255] set (other) parent branch as branch name $ hg branch default marked working directory as branch default set (first) parent branch as branch name $ hg branch foo marked working directory as branch foo $ hg ci -m "merge" $ hg log -G -T '{rev}:{node|short} {branch} {desc}\n' @ 5:530046499edf foo merge |\ | o 4:adf1a74a7f7b foo modify a branch | | o | 3:1c28f494dae6 default clear branch name | | o | 2:c21617b13b22 bar change branch name |/ o 1:6c0e42da283a foo add branch name | o 0:db01e8ea3388 default initial $ hg branches foo 5:530046499edf default 3:1c28f494dae6 (inactive) bar 2:c21617b13b22 (inactive) $ hg branches -q foo default bar Test for invalid branch cache: $ hg rollback repository tip rolled back to revision 4 (undo commit) working directory now based on revisions 4 and 3 $ cp ${branchcache}-served .hg/bc-invalid $ hg log -r foo changeset: 4:adf1a74a7f7b branch: foo tag: tip parent: 1:6c0e42da283a user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: modify a branch $ cp .hg/bc-invalid $branchcache $ hg --debug log -r foo changeset: 4:adf1a74a7f7b4cd193d12992f5d0d6a004ed21d6 
branch: foo tag: tip phase: draft parent: 1:6c0e42da283a56b5edc5b4fadb491365ec7f5fa8 parent: -1:0000000000000000000000000000000000000000 manifest: 1:8c342a37dfba0b3d3ce073562a00d8a813c54ffe user: test date: Thu Jan 01 00:00:00 1970 +0000 files: a extra: branch=foo description: modify a branch $ purgebranchcaches $ echo corrupted > $branchcache $ hg log -qr foo 4:adf1a74a7f7b $ listbranchcaches === .hg/cache/branch2 === corrupted === .hg/cache/branch2-served === adf1a74a7f7b4cd193d12992f5d0d6a004ed21d6 4 c21617b13b220988e7a2e26290fbe4325ffa7139 o bar 1c28f494dae69a2f8fc815059d257eccf3fcfe75 o default adf1a74a7f7b4cd193d12992f5d0d6a004ed21d6 o foo Push should update the branch cache: $ hg init ../target Pushing just rev 0: $ hg push -qr 0 ../target $ (cd ../target/; listbranchcaches) === .hg/cache/branch2-base === db01e8ea3388fd3c7c94e1436ea2bd6a53d581c5 0 db01e8ea3388fd3c7c94e1436ea2bd6a53d581c5 o default Pushing everything: $ hg push -qf ../target $ (cd ../target/; listbranchcaches) === .hg/cache/branch2-base === adf1a74a7f7b4cd193d12992f5d0d6a004ed21d6 4 c21617b13b220988e7a2e26290fbe4325ffa7139 o bar 1c28f494dae69a2f8fc815059d257eccf3fcfe75 o default adf1a74a7f7b4cd193d12992f5d0d6a004ed21d6 o foo Update with no arguments: tipmost revision of the current branch: $ hg up -q -C 0 $ hg up -q $ hg id 1c28f494dae6 $ hg up -q 1 $ hg up -q $ hg id adf1a74a7f7b (foo) tip $ hg branch foobar marked working directory as branch foobar $ hg up abort: branch foobar not found [255] Fast-forward merge: $ hg branch ff marked working directory as branch ff $ echo ff > ff $ hg ci -Am'fast forward' adding ff $ hg up foo 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge ff 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg branch foo $ hg commit -m'Merge ff into foo' $ hg parents changeset: 6:185ffbfefa30 branch: foo tag: tip parent: 4:adf1a74a7f7b parent: 5:1a3c27dc5e11 user: test date: Thu Jan 
01 00:00:00 1970 +0000 summary: Merge ff into foo $ hg manifest a ff Test merging, add 3 default heads and one test head: $ cd .. $ hg init merges $ cd merges $ echo a > a $ hg ci -Ama adding a $ echo b > b $ hg ci -Amb adding b $ hg up 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo c > c $ hg ci -Amc adding c created new head $ hg up 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo d > d $ hg ci -Amd adding d created new head $ hg up 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg branch test marked working directory as branch test (branches are permanent and global, did you want a bookmark?) $ echo e >> e $ hg ci -Ame adding e $ hg log changeset: 4:3a1e01ed1df4 branch: test tag: tip parent: 0:cb9a9f314b8b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: e changeset: 3:980f7dc84c29 parent: 0:cb9a9f314b8b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: d changeset: 2:d36c0562f908 parent: 0:cb9a9f314b8b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: c changeset: 1:d2ae7f538514 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: b changeset: 0:cb9a9f314b8b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a Implicit merge with test branch as parent: $ hg merge abort: branch 'test' has one head - please merge with an explicit rev (run 'hg heads' to see all heads) [255] $ hg up -C default 1 files updated, 0 files merged, 1 files removed, 0 files unresolved Implicit merge with default branch as parent: $ hg merge abort: branch 'default' has 3 heads - please merge with an explicit rev (run 'hg heads .' 
to see heads) [255] 3 branch heads, explicit merge required: $ hg merge 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m merge 2 branch heads, implicit merge works: $ hg merge 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cd .. mercurial-3.7.3/tests/test-merge10.t0000644000175000017500000000254112676531525016675 0ustar mpmmpm00000000000000Test for changeset 9fe267f77f56ff127cf7e65dc15dd9de71ce8ceb (merge correctly when all the files in a directory are moved but then local changes are added in the same directory) $ hg init a $ cd a $ mkdir -p testdir $ echo a > testdir/a $ hg add testdir/a $ hg commit -m a $ cd .. $ hg clone a b updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd a $ echo alpha > testdir/a $ hg commit -m remote-change $ cd .. $ cd b $ mkdir testdir/subdir $ hg mv testdir/a testdir/subdir/a $ hg commit -m move $ mkdir newdir $ echo beta > newdir/beta $ hg add newdir/beta $ hg commit -m local-addition $ hg pull ../a pulling from ../a searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg up -C 2 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge merging testdir/subdir/a and testdir/a to testdir/subdir/a 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg stat M testdir/subdir/a $ hg diff --nodates diff -r bc21c9773bfa testdir/subdir/a --- a/testdir/subdir/a +++ b/testdir/subdir/a @@ -1,1 +1,1 @@ -a +alpha $ cd .. 
mercurial-3.7.3/tests/test-issue842.t0000644000175000017500000000132512676531525017022 0ustar mpmmpm00000000000000https://bz.mercurial-scm.org/842 $ hg init $ echo foo > a $ hg ci -Ama adding a $ hg up -r0000 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo bar > a Should issue new head warning: $ hg ci -Amb adding a created new head $ hg up -r0000 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo stuffy > a Should not issue new head warning: $ hg ci -q -Amc $ hg up -r0000 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo crap > a $ hg branch testing marked working directory as branch testing (branches are permanent and global, did you want a bookmark?) Should not issue warning: $ hg ci -q -Amd mercurial-3.7.3/tests/test-encode.t0000644000175000017500000000205012676531525016665 0ustar mpmmpm00000000000000Test encode/decode filters $ hg init $ cat > .hg/hgrc < [encode] > not.gz = tr [:lower:] [:upper:] > *.gz = gzip -d > [decode] > not.gz = tr [:upper:] [:lower:] > *.gz = gzip > EOF $ echo "this is a test" | gzip > a.gz $ echo "this is a test" > not.gz $ hg add * $ hg ci -m "test" no changes $ hg status $ touch * no changes $ hg status check contents in repo are encoded $ hg debugdata a.gz 0 this is a test $ hg debugdata not.gz 0 THIS IS A TEST check committed content was decoded $ gunzip < a.gz this is a test $ cat not.gz this is a test $ rm * $ hg co -C 2 files updated, 0 files merged, 0 files removed, 0 files unresolved check decoding of our new working dir copy $ gunzip < a.gz this is a test $ cat not.gz this is a test check hg cat operation $ hg cat a.gz this is a test $ hg cat --decode a.gz | gunzip this is a test $ mkdir subdir $ cd subdir $ hg -R .. cat ../a.gz this is a test $ hg -R .. cat --decode ../a.gz | gunzip this is a test $ cd .. 
mercurial-3.7.3/tests/test-glog-topological.t0000644000175000017500000000252012676531525020674 0ustar mpmmpm00000000000000This test file aims at test topological iteration and the various configuration it can has. $ cat >> $HGRCPATH << EOF > [ui] > logtemplate={rev}\n > EOF On this simple example, all topological branch are displayed in turn until we can finally display 0. this implies skipping from 8 to 3 and coming back to 7 later. $ hg init test01 $ cd test01 $ hg unbundle $TESTDIR/bundles/remote.hg adding changesets adding manifests adding file changes added 9 changesets with 7 changes to 4 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg log -G o 8 | | o 7 | | | o 6 | | | o 5 | | | o 4 | | o | 3 | | o | 2 | | o | 1 |/ o 0 (display all nodes) $ hg --config experimental.graph-group-branches=1 log -G o 8 | o 3 | o 2 | o 1 | | o 7 | | | o 6 | | | o 5 | | | o 4 |/ o 0 (revset skipping nodes) $ hg --config experimental.graph-group-branches=1 log -G --rev 'not (2+6)' o 8 | o 3 | o 1 | | o 7 | | | o 5 | | | o 4 |/ o 0 (begin) from the other branch $ hg --config experimental.graph-group-branches=1 --config experimental.graph-group-branches.firstbranch=5 log -G o 7 | o 6 | o 5 | o 4 | | o 8 | | | o 3 | | | o 2 | | | o 1 |/ o 0 mercurial-3.7.3/tests/test-simplemerge.py.out0000644000175000017500000000016312676531525020737 0ustar mpmmpm00000000000000................ 
---------------------------------------------------------------------- Ran 16 tests in 0.000s OK mercurial-3.7.3/tests/test-annotate.t0000644000175000017500000002434112676531525017250 0ustar mpmmpm00000000000000 $ HGMERGE=true; export HGMERGE init $ hg init repo $ cd repo commit $ echo 'a' > a $ hg ci -A -m test -u nobody -d '1 0' adding a annotate -c $ hg annotate -c a 8435f90966e4: a annotate -cl $ hg annotate -cl a 8435f90966e4:1: a annotate -d $ hg annotate -d a Thu Jan 01 00:00:01 1970 +0000: a annotate -n $ hg annotate -n a 0: a annotate -nl $ hg annotate -nl a 0:1: a annotate -u $ hg annotate -u a nobody: a annotate -cdnu $ hg annotate -cdnu a nobody 0 8435f90966e4 Thu Jan 01 00:00:01 1970 +0000: a annotate -cdnul $ hg annotate -cdnul a nobody 0 8435f90966e4 Thu Jan 01 00:00:01 1970 +0000:1: a annotate (JSON) $ hg annotate -Tjson a [ { "line": "a\n", "rev": 0 } ] $ hg annotate -Tjson -cdfnul a [ { "date": [1.0, 0], "file": "a", "line": "a\n", "line_number": 1, "node": "8435f90966e442695d2ded29fdade2bac5ad8065", "rev": 0, "user": "nobody" } ] $ cat <>a > a > a > EOF $ hg ci -ma1 -d '1 0' $ hg cp a b $ hg ci -mb -d '1 0' $ cat <> b > b4 > b5 > b6 > EOF $ hg ci -mb2 -d '2 0' annotate -n b $ hg annotate -n b 0: a 1: a 1: a 3: b4 3: b5 3: b6 annotate --no-follow b $ hg annotate --no-follow b 2: a 2: a 2: a 3: b4 3: b5 3: b6 annotate -nl b $ hg annotate -nl b 0:1: a 1:2: a 1:3: a 3:4: b4 3:5: b5 3:6: b6 annotate -nf b $ hg annotate -nf b 0 a: a 1 a: a 1 a: a 3 b: b4 3 b: b5 3 b: b6 annotate -nlf b $ hg annotate -nlf b 0 a:1: a 1 a:2: a 1 a:3: a 3 b:4: b4 3 b:5: b5 3 b:6: b6 $ hg up -C 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat <> b > b4 > c > b5 > EOF $ hg ci -mb2.1 -d '2 0' created new head $ hg merge merging b 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -mmergeb -d '3 0' annotate after merge $ hg annotate -nf b 0 a: a 1 a: a 1 a: a 3 b: b4 4 b: c 3 b: b5 
annotate after merge with -l $ hg annotate -nlf b 0 a:1: a 1 a:2: a 1 a:3: a 3 b:4: b4 4 b:5: c 3 b:5: b5 $ hg up -C 1 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg cp a b $ cat < b > a > z > a > EOF $ hg ci -mc -d '3 0' created new head $ hg merge merging b 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat <> b > b4 > c > b5 > EOF $ echo d >> b $ hg ci -mmerge2 -d '4 0' annotate after rename merge $ hg annotate -nf b 0 a: a 6 b: z 1 a: a 3 b: b4 4 b: c 3 b: b5 7 b: d annotate after rename merge with -l $ hg annotate -nlf b 0 a:1: a 6 b:2: z 1 a:3: a 3 b:4: b4 4 b:5: c 3 b:5: b5 7 b:7: d Issue2807: alignment of line numbers with -l $ echo more >> b $ hg ci -mmore -d '5 0' $ echo more >> b $ hg ci -mmore -d '6 0' $ echo more >> b $ hg ci -mmore -d '7 0' $ hg annotate -nlf b 0 a: 1: a 6 b: 2: z 1 a: 3: a 3 b: 4: b4 4 b: 5: c 3 b: 5: b5 7 b: 7: d 8 b: 8: more 9 b: 9: more 10 b:10: more linkrev vs rev $ hg annotate -r tip -n a 0: a 1: a 1: a linkrev vs rev with -l $ hg annotate -r tip -nl a 0:1: a 1:2: a 1:3: a Issue589: "undelete" sequence leads to crash annotate was crashing when trying to --follow something like A -> B -> A generate ABA rename configuration $ echo foo > foo $ hg add foo $ hg ci -m addfoo $ hg rename foo bar $ hg ci -m renamefoo $ hg rename bar foo $ hg ci -m renamebar annotate after ABA with follow $ hg annotate --follow foo foo: foo missing file $ hg ann nosuchfile abort: nosuchfile: no such file in rev e9e6b4fa872f [255] annotate file without '\n' on last line $ printf "" > c $ hg ci -A -m test -u nobody -d '1 0' adding c $ hg annotate c $ printf "a\nb" > c $ hg ci -m test $ hg annotate c [0-9]+: a (re) [0-9]+: b (re) Issue3841: check annotation of the file of which filelog includes merging between the revision and its ancestor to reproduce the situation with recent Mercurial, this script uses (1) "hg debugsetparents" to merge without ancestor check by "hg 
merge", and (2) the extension to allow filelog merging between the revision and its ancestor by overriding "repo._filecommit". $ cat > ../legacyrepo.py < from mercurial import node, error > def reposetup(ui, repo): > class legacyrepo(repo.__class__): > def _filecommit(self, fctx, manifest1, manifest2, > linkrev, tr, changelist): > fname = fctx.path() > text = fctx.data() > flog = self.file(fname) > fparent1 = manifest1.get(fname, node.nullid) > fparent2 = manifest2.get(fname, node.nullid) > meta = {} > copy = fctx.renamed() > if copy and copy[0] != fname: > raise error.Abort('copying is not supported') > if fparent2 != node.nullid: > changelist.append(fname) > return flog.add(text, meta, tr, linkrev, > fparent1, fparent2) > raise error.Abort('only merging is supported') > repo.__class__ = legacyrepo > EOF $ cat > baz < 1 > 2 > 3 > 4 > 5 > EOF $ hg add baz $ hg commit -m "baz:0" $ cat > baz < 1 baz:1 > 2 > 3 > 4 > 5 > EOF $ hg commit -m "baz:1" $ cat > baz < 1 baz:1 > 2 baz:2 > 3 > 4 > 5 > EOF $ hg debugsetparents 17 17 $ hg --config extensions.legacyrepo=../legacyrepo.py commit -m "baz:2" $ hg debugindexdot .hg/store/data/baz.i digraph G { -1 -> 0 0 -> 1 1 -> 2 1 -> 2 } $ hg annotate baz 17: 1 baz:1 18: 2 baz:2 16: 3 16: 4 16: 5 $ cat > baz < 1 baz:1 > 2 baz:2 > 3 baz:3 > 4 > 5 > EOF $ hg commit -m "baz:3" $ cat > baz < 1 baz:1 > 2 baz:2 > 3 baz:3 > 4 baz:4 > 5 > EOF $ hg debugsetparents 19 18 $ hg --config extensions.legacyrepo=../legacyrepo.py commit -m "baz:4" $ hg debugindexdot .hg/store/data/baz.i digraph G { -1 -> 0 0 -> 1 1 -> 2 1 -> 2 2 -> 3 3 -> 4 2 -> 4 } $ hg annotate baz 17: 1 baz:1 18: 2 baz:2 19: 3 baz:3 20: 4 baz:4 16: 5 annotate clean file $ hg annotate -ncr "wdir()" foo 11 472b18db256d : foo annotate modified file $ echo foofoo >> foo $ hg annotate -r "wdir()" foo 11 : foo 20+: foofoo $ hg annotate -cr "wdir()" foo 472b18db256d : foo b6bedd5477e7+: foofoo $ hg annotate -ncr "wdir()" foo 11 472b18db256d : foo 20 b6bedd5477e7+: foofoo $ hg annotate 
--debug -ncr "wdir()" foo 11 472b18db256d1e8282064eab4bfdaf48cbfe83cd : foo 20 b6bedd5477e797f25e568a6402d4697f3f895a72+: foofoo $ hg annotate -udr "wdir()" foo test Thu Jan 01 00:00:00 1970 +0000: foo test [A-Za-z0-9:+ ]+: foofoo (re) $ hg annotate -ncr "wdir()" -Tjson foo [ { "line": "foo\n", "node": "472b18db256d1e8282064eab4bfdaf48cbfe83cd", "rev": 11 }, { "line": "foofoo\n", "node": null, "rev": null } ] annotate added file $ echo bar > bar $ hg add bar $ hg annotate -ncr "wdir()" bar 20 b6bedd5477e7+: bar annotate renamed file $ hg rename foo renamefoo2 $ hg annotate -ncr "wdir()" renamefoo2 11 472b18db256d : foo 20 b6bedd5477e7+: foofoo annotate missing file $ rm baz #if windows $ hg annotate -ncr "wdir()" baz abort: $TESTTMP\repo\baz: The system cannot find the file specified [255] #else $ hg annotate -ncr "wdir()" baz abort: No such file or directory: $TESTTMP/repo/baz [255] #endif annotate removed file $ hg rm baz #if windows $ hg annotate -ncr "wdir()" baz abort: $TESTTMP\repo\baz: The system cannot find the file specified [255] #else $ hg annotate -ncr "wdir()" baz abort: No such file or directory: $TESTTMP/repo/baz [255] #endif Test annotate with whitespace options $ cd .. $ hg init repo-ws $ cd repo-ws $ cat > a < aa > > b b > EOF $ hg ci -Am "adda" adding a $ sed 's/EOL$//g' > a < a a > > EOL > b b > EOF $ hg ci -m "changea" Annotate with no option $ hg annotate a 1: a a 0: 1: 1: b b Annotate with --ignore-space-change $ hg annotate --ignore-space-change a 1: a a 1: 0: 0: b b Annotate with --ignore-all-space $ hg annotate --ignore-all-space a 0: a a 0: 1: 0: b b Annotate with --ignore-blank-lines (similar to no options case) $ hg annotate --ignore-blank-lines a 1: a a 0: 1: 1: b b $ cd .. 
Annotate with linkrev pointing to another branch ------------------------------------------------ create history with a filerev whose linkrev points to another branch $ hg init branchedlinkrev $ cd branchedlinkrev $ echo A > a $ hg commit -Am 'contentA' adding a $ echo B >> a $ hg commit -m 'contentB' $ hg up --rev 'desc(contentA)' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo unrelated > unrelated $ hg commit -Am 'unrelated' adding unrelated created new head $ hg graft -r 'desc(contentB)' grafting 1:fd27c222e3e6 "contentB" $ echo C >> a $ hg commit -m 'contentC' $ echo W >> a $ hg log -G @ changeset: 4:072f1e8df249 | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: contentC | o changeset: 3:ff38df03cc4b | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: contentB | o changeset: 2:62aaf3f6fc06 | parent: 0:f0932f74827e | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: unrelated | | o changeset: 1:fd27c222e3e6 |/ user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: contentB | o changeset: 0:f0932f74827e user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: contentA Annotate should list ancestor of starting revision only $ hg annotate a 0: A 3: B 4: C $ hg annotate a -r 'wdir()' 0 : A 3 : B 4 : C 4+: W Even when the starting revision is the linkrev-shadowed one: $ hg annotate a -r 3 0: A 3: B $ cd .. 
mercurial-3.7.3/tests/test-largefiles-small-disk.t0000644000175000017500000000336012676531525021610 0ustar mpmmpm00000000000000Test how largefiles abort in case the disk runs full $ cat > criple.py < import os, errno, shutil > from mercurial import util > # > # this makes the original largefiles code abort: > def copyfileobj(fsrc, fdst, length=16*1024): > fdst.write(fsrc.read(4)) > raise IOError(errno.ENOSPC, os.strerror(errno.ENOSPC)) > shutil.copyfileobj = copyfileobj > # > # this makes the rewritten code abort: > def filechunkiter(f, size=65536, limit=None): > yield f.read(4) > raise IOError(errno.ENOSPC, os.strerror(errno.ENOSPC)) > util.filechunkiter = filechunkiter > # > def oslink(src, dest): > raise OSError("no hardlinks, try copying instead") > util.oslink = oslink > EOF $ echo "[extensions]" >> $HGRCPATH $ echo "largefiles =" >> $HGRCPATH $ hg init alice $ cd alice $ echo "this is a very big file" > big $ hg add --large big $ hg commit --config extensions.criple=$TESTTMP/criple.py -m big abort: No space left on device [255] The largefile is not created in .hg/largefiles: $ ls .hg/largefiles dirstate The user cache is not even created: >>> import os; os.path.exists("$HOME/.cache/largefiles/") False Make the commit with space on the device: $ hg commit -m big Now make a clone with a full disk, and make sure lfutil.link function makes copies instead of hardlinks: $ cd .. 
$ hg --config extensions.criple=$TESTTMP/criple.py clone --pull alice bob requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch default getting changed largefiles abort: No space left on device [255] The largefile is not created in .hg/largefiles: $ ls bob/.hg/largefiles dirstate mercurial-3.7.3/tests/test-filecache.py.out0000644000175000017500000000210312676531525020325 0ustar mpmmpm00000000000000basic: * neither file exists creating * neither file still exists * empty file x created creating * file x changed size creating * nothing changed with either file * file x changed inode creating * empty file y created creating * file y changed size creating * file y changed inode creating * both files changed inode creating fakeuncacheable: * neither file exists creating * neither file still exists creating * empty file x created creating * file x changed size creating * nothing changed with either file creating * file x changed inode creating * empty file y created creating * file y changed size creating * file y changed inode creating * both files changed inode creating repository tip rolled back to revision -1 (undo commit) working directory now based on revision -1 repository tip rolled back to revision -1 (undo commit) working directory now based on revision -1 setbeforeget: * neither file exists string set externally * file x created creating string from function * string set externally again string 2 set externally * file y created creating string from function mercurial-3.7.3/tests/test-bundle-type.t0000644000175000017500000000536312676531525017672 0ustar mpmmpm00000000000000 $ cat << EOF >> $HGRCPATH > [format] > usegeneraldelta=yes > EOF bundle w/o type option $ hg init t1 $ hg init t2 $ cd t1 $ echo blablablablabla > file.txt $ hg ci -Ama adding file.txt $ hg log | grep summary summary: a $ hg bundle ../b1 ../t2 searching for changes 1 changesets found $ cd ../t2 $ hg pull 
../b1 pulling from ../b1 requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (run 'hg update' to get a working copy) $ hg up 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log | grep summary summary: a $ cd .. test bundle types $ for t in "None" "bzip2" "gzip" "none-v2" "v2" "v1" "gzip-v1"; do > echo % test bundle type $t > hg init t$t > cd t1 > hg bundle -t $t ../b$t ../t$t > f -q -B6 -D ../b$t; echo > cd ../t$t > hg debugbundle ../b$t > hg debugbundle --spec ../b$t > echo > cd .. > done % test bundle type None searching for changes 1 changesets found HG20\x00\x00 (esc) Stream params: {} changegroup -- "{'version': '02'}" c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf none-v2 % test bundle type bzip2 searching for changes 1 changesets found HG20\x00\x00 (esc) Stream params: {'Compression': 'BZ'} changegroup -- "{'version': '02'}" c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf bzip2-v2 % test bundle type gzip searching for changes 1 changesets found HG20\x00\x00 (esc) Stream params: {'Compression': 'GZ'} changegroup -- "{'version': '02'}" c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf gzip-v2 % test bundle type none-v2 searching for changes 1 changesets found HG20\x00\x00 (esc) Stream params: {} changegroup -- "{'version': '02'}" c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf none-v2 % test bundle type v2 searching for changes 1 changesets found HG20\x00\x00 (esc) Stream params: {'Compression': 'BZ'} changegroup -- "{'version': '02'}" c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf bzip2-v2 % test bundle type v1 searching for changes 1 changesets found HG10BZ c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf bzip2-v1 % test bundle type gzip-v1 searching for changes 1 changesets found HG10GZ c35a0f9217e65d1fdb90c936ffa7dbe679f83ddf gzip-v1 test garbage file $ echo garbage > bgarbage $ hg init tgarbage $ cd tgarbage $ hg pull ../bgarbage pulling from ../bgarbage abort: ../bgarbage: not a Mercurial bundle [255] 
$ cd .. test invalid bundle type $ cd t1 $ hg bundle -a -t garbage ../bgarbage abort: garbage is not a recognized bundle specification (see "hg help bundle" for supported values for --type) [255] $ cd .. mercurial-3.7.3/tests/test-revset-dirstate-parents.t0000644000175000017500000000157112676531525022236 0ustar mpmmpm00000000000000 $ HGENCODING=utf-8 $ export HGENCODING $ try() { > hg debugrevspec --debug $@ > } $ log() { > hg log --template '{rev}\n' -r "$1" > } $ hg init repo $ cd repo $ try 'p1()' (func ('symbol', 'p1') None) * set: $ try 'p2()' (func ('symbol', 'p2') None) * set: $ try 'parents()' (func ('symbol', 'parents') None) * set: null revision $ log 'p1()' $ log 'p2()' $ log 'parents()' working dir with a single parent $ echo a > a $ hg ci -Aqm0 $ log 'p1()' 0 $ log 'tag() and p1()' $ log 'p2()' $ log 'parents()' 0 $ log 'tag() and parents()' merge in progress $ echo b > b $ hg ci -Aqm1 $ hg up -q 0 $ echo c > c $ hg ci -Aqm2 $ hg merge -q $ log 'p1()' 2 $ log 'p2()' 1 $ log 'tag() and p2()' $ log 'parents()' 1 2 $ cd .. 
mercurial-3.7.3/tests/seq.py0000755000175000017500000000067412676531525015445 0ustar mpmmpm00000000000000#!/usr/bin/env python # # A portable replacement for 'seq' # # Usage: # seq STOP [1, STOP] stepping by 1 # seq START STOP [START, STOP] stepping by 1 # seq START STEP STOP [START, STOP] stepping by STEP import sys start = 1 if len(sys.argv) > 2: start = int(sys.argv[1]) step = 1 if len(sys.argv) > 3: step = int(sys.argv[2]) stop = int(sys.argv[-1]) + 1 for i in xrange(start, stop, step): print i mercurial-3.7.3/tests/test-mq-qpush-fail.t0000644000175000017500000002270712676531525020127 0ustar mpmmpm00000000000000Test that qpush cleans things up if it doesn't complete $ echo "[extensions]" >> $HGRCPATH $ echo "mq=" >> $HGRCPATH $ hg init repo $ cd repo $ echo foo > foo $ hg ci -Am 'add foo' adding foo $ touch untracked-file $ echo 'syntax: glob' > .hgignore $ echo '.hgignore' >> .hgignore $ hg qinit test qpush on empty series $ hg qpush no patches in series $ hg qnew patch1 $ echo >> foo $ hg qrefresh -m 'patch 1' $ hg qnew patch2 $ echo bar > bar $ hg add bar $ hg qrefresh -m 'patch 2' $ hg qnew --config 'mq.plain=true' -U bad-patch $ echo >> foo $ hg qrefresh $ hg qpop -a popping bad-patch popping patch2 popping patch1 patch queue now empty $ $PYTHON -c 'print "\xe9"' > message $ cat .hg/patches/bad-patch >> message $ mv message .hg/patches/bad-patch $ cat > $TESTTMP/wrapplayback.py < import os > from mercurial import extensions, transaction > def wrapplayback(orig, > journal, report, opener, vfsmap, entries, backupentries, > unlink=True): > orig(journal, report, opener, vfsmap, entries, backupentries, unlink) > # Touching files truncated at "transaction.abort" causes > # forcible re-loading invalidated filecache properties > # (including repo.changelog) > for f, o, _ignore in entries: > if o or not unlink: > os.utime(opener.join(f), (0.0, 0.0)) > def extsetup(ui): > extensions.wrapfunction(transaction, '_playback', wrapplayback) > EOF $ hg qpush -a --config 
extensions.wrapplayback=$TESTTMP/wrapplayback.py && echo 'qpush succeeded?!' applying patch1 applying patch2 applying bad-patch transaction abort! rollback completed cleaning up working directory... reverting foo done abort: decoding near '\xe9': 'ascii' codec can't decode byte 0xe9 in position 0: ordinal not in range(128)! (esc) [255] $ hg parents changeset: 0:bbd179dfa0a7 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add foo test corrupt status file $ hg qpush applying patch1 now at: patch1 $ cp .hg/patches/status .hg/patches/status.orig $ hg qpop popping patch1 patch queue now empty $ cp .hg/patches/status.orig .hg/patches/status $ hg qpush abort: working directory revision is not qtip [255] $ rm .hg/patches/status .hg/patches/status.orig bar should be gone; other unknown/ignored files should still be around $ hg status -A ? untracked-file I .hgignore C foo preparing qpush of a missing patch $ hg qpop -a no patches applied $ hg qpush applying patch1 now at: patch1 $ rm .hg/patches/patch2 now we expect the push to fail, but it should NOT complain about patch1 $ hg qpush applying patch2 unable to read patch2 now at: patch1 [1] preparing qpush of missing patch with no patch applied $ hg qpop -a popping patch1 patch queue now empty $ rm .hg/patches/patch1 qpush should fail the same way as below $ hg qpush applying patch1 unable to read patch1 [1] Test qpush to a patch below the currently applied patch. 
$ hg qq -c guardedseriesorder $ hg qnew a $ hg qguard +block $ hg qnew b $ hg qnew c $ hg qpop -a popping c popping b popping a patch queue now empty try to push and pop while a is guarded $ hg qpush a cannot push 'a' - guarded by '+block' [1] $ hg qpush -a applying b patch b is empty applying c patch c is empty now at: c now try it when a is unguarded, and we're at the top of the queue $ hg qapplied -v 0 G a 1 A b 2 A c $ hg qsel block $ hg qpush b abort: cannot push to a previous patch: b [255] $ hg qpush a abort: cannot push to a previous patch: a [255] and now we try it one more time with a unguarded, while we're not at the top of the queue $ hg qpop b popping c now at: b $ hg qpush a abort: cannot push to a previous patch: a [255] test qpop --force and backup files $ hg qpop -a popping b patch queue now empty $ hg qq --create force $ echo a > a $ echo b > b $ echo c > c $ hg ci -Am add a b c $ echo a >> a $ hg rm b $ hg rm c $ hg qnew p1 $ echo a >> a $ echo bb > b $ hg add b $ echo cc > c $ hg add c $ hg qpop --force --verbose saving current version of a as a.orig saving current version of b as b.orig saving current version of c as c.orig popping p1 patch queue now empty $ hg st ? a.orig ? b.orig ? c.orig ? 
untracked-file $ cat a.orig a a a $ cat b.orig bb $ cat c.orig cc test qpop --force --no-backup $ hg qpush applying p1 now at: p1 $ rm a.orig $ echo a >> a $ hg qpop --force --no-backup --verbose popping p1 patch queue now empty $ test -f a.orig && echo 'error: backup with --no-backup' [1] test qpop --keep-changes $ hg qpush applying p1 now at: p1 $ hg qpop --keep-changes --force abort: cannot use both --force and --keep-changes [255] $ echo a >> a $ hg qpop --keep-changes abort: local changes found, qrefresh first [255] $ hg revert -qa a $ rm a $ hg qpop --keep-changes abort: local changes found, qrefresh first [255] $ hg rm -A a $ hg qpop --keep-changes abort: local changes found, qrefresh first [255] $ hg revert -qa a $ echo b > b $ hg add b $ hg qpop --keep-changes abort: local changes found, qrefresh first [255] $ hg forget b $ echo d > d $ hg add d $ hg qpop --keep-changes popping p1 patch queue now empty $ hg forget d $ rm d test qpush --force and backup files $ echo a >> a $ hg qnew p2 $ echo b >> b $ echo d > d $ echo e > e $ hg add d e $ hg rm c $ hg qnew p3 $ hg qpop -a popping p3 popping p2 patch queue now empty $ echo a >> a $ echo b1 >> b $ echo d1 > d $ hg add d $ echo e1 > e $ hg qpush -a --force --verbose applying p2 saving current version of a as a.orig patching file a committing files: a committing manifest committing changelog applying p3 saving current version of b as b.orig saving current version of d as d.orig patching file b patching file c patching file d file d already exists 1 out of 1 hunks FAILED -- saving rejects to file d.rej patching file e file e already exists 1 out of 1 hunks FAILED -- saving rejects to file e.rej patch failed to apply committing files: b committing manifest committing changelog patch failed, rejects left in working directory errors during apply, please fix and qrefresh p3 [2] $ cat a.orig a a $ cat b.orig b b1 $ cat d.orig d1 test qpush --force --no-backup $ hg revert -qa $ hg qpop -a popping p3 popping p2 patch 
queue now empty $ echo a >> a $ rm a.orig $ hg qpush --force --no-backup --verbose applying p2 patching file a committing files: a committing manifest committing changelog now at: p2 $ test -f a.orig && echo 'error: backup with --no-backup' [1] test qgoto --force --no-backup $ hg qpop popping p2 patch queue now empty $ echo a >> a $ hg qgoto --force --no-backup p2 --verbose applying p2 patching file a committing files: a committing manifest committing changelog now at: p2 $ test -f a.orig && echo 'error: backup with --no-backup' [1] test qpush --keep-changes $ hg qpush --keep-changes --force abort: cannot use both --force and --keep-changes [255] $ hg qpush --keep-changes --exact abort: cannot use --exact and --keep-changes together [255] $ echo b >> b $ hg qpush --keep-changes applying p3 abort: conflicting local changes found (did you forget to qrefresh?) [255] $ rm b $ hg qpush --keep-changes applying p3 abort: conflicting local changes found (did you forget to qrefresh?) [255] $ hg rm -A b $ hg qpush --keep-changes applying p3 abort: conflicting local changes found (did you forget to qrefresh?) [255] $ hg revert -aq b $ echo d > d $ hg add d $ hg qpush --keep-changes applying p3 abort: conflicting local changes found (did you forget to qrefresh?) [255] $ hg forget d $ rm d $ hg qpop popping p2 patch queue now empty $ echo b >> b $ hg qpush -a --keep-changes applying p2 applying p3 abort: conflicting local changes found (did you forget to qrefresh?) 
[255] $ hg qtop p2 $ hg parents --template "{rev} {desc}\n" 2 imported patch p2 $ hg st b M b $ cat b b b test qgoto --keep-changes $ hg revert -aq b $ rm e $ hg qgoto --keep-changes --force p3 abort: cannot use both --force and --keep-changes [255] $ echo a >> a $ hg qgoto --keep-changes p3 applying p3 now at: p3 $ hg st a M a $ hg qgoto --keep-changes p2 popping p3 now at: p2 $ hg st a M a test mq.keepchanges setting $ hg --config mq.keepchanges=1 qpush applying p3 now at: p3 $ hg st a M a $ hg --config mq.keepchanges=1 qpop popping p3 now at: p2 $ hg st a M a $ hg --config mq.keepchanges=1 qgoto p3 applying p3 now at: p3 $ hg st a M a $ echo b >> b $ hg --config mq.keepchanges=1 qpop --force --config 'ui.origbackuppath=.hg/origbackups' popping p3 now at: p2 $ hg st b $ hg --config mq.keepchanges=1 qpush --exact abort: local changes found, qrefresh first [255] $ hg revert -qa a $ hg qpop popping p2 patch queue now empty $ echo a >> a $ hg --config mq.keepchanges=1 qpush --force applying p2 now at: p2 $ hg st a test previous qpop (with --force and --config) saved .orig files to where user wants them $ ls .hg/origbackups b.orig $ rm -rf .hg/origbackups $ cd .. mercurial-3.7.3/tests/test-push-http.t0000644000175000017500000001306412676531525017373 0ustar mpmmpm00000000000000#require killdaemons $ hg init test $ cd test $ echo a > a $ hg ci -Ama adding a $ cd .. $ hg clone test test2 updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd test2 $ echo a >> a $ hg ci -mb $ req() { > hg serve -p $HGPORT -d --pid-file=hg.pid -E errors.log > cat hg.pid >> $DAEMON_PIDS > hg --cwd ../test2 push http://localhost:$HGPORT/ > exitstatus=$? 
> killdaemons.py > echo % serve errors > cat errors.log > return $exitstatus > } $ cd ../test expect ssl error $ req pushing to http://localhost:$HGPORT/ searching for changes abort: HTTP Error 403: ssl required % serve errors [255] expect authorization error $ echo '[web]' > .hg/hgrc $ echo 'push_ssl = false' >> .hg/hgrc $ req pushing to http://localhost:$HGPORT/ searching for changes abort: authorization failed % serve errors [255] expect authorization error: must have authorized user $ echo 'allow_push = unperson' >> .hg/hgrc $ req pushing to http://localhost:$HGPORT/ searching for changes abort: authorization failed % serve errors [255] expect success $ echo 'allow_push = *' >> .hg/hgrc $ echo '[hooks]' >> .hg/hgrc $ echo "changegroup = printenv.py changegroup 0" >> .hg/hgrc $ echo "pushkey = printenv.py pushkey 0" >> .hg/hgrc $ req pushing to http://localhost:$HGPORT/ searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: pushkey hook: HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NAMESPACE=phases HG_NEW=0 HG_OLD=1 HG_RET=1 remote: changegroup hook: HG_BUNDLE2=1 HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_TXNID=TXN:* HG_URL=remote:http:127.0.0.1: (glob) % serve errors $ hg rollback repository tip rolled back to revision 0 (undo serve) expect success, server lacks the httpheader capability $ CAP=httpheader $ . 
"$TESTDIR/notcapable" $ req pushing to http://localhost:$HGPORT/ searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: pushkey hook: HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NAMESPACE=phases HG_NEW=0 HG_OLD=1 HG_RET=1 remote: changegroup hook: HG_BUNDLE2=1 HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_TXNID=TXN:* HG_URL=remote:http:127.0.0.1: (glob) % serve errors $ hg rollback repository tip rolled back to revision 0 (undo serve) expect success, server lacks the unbundlehash capability $ CAP=unbundlehash $ . "$TESTDIR/notcapable" $ req pushing to http://localhost:$HGPORT/ searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: pushkey hook: HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NAMESPACE=phases HG_NEW=0 HG_OLD=1 HG_RET=1 remote: changegroup hook: HG_BUNDLE2=1 HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_SOURCE=serve HG_TXNID=TXN:* HG_URL=remote:http:127.0.0.1: (glob) % serve errors $ hg rollback repository tip rolled back to revision 0 (undo serve) expect push success, phase change failure $ cat > .hg/hgrc < [web] > push_ssl = false > allow_push = * > [hooks] > prepushkey = printenv.py prepushkey 1 > EOF $ req pushing to http://localhost:$HGPORT/ searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: prepushkey hook: HG_BUNDLE2=1 HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NAMESPACE=phases HG_NEW=0 HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_OLD=1 HG_PENDING=$TESTTMP/test HG_PHASES_MOVED=1 HG_SOURCE=serve HG_TXNID=TXN:* 
HG_URL=remote:http:127.0.0.1: (glob) remote: pushkey-abort: prepushkey hook exited with status 1 remote: transaction abort! remote: rollback completed abort: updating ba677d0156c1 to public failed % serve errors [255] expect phase change success $ echo "prepushkey = printenv.py prepushkey 0" >> .hg/hgrc $ req pushing to http://localhost:$HGPORT/ searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: prepushkey hook: HG_BUNDLE2=1 HG_KEY=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NAMESPACE=phases HG_NEW=0 HG_NODE=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_NODE_LAST=ba677d0156c1196c1a699fa53f390dcfc3ce3872 HG_OLD=1 HG_PENDING=$TESTTMP/test HG_PHASES_MOVED=1 HG_SOURCE=serve HG_TXNID=TXN:* HG_URL=remote:http:127.0.0.1: (glob) % serve errors $ hg rollback repository tip rolled back to revision 0 (undo serve) expect authorization error: all users denied $ echo '[web]' > .hg/hgrc $ echo 'push_ssl = false' >> .hg/hgrc $ echo 'deny_push = *' >> .hg/hgrc $ req pushing to http://localhost:$HGPORT/ searching for changes abort: authorization failed % serve errors [255] expect authorization error: some users denied, users must be authenticated $ echo 'deny_push = unperson' >> .hg/hgrc $ req pushing to http://localhost:$HGPORT/ searching for changes abort: authorization failed % serve errors [255] $ cd .. mercurial-3.7.3/tests/test-histedit-bookmark-motion.t0000644000175000017500000001224012676531525022355 0ustar mpmmpm00000000000000 $ . 
"$TESTDIR/histedit-helpers.sh" $ cat >> $HGRCPATH < [extensions] > histedit= > EOF $ hg init r $ cd r $ for x in a b c d e f ; do > echo $x > $x > hg add $x > hg ci -m $x > done $ hg book -r 1 will-move-backwards $ hg book -r 2 two $ hg book -r 2 also-two $ hg book -r 3 three $ hg book -r 4 four $ hg book -r tip five $ hg log --graph @ changeset: 5:652413bf663e | bookmark: five | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: f | o changeset: 4:e860deea161a | bookmark: four | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: e | o changeset: 3:055a42cdd887 | bookmark: three | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: d | o changeset: 2:177f92b77385 | bookmark: also-two | bookmark: two | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: c | o changeset: 1:d2ae7f538514 | bookmark: will-move-backwards | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 0:cb9a9f314b8b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a $ HGEDITOR=cat hg histedit 1 pick d2ae7f538514 1 b pick 177f92b77385 2 c pick 055a42cdd887 3 d pick e860deea161a 4 e pick 652413bf663e 5 f # Edit history between d2ae7f538514 and 652413bf663e # # Commits are listed from least to most recent # # Commands: # # e, edit = use commit, but stop for amending # m, mess = edit commit message without changing commit content # p, pick = use commit # d, drop = remove commit from history # f, fold = use commit, but combine it with the one above # r, roll = like fold, but discard this commit's description # $ hg histedit 1 --commands - --verbose << EOF | grep histedit > pick 177f92b77385 2 c > drop d2ae7f538514 1 b > pick 055a42cdd887 3 d > fold e860deea161a 4 e > pick 652413bf663e 5 f > EOF saved backup bundle to $TESTTMP/r/.hg/strip-backup/96e494a2d553-3c6c5d92-backup.hg (glob) histedit: moving bookmarks also-two from 177f92b77385 to b346ab9a313d histedit: moving bookmarks five from 652413bf663e to cacdfd884a93 
histedit: moving bookmarks four from e860deea161a to 59d9f330561f histedit: moving bookmarks three from 055a42cdd887 to 59d9f330561f histedit: moving bookmarks two from 177f92b77385 to b346ab9a313d histedit: moving bookmarks will-move-backwards from d2ae7f538514 to cb9a9f314b8b saved backup bundle to $TESTTMP/r/.hg/strip-backup/d2ae7f538514-48787b8d-backup.hg (glob) $ hg log --graph @ changeset: 3:cacdfd884a93 | bookmark: five | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: f | o changeset: 2:59d9f330561f | bookmark: four | bookmark: three | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: d | o changeset: 1:b346ab9a313d | bookmark: also-two | bookmark: two | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: c | o changeset: 0:cb9a9f314b8b bookmark: will-move-backwards user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a $ HGEDITOR=cat hg histedit 1 pick b346ab9a313d 1 c pick 59d9f330561f 2 d pick cacdfd884a93 3 f # Edit history between b346ab9a313d and cacdfd884a93 # # Commits are listed from least to most recent # # Commands: # # e, edit = use commit, but stop for amending # m, mess = edit commit message without changing commit content # p, pick = use commit # d, drop = remove commit from history # f, fold = use commit, but combine it with the one above # r, roll = like fold, but discard this commit's description # $ hg histedit 1 --commands - --verbose << EOF | grep histedit > pick b346ab9a313d 1 c > pick cacdfd884a93 3 f > pick 59d9f330561f 2 d > EOF histedit: moving bookmarks five from cacdfd884a93 to c04e50810e4b histedit: moving bookmarks four from 59d9f330561f to c04e50810e4b histedit: moving bookmarks three from 59d9f330561f to c04e50810e4b saved backup bundle to $TESTTMP/r/.hg/strip-backup/59d9f330561f-073008af-backup.hg (glob) We expect 'five' to stay at tip, since the tipmost bookmark is most likely the useful signal. 
$ hg log --graph @ changeset: 3:c04e50810e4b | bookmark: five | bookmark: four | bookmark: three | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: d | o changeset: 2:c13eb81022ca | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: f | o changeset: 1:b346ab9a313d | bookmark: also-two | bookmark: two | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: c | o changeset: 0:cb9a9f314b8b bookmark: will-move-backwards user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a mercurial-3.7.3/tests/test-bundle2-format.t0000644000175000017500000015162312676531525020264 0ustar mpmmpm00000000000000This test is dedicated to test the bundle2 container format It test multiple existing parts to test different feature of the container. You probably do not need to touch this test unless you change the binary encoding of the bundle2 format itself. Create an extension to test bundle2 API $ cat > bundle2.py << EOF > """A small extension to test bundle2 implementation > > Current bundle2 implementation is far too limited to be used in any core > code. We still need to be able to test it while it grow up. > """ > > import sys, os, gc > from mercurial import cmdutil > from mercurial import util > from mercurial import bundle2 > from mercurial import scmutil > from mercurial import discovery > from mercurial import changegroup > from mercurial import error > from mercurial import obsolete > > > try: > import msvcrt > msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY) > msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY) > msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY) > except ImportError: > pass > > cmdtable = {} > command = cmdutil.command(cmdtable) > > ELEPHANTSSONG = """Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko > Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko > Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko.""" > assert len(ELEPHANTSSONG) == 178 # future test say 178 bytes, trust it. 
> > @bundle2.parthandler('test:song') > def songhandler(op, part): > """handle a "test:song" bundle2 part, printing the lyrics on stdin""" > op.ui.write('The choir starts singing:\n') > verses = 0 > for line in part.read().split('\n'): > op.ui.write(' %s\n' % line) > verses += 1 > op.records.add('song', {'verses': verses}) > > @bundle2.parthandler('test:ping') > def pinghandler(op, part): > op.ui.write('received ping request (id %i)\n' % part.id) > if op.reply is not None and 'ping-pong' in op.reply.capabilities: > op.ui.write_err('replying to ping request (id %i)\n' % part.id) > op.reply.newpart('test:pong', [('in-reply-to', str(part.id))], > mandatory=False) > > @bundle2.parthandler('test:debugreply') > def debugreply(op, part): > """print data about the capacity of the bundle reply""" > if op.reply is None: > op.ui.write('debugreply: no reply\n') > else: > op.ui.write('debugreply: capabilities:\n') > for cap in sorted(op.reply.capabilities): > op.ui.write('debugreply: %r\n' % cap) > for val in op.reply.capabilities[cap]: > op.ui.write('debugreply: %r\n' % val) > > @command('bundle2', > [('', 'param', [], 'stream level parameter'), > ('', 'unknown', False, 'include an unknown mandatory part in the bundle'), > ('', 'unknownparams', False, 'include an unknown part parameters in the bundle'), > ('', 'parts', False, 'include some arbitrary parts to the bundle'), > ('', 'reply', False, 'produce a reply bundle'), > ('', 'pushrace', False, 'includes a check:head part with unknown nodes'), > ('', 'genraise', False, 'includes a part that raise an exception during generation'), > ('', 'timeout', False, 'emulate a timeout during bundle generation'), > ('r', 'rev', [], 'includes those changeset in the bundle'), > ('', 'compress', '', 'compress the stream'),], > '[OUTPUTFILE]') > def cmdbundle2(ui, repo, path=None, **opts): > """write a bundle2 container on standard output""" > bundler = bundle2.bundle20(ui) > for p in opts['param']: > p = p.split('=', 1) > try: > 
bundler.addparam(*p) > except ValueError, exc: > raise error.Abort('%s' % exc) > > if opts['compress']: > bundler.setcompression(opts['compress']) > > if opts['reply']: > capsstring = 'ping-pong\nelephants=babar,celeste\ncity%3D%21=celeste%2Cville' > bundler.newpart('replycaps', data=capsstring) > > if opts['pushrace']: > # also serve to test the assignement of data outside of init > part = bundler.newpart('check:heads') > part.data = '01234567890123456789' > > revs = opts['rev'] > if 'rev' in opts: > revs = scmutil.revrange(repo, opts['rev']) > if revs: > # very crude version of a changegroup part creation > bundled = repo.revs('%ld::%ld', revs, revs) > headmissing = [c.node() for c in repo.set('heads(%ld)', revs)] > headcommon = [c.node() for c in repo.set('parents(%ld) - %ld', revs, revs)] > outgoing = discovery.outgoing(repo.changelog, headcommon, headmissing) > cg = changegroup.getlocalchangegroup(repo, 'test:bundle2', outgoing, None) > bundler.newpart('changegroup', data=cg.getchunks(), > mandatory=False) > > if opts['parts']: > bundler.newpart('test:empty', mandatory=False) > # add a second one to make sure we handle multiple parts > bundler.newpart('test:empty', mandatory=False) > bundler.newpart('test:song', data=ELEPHANTSSONG, mandatory=False) > bundler.newpart('test:debugreply', mandatory=False) > mathpart = bundler.newpart('test:math') > mathpart.addparam('pi', '3.14') > mathpart.addparam('e', '2.72') > mathpart.addparam('cooking', 'raw', mandatory=False) > mathpart.data = '42' > mathpart.mandatory = False > # advisory known part with unknown mandatory param > bundler.newpart('test:song', [('randomparam','')], mandatory=False) > if opts['unknown']: > bundler.newpart('test:unknown', data='some random content') > if opts['unknownparams']: > bundler.newpart('test:song', [('randomparams', '')]) > if opts['parts']: > bundler.newpart('test:ping', mandatory=False) > if opts['genraise']: > def genraise(): > yield 'first line\n' > raise RuntimeError('Someone set 
up us the bomb!') > bundler.newpart('output', data=genraise(), mandatory=False) > > if path is None: > file = sys.stdout > else: > file = open(path, 'wb') > > if opts['timeout']: > bundler.newpart('test:song', data=ELEPHANTSSONG, mandatory=False) > for idx, junk in enumerate(bundler.getchunks()): > ui.write('%d chunk\n' % idx) > if idx > 4: > # This throws a GeneratorExit inside the generator, which > # can cause problems if the exception-recovery code is > # too zealous. It's important for this test that the break > # occur while we're in the middle of a part. > break > gc.collect() > ui.write('fake timeout complete.\n') > return > try: > for chunk in bundler.getchunks(): > file.write(chunk) > except RuntimeError, exc: > raise error.Abort(exc) > > @command('unbundle2', [], '') > def cmdunbundle2(ui, repo, replypath=None): > """process a bundle2 stream from stdin on the current repo""" > try: > tr = None > lock = repo.lock() > tr = repo.transaction('processbundle') > try: > unbundler = bundle2.getunbundler(ui, sys.stdin) > op = bundle2.processbundle(repo, unbundler, lambda: tr) > tr.close() > except error.BundleValueError, exc: > raise error.Abort('missing support for %s' % exc) > except error.PushRaced, exc: > raise error.Abort('push race: %s' % exc) > finally: > if tr is not None: > tr.release() > lock.release() > remains = sys.stdin.read() > ui.write('%i unread bytes\n' % len(remains)) > if op.records['song']: > totalverses = sum(r['verses'] for r in op.records['song']) > ui.write('%i total verses sung\n' % totalverses) > for rec in op.records['changegroup']: > ui.write('addchangegroup return: %i\n' % rec['return']) > if op.reply is not None and replypath is not None: > file = open(replypath, 'wb') > for chunk in op.reply.getchunks(): > file.write(chunk) > > @command('statbundle2', [], '') > def cmdstatbundle2(ui, repo): > """print statistic on the bundle2 container read from stdin""" > unbundler = bundle2.getunbundler(ui, sys.stdin) > try: > params = 
unbundler.params > except error.BundleValueError, exc: > raise error.Abort('unknown parameters: %s' % exc) > ui.write('options count: %i\n' % len(params)) > for key in sorted(params): > ui.write('- %s\n' % key) > value = params[key] > if value is not None: > ui.write(' %s\n' % value) > count = 0 > for p in unbundler.iterparts(): > count += 1 > ui.write(' :%s:\n' % p.type) > ui.write(' mandatory: %i\n' % len(p.mandatoryparams)) > ui.write(' advisory: %i\n' % len(p.advisoryparams)) > ui.write(' payload: %i bytes\n' % len(p.read())) > ui.write('parts count: %i\n' % count) > EOF $ cat >> $HGRCPATH << EOF > [extensions] > bundle2=$TESTTMP/bundle2.py > [experimental] > bundle2-exp=True > evolution=createmarkers > [ui] > ssh=python "$TESTDIR/dummyssh" > logtemplate={rev}:{node|short} {phase} {author} {bookmarks} {desc|firstline} > [web] > push_ssl = false > allow_push = * > [phases] > publish=False > EOF The extension requires a repo (currently unused) $ hg init main $ cd main $ touch a $ hg add a $ hg commit -m 'a' Empty bundle ================= - no option - no parts Test bundling $ hg bundle2 | f --hexdump 0000: 48 47 32 30 00 00 00 00 00 00 00 00 |HG20........| Test timeouts during bundling $ hg bundle2 --timeout --debug --config devel.bundle2.debug=yes bundle2-output-bundle: "HG20", 1 parts total bundle2-output: start emission of HG20 stream 0 chunk bundle2-output: bundle parameter: 1 chunk bundle2-output: start of parts bundle2-output: bundle part: "test:song" bundle2-output-part: "test:song" (advisory) 178 bytes payload bundle2-output: part 0: "test:song" bundle2-output: header chunk size: 16 2 chunk 3 chunk bundle2-output: payload chunk size: 178 4 chunk 5 chunk bundle2-generatorexit fake timeout complete. 
Test unbundling $ hg bundle2 | hg statbundle2 options count: 0 parts count: 0 Test old style bundle are detected and refused $ hg bundle --all --type v1 ../bundle.hg 1 changesets found $ hg statbundle2 < ../bundle.hg abort: unknown bundle version 10 [255] Test parameters ================= - some options - no parts advisory parameters, no value ------------------------------- Simplest possible parameters form Test generation simple option $ hg bundle2 --param 'caution' | f --hexdump 0000: 48 47 32 30 00 00 00 07 63 61 75 74 69 6f 6e 00 |HG20....caution.| 0010: 00 00 00 |...| Test unbundling $ hg bundle2 --param 'caution' | hg statbundle2 options count: 1 - caution parts count: 0 Test generation multiple option $ hg bundle2 --param 'caution' --param 'meal' | f --hexdump 0000: 48 47 32 30 00 00 00 0c 63 61 75 74 69 6f 6e 20 |HG20....caution | 0010: 6d 65 61 6c 00 00 00 00 |meal....| Test unbundling $ hg bundle2 --param 'caution' --param 'meal' | hg statbundle2 options count: 2 - caution - meal parts count: 0 advisory parameters, with value ------------------------------- Test generation $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | f --hexdump 0000: 48 47 32 30 00 00 00 1c 63 61 75 74 69 6f 6e 20 |HG20....caution | 0010: 6d 65 61 6c 3d 76 65 67 61 6e 20 65 6c 65 70 68 |meal=vegan eleph| 0020: 61 6e 74 73 00 00 00 00 |ants....| Test unbundling $ hg bundle2 --param 'caution' --param 'meal=vegan' --param 'elephants' | hg statbundle2 options count: 3 - caution - elephants - meal vegan parts count: 0 parameter with special char in value --------------------------------------------------- Test generation $ hg bundle2 --param 'e|! 
7/=babar%#==tutu' --param simple | f --hexdump 0000: 48 47 32 30 00 00 00 29 65 25 37 43 25 32 31 25 |HG20...)e%7C%21%| 0010: 32 30 37 2f 3d 62 61 62 61 72 25 32 35 25 32 33 |207/=babar%25%23| 0020: 25 33 44 25 33 44 74 75 74 75 20 73 69 6d 70 6c |%3D%3Dtutu simpl| 0030: 65 00 00 00 00 |e....| Test unbundling $ hg bundle2 --param 'e|! 7/=babar%#==tutu' --param simple | hg statbundle2 options count: 2 - e|! 7/ babar%#==tutu - simple parts count: 0 Test unknown mandatory option --------------------------------------------------- $ hg bundle2 --param 'Gravity' | hg statbundle2 abort: unknown parameters: Stream Parameter - Gravity [255] Test debug output --------------------------------------------------- bundling debug $ hg bundle2 --debug --param 'e|! 7/=babar%#==tutu' --param simple ../out.hg2 --config progress.debug=true --config devel.bundle2.debug=true bundle2-output-bundle: "HG20", (2 params) 0 parts total bundle2-output: start emission of HG20 stream bundle2-output: bundle parameter: e%7C%21%207/=babar%25%23%3D%3Dtutu simple bundle2-output: start of parts bundle2-output: end of bundle file content is ok $ f --hexdump ../out.hg2 ../out.hg2: 0000: 48 47 32 30 00 00 00 29 65 25 37 43 25 32 31 25 |HG20...)e%7C%21%| 0010: 32 30 37 2f 3d 62 61 62 61 72 25 32 35 25 32 33 |207/=babar%25%23| 0020: 25 33 44 25 33 44 74 75 74 75 20 73 69 6d 70 6c |%3D%3Dtutu simpl| 0030: 65 00 00 00 00 |e....| unbundling debug $ hg statbundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true < ../out.hg2 bundle2-input: start processing of HG20 stream bundle2-input: reading bundle2 stream parameters bundle2-input: ignoring unknown parameter 'e|! 7/' bundle2-input: ignoring unknown parameter 'simple' options count: 2 - e|! 
7/ babar%#==tutu - simple bundle2-input: start extraction of bundle2 parts bundle2-input: part header size: 0 bundle2-input: end of bundle2 stream parts count: 0 Test buggy input --------------------------------------------------- empty parameter name $ hg bundle2 --param '' --quiet abort: empty parameter name [255] bad parameter name $ hg bundle2 --param 42babar abort: non letter first character: '42babar' [255] Test part ================= $ hg bundle2 --parts ../parts.hg2 --debug --config progress.debug=true --config devel.bundle2.debug=true bundle2-output-bundle: "HG20", 7 parts total bundle2-output: start emission of HG20 stream bundle2-output: bundle parameter: bundle2-output: start of parts bundle2-output: bundle part: "test:empty" bundle2-output-part: "test:empty" (advisory) empty payload bundle2-output: part 0: "test:empty" bundle2-output: header chunk size: 17 bundle2-output: closing payload chunk bundle2-output: bundle part: "test:empty" bundle2-output-part: "test:empty" (advisory) empty payload bundle2-output: part 1: "test:empty" bundle2-output: header chunk size: 17 bundle2-output: closing payload chunk bundle2-output: bundle part: "test:song" bundle2-output-part: "test:song" (advisory) 178 bytes payload bundle2-output: part 2: "test:song" bundle2-output: header chunk size: 16 bundle2-output: payload chunk size: 178 bundle2-output: closing payload chunk bundle2-output: bundle part: "test:debugreply" bundle2-output-part: "test:debugreply" (advisory) empty payload bundle2-output: part 3: "test:debugreply" bundle2-output: header chunk size: 22 bundle2-output: closing payload chunk bundle2-output: bundle part: "test:math" bundle2-output-part: "test:math" (advisory) (params: 2 mandatory 2 advisory) 2 bytes payload bundle2-output: part 4: "test:math" bundle2-output: header chunk size: 43 bundle2-output: payload chunk size: 2 bundle2-output: closing payload chunk bundle2-output: bundle part: "test:song" bundle2-output-part: "test:song" (advisory) (params: 1 
mandatory) empty payload bundle2-output: part 5: "test:song" bundle2-output: header chunk size: 29 bundle2-output: closing payload chunk bundle2-output: bundle part: "test:ping" bundle2-output-part: "test:ping" (advisory) empty payload bundle2-output: part 6: "test:ping" bundle2-output: header chunk size: 16 bundle2-output: closing payload chunk bundle2-output: end of bundle $ f --hexdump ../parts.hg2 ../parts.hg2: 0000: 48 47 32 30 00 00 00 00 00 00 00 11 0a 74 65 73 |HG20.........tes| 0010: 74 3a 65 6d 70 74 79 00 00 00 00 00 00 00 00 00 |t:empty.........| 0020: 00 00 00 00 11 0a 74 65 73 74 3a 65 6d 70 74 79 |......test:empty| 0030: 00 00 00 01 00 00 00 00 00 00 00 00 00 10 09 74 |...............t| 0040: 65 73 74 3a 73 6f 6e 67 00 00 00 02 00 00 00 00 |est:song........| 0050: 00 b2 50 61 74 61 6c 69 20 44 69 72 61 70 61 74 |..Patali Dirapat| 0060: 61 2c 20 43 72 6f 6d 64 61 20 43 72 6f 6d 64 61 |a, Cromda Cromda| 0070: 20 52 69 70 61 6c 6f 2c 20 50 61 74 61 20 50 61 | Ripalo, Pata Pa| 0080: 74 61 2c 20 4b 6f 20 4b 6f 20 4b 6f 0a 42 6f 6b |ta, Ko Ko Ko.Bok| 0090: 6f 72 6f 20 44 69 70 6f 75 6c 69 74 6f 2c 20 52 |oro Dipoulito, R| 00a0: 6f 6e 64 69 20 52 6f 6e 64 69 20 50 65 70 69 6e |ondi Rondi Pepin| 00b0: 6f 2c 20 50 61 74 61 20 50 61 74 61 2c 20 4b 6f |o, Pata Pata, Ko| 00c0: 20 4b 6f 20 4b 6f 0a 45 6d 61 6e 61 20 4b 61 72 | Ko Ko.Emana Kar| 00d0: 61 73 73 6f 6c 69 2c 20 4c 6f 75 63 72 61 20 4c |assoli, Loucra L| 00e0: 6f 75 63 72 61 20 50 6f 6e 70 6f 6e 74 6f 2c 20 |oucra Ponponto, | 00f0: 50 61 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f |Pata Pata, Ko Ko| 0100: 20 4b 6f 2e 00 00 00 00 00 00 00 16 0f 74 65 73 | Ko..........tes| 0110: 74 3a 64 65 62 75 67 72 65 70 6c 79 00 00 00 03 |t:debugreply....| 0120: 00 00 00 00 00 00 00 00 00 2b 09 74 65 73 74 3a |.........+.test:| 0130: 6d 61 74 68 00 00 00 04 02 01 02 04 01 04 07 03 |math............| 0140: 70 69 33 2e 31 34 65 32 2e 37 32 63 6f 6f 6b 69 |pi3.14e2.72cooki| 0150: 6e 67 72 61 77 00 00 00 02 34 32 00 00 00 
00 00 |ngraw....42.....| 0160: 00 00 1d 09 74 65 73 74 3a 73 6f 6e 67 00 00 00 |....test:song...| 0170: 05 01 00 0b 00 72 61 6e 64 6f 6d 70 61 72 61 6d |.....randomparam| 0180: 00 00 00 00 00 00 00 10 09 74 65 73 74 3a 70 69 |.........test:pi| 0190: 6e 67 00 00 00 06 00 00 00 00 00 00 00 00 00 00 |ng..............| $ hg statbundle2 < ../parts.hg2 options count: 0 :test:empty: mandatory: 0 advisory: 0 payload: 0 bytes :test:empty: mandatory: 0 advisory: 0 payload: 0 bytes :test:song: mandatory: 0 advisory: 0 payload: 178 bytes :test:debugreply: mandatory: 0 advisory: 0 payload: 0 bytes :test:math: mandatory: 2 advisory: 1 payload: 2 bytes :test:song: mandatory: 1 advisory: 0 payload: 0 bytes :test:ping: mandatory: 0 advisory: 0 payload: 0 bytes parts count: 7 $ hg statbundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true < ../parts.hg2 bundle2-input: start processing of HG20 stream bundle2-input: reading bundle2 stream parameters options count: 0 bundle2-input: start extraction of bundle2 parts bundle2-input: part header size: 17 bundle2-input: part type: "test:empty" bundle2-input: part id: "0" bundle2-input: part parameters: 0 :test:empty: mandatory: 0 advisory: 0 bundle2-input: payload chunk size: 0 payload: 0 bytes bundle2-input: part header size: 17 bundle2-input: part type: "test:empty" bundle2-input: part id: "1" bundle2-input: part parameters: 0 :test:empty: mandatory: 0 advisory: 0 bundle2-input: payload chunk size: 0 payload: 0 bytes bundle2-input: part header size: 16 bundle2-input: part type: "test:song" bundle2-input: part id: "2" bundle2-input: part parameters: 0 :test:song: mandatory: 0 advisory: 0 bundle2-input: payload chunk size: 178 bundle2-input: payload chunk size: 0 bundle2-input-part: total payload size 178 payload: 178 bytes bundle2-input: part header size: 22 bundle2-input: part type: "test:debugreply" bundle2-input: part id: "3" bundle2-input: part parameters: 0 :test:debugreply: mandatory: 0 advisory: 0 
bundle2-input: payload chunk size: 0 payload: 0 bytes bundle2-input: part header size: 43 bundle2-input: part type: "test:math" bundle2-input: part id: "4" bundle2-input: part parameters: 3 :test:math: mandatory: 2 advisory: 1 bundle2-input: payload chunk size: 2 bundle2-input: payload chunk size: 0 bundle2-input-part: total payload size 2 payload: 2 bytes bundle2-input: part header size: 29 bundle2-input: part type: "test:song" bundle2-input: part id: "5" bundle2-input: part parameters: 1 :test:song: mandatory: 1 advisory: 0 bundle2-input: payload chunk size: 0 payload: 0 bytes bundle2-input: part header size: 16 bundle2-input: part type: "test:ping" bundle2-input: part id: "6" bundle2-input: part parameters: 0 :test:ping: mandatory: 0 advisory: 0 bundle2-input: payload chunk size: 0 payload: 0 bytes bundle2-input: part header size: 0 bundle2-input: end of bundle2 stream parts count: 7 Test actual unbundling of test part ======================================= Process the bundle $ hg unbundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true < ../parts.hg2 bundle2-input: start processing of HG20 stream bundle2-input: reading bundle2 stream parameters bundle2-input-bundle: with-transaction bundle2-input: start extraction of bundle2 parts bundle2-input: part header size: 17 bundle2-input: part type: "test:empty" bundle2-input: part id: "0" bundle2-input: part parameters: 0 bundle2-input: ignoring unsupported advisory part test:empty bundle2-input-part: "test:empty" (advisory) unsupported-type bundle2-input: payload chunk size: 0 bundle2-input: part header size: 17 bundle2-input: part type: "test:empty" bundle2-input: part id: "1" bundle2-input: part parameters: 0 bundle2-input: ignoring unsupported advisory part test:empty bundle2-input-part: "test:empty" (advisory) unsupported-type bundle2-input: payload chunk size: 0 bundle2-input: part header size: 16 bundle2-input: part type: "test:song" bundle2-input: part id: "2" bundle2-input: part 
parameters: 0 bundle2-input: found a handler for part 'test:song' bundle2-input-part: "test:song" (advisory) supported The choir starts singing: bundle2-input: payload chunk size: 178 bundle2-input: payload chunk size: 0 bundle2-input-part: total payload size 178 Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko. bundle2-input: part header size: 22 bundle2-input: part type: "test:debugreply" bundle2-input: part id: "3" bundle2-input: part parameters: 0 bundle2-input: found a handler for part 'test:debugreply' bundle2-input-part: "test:debugreply" (advisory) supported debugreply: no reply bundle2-input: payload chunk size: 0 bundle2-input: part header size: 43 bundle2-input: part type: "test:math" bundle2-input: part id: "4" bundle2-input: part parameters: 3 bundle2-input: ignoring unsupported advisory part test:math bundle2-input-part: "test:math" (advisory) (params: 2 mandatory 2 advisory) unsupported-type bundle2-input: payload chunk size: 2 bundle2-input: payload chunk size: 0 bundle2-input-part: total payload size 2 bundle2-input: part header size: 29 bundle2-input: part type: "test:song" bundle2-input: part id: "5" bundle2-input: part parameters: 1 bundle2-input: found a handler for part 'test:song' bundle2-input: ignoring unsupported advisory part test:song - randomparam bundle2-input-part: "test:song" (advisory) (params: 1 mandatory) unsupported-params (['randomparam']) bundle2-input: payload chunk size: 0 bundle2-input: part header size: 16 bundle2-input: part type: "test:ping" bundle2-input: part id: "6" bundle2-input: part parameters: 0 bundle2-input: found a handler for part 'test:ping' bundle2-input-part: "test:ping" (advisory) supported received ping request (id 6) bundle2-input: payload chunk size: 0 bundle2-input: part header size: 0 bundle2-input: end of bundle2 stream bundle2-input-bundle: 6 parts total 0 unread bytes 3 
total verses sung Unbundle with an unknown mandatory part (should abort) $ hg bundle2 --parts --unknown ../unknown.hg2 $ hg unbundle2 < ../unknown.hg2 The choir starts singing: Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko. debugreply: no reply 0 unread bytes abort: missing support for test:unknown [255] Unbundle with an unknown mandatory part parameters (should abort) $ hg bundle2 --unknownparams ../unknown.hg2 $ hg unbundle2 < ../unknown.hg2 0 unread bytes abort: missing support for test:song - randomparams [255] unbundle with a reply $ hg bundle2 --parts --reply ../parts-reply.hg2 $ hg unbundle2 ../reply.hg2 < ../parts-reply.hg2 0 unread bytes 3 total verses sung The reply is a bundle $ f --hexdump ../reply.hg2 ../reply.hg2: 0000: 48 47 32 30 00 00 00 00 00 00 00 1b 06 6f 75 74 |HG20.........out| 0010: 70 75 74 00 00 00 00 00 01 0b 01 69 6e 2d 72 65 |put........in-re| 0020: 70 6c 79 2d 74 6f 33 00 00 00 d9 54 68 65 20 63 |ply-to3....The c| 0030: 68 6f 69 72 20 73 74 61 72 74 73 20 73 69 6e 67 |hoir starts sing| 0040: 69 6e 67 3a 0a 20 20 20 20 50 61 74 61 6c 69 20 |ing:. Patali | 0050: 44 69 72 61 70 61 74 61 2c 20 43 72 6f 6d 64 61 |Dirapata, Cromda| 0060: 20 43 72 6f 6d 64 61 20 52 69 70 61 6c 6f 2c 20 | Cromda Ripalo, | 0070: 50 61 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f |Pata Pata, Ko Ko| 0080: 20 4b 6f 0a 20 20 20 20 42 6f 6b 6f 72 6f 20 44 | Ko. Bokoro D| 0090: 69 70 6f 75 6c 69 74 6f 2c 20 52 6f 6e 64 69 20 |ipoulito, Rondi | 00a0: 52 6f 6e 64 69 20 50 65 70 69 6e 6f 2c 20 50 61 |Rondi Pepino, Pa| 00b0: 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f 20 4b |ta Pata, Ko Ko K| 00c0: 6f 0a 20 20 20 20 45 6d 61 6e 61 20 4b 61 72 61 |o. 
Emana Kara| 00d0: 73 73 6f 6c 69 2c 20 4c 6f 75 63 72 61 20 4c 6f |ssoli, Loucra Lo| 00e0: 75 63 72 61 20 50 6f 6e 70 6f 6e 74 6f 2c 20 50 |ucra Ponponto, P| 00f0: 61 74 61 20 50 61 74 61 2c 20 4b 6f 20 4b 6f 20 |ata Pata, Ko Ko | 0100: 4b 6f 2e 0a 00 00 00 00 00 00 00 1b 06 6f 75 74 |Ko...........out| 0110: 70 75 74 00 00 00 01 00 01 0b 01 69 6e 2d 72 65 |put........in-re| 0120: 70 6c 79 2d 74 6f 34 00 00 00 c9 64 65 62 75 67 |ply-to4....debug| 0130: 72 65 70 6c 79 3a 20 63 61 70 61 62 69 6c 69 74 |reply: capabilit| 0140: 69 65 73 3a 0a 64 65 62 75 67 72 65 70 6c 79 3a |ies:.debugreply:| 0150: 20 20 20 20 20 27 63 69 74 79 3d 21 27 0a 64 65 | 'city=!'.de| 0160: 62 75 67 72 65 70 6c 79 3a 20 20 20 20 20 20 20 |bugreply: | 0170: 20 20 27 63 65 6c 65 73 74 65 2c 76 69 6c 6c 65 | 'celeste,ville| 0180: 27 0a 64 65 62 75 67 72 65 70 6c 79 3a 20 20 20 |'.debugreply: | 0190: 20 20 27 65 6c 65 70 68 61 6e 74 73 27 0a 64 65 | 'elephants'.de| 01a0: 62 75 67 72 65 70 6c 79 3a 20 20 20 20 20 20 20 |bugreply: | 01b0: 20 20 27 62 61 62 61 72 27 0a 64 65 62 75 67 72 | 'babar'.debugr| 01c0: 65 70 6c 79 3a 20 20 20 20 20 20 20 20 20 27 63 |eply: 'c| 01d0: 65 6c 65 73 74 65 27 0a 64 65 62 75 67 72 65 70 |eleste'.debugrep| 01e0: 6c 79 3a 20 20 20 20 20 27 70 69 6e 67 2d 70 6f |ly: 'ping-po| 01f0: 6e 67 27 0a 00 00 00 00 00 00 00 1e 09 74 65 73 |ng'..........tes| 0200: 74 3a 70 6f 6e 67 00 00 00 02 01 00 0b 01 69 6e |t:pong........in| 0210: 2d 72 65 70 6c 79 2d 74 6f 37 00 00 00 00 00 00 |-reply-to7......| 0220: 00 1b 06 6f 75 74 70 75 74 00 00 00 03 00 01 0b |...output.......| 0230: 01 69 6e 2d 72 65 70 6c 79 2d 74 6f 37 00 00 00 |.in-reply-to7...| 0240: 3d 72 65 63 65 69 76 65 64 20 70 69 6e 67 20 72 |=received ping r| 0250: 65 71 75 65 73 74 20 28 69 64 20 37 29 0a 72 65 |equest (id 7).re| 0260: 70 6c 79 69 6e 67 20 74 6f 20 70 69 6e 67 20 72 |plying to ping r| 0270: 65 71 75 65 73 74 20 28 69 64 20 37 29 0a 00 00 |equest (id 7)...| 0280: 00 00 00 00 00 00 |......| The reply is 
valid $ hg statbundle2 < ../reply.hg2 options count: 0 :output: mandatory: 0 advisory: 1 payload: 217 bytes :output: mandatory: 0 advisory: 1 payload: 201 bytes :test:pong: mandatory: 1 advisory: 0 payload: 0 bytes :output: mandatory: 0 advisory: 1 payload: 61 bytes parts count: 4 Unbundle the reply to get the output: $ hg unbundle2 < ../reply.hg2 remote: The choir starts singing: remote: Patali Dirapata, Cromda Cromda Ripalo, Pata Pata, Ko Ko Ko remote: Bokoro Dipoulito, Rondi Rondi Pepino, Pata Pata, Ko Ko Ko remote: Emana Karassoli, Loucra Loucra Ponponto, Pata Pata, Ko Ko Ko. remote: debugreply: capabilities: remote: debugreply: 'city=!' remote: debugreply: 'celeste,ville' remote: debugreply: 'elephants' remote: debugreply: 'babar' remote: debugreply: 'celeste' remote: debugreply: 'ping-pong' remote: received ping request (id 7) remote: replying to ping request (id 7) 0 unread bytes Test push race detection $ hg bundle2 --pushrace ../part-race.hg2 $ hg unbundle2 < ../part-race.hg2 0 unread bytes abort: push race: repository changed while pushing - please try again [255] Support for changegroup =================================== $ hg unbundle $TESTDIR/bundles/rebase.hg adding changesets adding manifests adding file changes added 8 changesets with 7 changes to 7 files (+3 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg log -G o 8:02de42196ebe draft Nicolas Dumazet H | | o 7:eea13746799a draft Nicolas Dumazet G |/| o | 6:24b6387c8c8c draft Nicolas Dumazet F | | | o 5:9520eea781bc draft Nicolas Dumazet E |/ | o 4:32af7686d403 draft Nicolas Dumazet D | | | o 3:5fddd98957c8 draft Nicolas Dumazet C | | | o 2:42ccdea3bb16 draft Nicolas Dumazet B |/ o 1:cd010b8cd998 draft Nicolas Dumazet A @ 0:3903775176ed draft test a $ hg bundle2 --debug --config progress.debug=true --config devel.bundle2.debug=true --rev '8+7+5+4' ../rev.hg2 4 changesets found list of changesets: 32af7686d403cf45b5d95f2d70cebea587ac806a 9520eea781bcca16c1e15acc0ba14335a0e8e5ba 
eea13746799a9e0bfd88f29d3c2e9dc9389f524f 02de42196ebee42ef284b6780a87cdc96e8eaab6 bundle2-output-bundle: "HG20", 1 parts total bundle2-output: start emission of HG20 stream bundle2-output: bundle parameter: bundle2-output: start of parts bundle2-output: bundle part: "changegroup" bundle2-output-part: "changegroup" (advisory) streamed payload bundle2-output: part 0: "changegroup" bundle2-output: header chunk size: 18 bundling: 1/4 changesets (25.00%) bundling: 2/4 changesets (50.00%) bundling: 3/4 changesets (75.00%) bundling: 4/4 changesets (100.00%) bundling: 1/4 manifests (25.00%) bundling: 2/4 manifests (50.00%) bundling: 3/4 manifests (75.00%) bundling: 4/4 manifests (100.00%) bundling: D 1/3 files (33.33%) bundling: E 2/3 files (66.67%) bundling: H 3/3 files (100.00%) bundle2-output: payload chunk size: 1555 bundle2-output: closing payload chunk bundle2-output: end of bundle $ f --hexdump ../rev.hg2 ../rev.hg2: 0000: 48 47 32 30 00 00 00 00 00 00 00 12 0b 63 68 61 |HG20.........cha| 0010: 6e 67 65 67 72 6f 75 70 00 00 00 00 00 00 00 00 |ngegroup........| 0020: 06 13 00 00 00 a4 32 af 76 86 d4 03 cf 45 b5 d9 |......2.v....E..| 0030: 5f 2d 70 ce be a5 87 ac 80 6a 5f dd d9 89 57 c8 |_-p......j_...W.| 0040: a5 4a 4d 43 6d fe 1d a9 d8 7f 21 a1 b9 7b 00 00 |.JMCm.....!..{..| 0050: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................| 0060: 00 00 32 af 76 86 d4 03 cf 45 b5 d9 5f 2d 70 ce |..2.v....E.._-p.| 0070: be a5 87 ac 80 6a 00 00 00 00 00 00 00 29 00 00 |.....j.......)..| 0080: 00 29 36 65 31 66 34 63 34 37 65 63 62 35 33 33 |.)6e1f4c47ecb533| 0090: 66 66 64 30 63 38 65 35 32 63 64 63 38 38 61 66 |ffd0c8e52cdc88af| 00a0: 62 36 63 64 33 39 65 32 30 63 0a 00 00 00 66 00 |b6cd39e20c....f.| 00b0: 00 00 68 00 00 00 02 44 0a 00 00 00 69 00 00 00 |..h....D....i...| 00c0: 6a 00 00 00 01 44 00 00 00 a4 95 20 ee a7 81 bc |j....D..... 
....| 00d0: ca 16 c1 e1 5a cc 0b a1 43 35 a0 e8 e5 ba cd 01 |....Z...C5......| 00e0: 0b 8c d9 98 f3 98 1a 5a 81 15 f9 4f 8d a4 ab 50 |.......Z...O...P| 00f0: 60 89 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |`...............| 0100: 00 00 00 00 00 00 95 20 ee a7 81 bc ca 16 c1 e1 |....... ........| 0110: 5a cc 0b a1 43 35 a0 e8 e5 ba 00 00 00 00 00 00 |Z...C5..........| 0120: 00 29 00 00 00 29 34 64 65 63 65 39 63 38 32 36 |.)...)4dece9c826| 0130: 66 36 39 34 39 30 35 30 37 62 39 38 63 36 33 38 |f69490507b98c638| 0140: 33 61 33 30 30 39 62 32 39 35 38 33 37 64 0a 00 |3a3009b295837d..| 0150: 00 00 66 00 00 00 68 00 00 00 02 45 0a 00 00 00 |..f...h....E....| 0160: 69 00 00 00 6a 00 00 00 01 45 00 00 00 a2 ee a1 |i...j....E......| 0170: 37 46 79 9a 9e 0b fd 88 f2 9d 3c 2e 9d c9 38 9f |7Fy.......<...8.| 0180: 52 4f 24 b6 38 7c 8c 8c ae 37 17 88 80 f3 fa 95 |RO$.8|...7......| 0190: de d3 cb 1c f7 85 95 20 ee a7 81 bc ca 16 c1 e1 |....... ........| 01a0: 5a cc 0b a1 43 35 a0 e8 e5 ba ee a1 37 46 79 9a |Z...C5......7Fy.| 01b0: 9e 0b fd 88 f2 9d 3c 2e 9d c9 38 9f 52 4f 00 00 |......<...8.RO..| 01c0: 00 00 00 00 00 29 00 00 00 29 33 36 35 62 39 33 |.....)...)365b93| 01d0: 64 35 37 66 64 66 34 38 31 34 65 32 62 35 39 31 |d57fdf4814e2b591| 01e0: 31 64 36 62 61 63 66 66 32 62 31 32 30 31 34 34 |1d6bacff2b120144| 01f0: 34 31 0a 00 00 00 66 00 00 00 68 00 00 00 00 00 |41....f...h.....| 0200: 00 00 69 00 00 00 6a 00 00 00 01 47 00 00 00 a4 |..i...j....G....| 0210: 02 de 42 19 6e be e4 2e f2 84 b6 78 0a 87 cd c9 |..B.n......x....| 0220: 6e 8e aa b6 24 b6 38 7c 8c 8c ae 37 17 88 80 f3 |n...$.8|...7....| 0230: fa 95 de d3 cb 1c f7 85 00 00 00 00 00 00 00 00 |................| 0240: 00 00 00 00 00 00 00 00 00 00 00 00 02 de 42 19 |..............B.| 0250: 6e be e4 2e f2 84 b6 78 0a 87 cd c9 6e 8e aa b6 |n......x....n...| 0260: 00 00 00 00 00 00 00 29 00 00 00 29 38 62 65 65 |.......)...)8bee| 0270: 34 38 65 64 63 37 33 31 38 35 34 31 66 63 30 30 |48edc7318541fc00| 0280: 31 33 65 65 34 
31 62 30 38 39 32 37 36 61 38 63 |13ee41b089276a8c| 0290: 32 34 62 66 0a 00 00 00 66 00 00 00 66 00 00 00 |24bf....f...f...| 02a0: 02 48 0a 00 00 00 67 00 00 00 68 00 00 00 01 48 |.H....g...h....H| 02b0: 00 00 00 00 00 00 00 8b 6e 1f 4c 47 ec b5 33 ff |........n.LG..3.| 02c0: d0 c8 e5 2c dc 88 af b6 cd 39 e2 0c 66 a5 a0 18 |...,.....9..f...| 02d0: 17 fd f5 23 9c 27 38 02 b5 b7 61 8d 05 1c 89 e4 |...#.'8...a.....| 02e0: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................| 02f0: 00 00 00 00 32 af 76 86 d4 03 cf 45 b5 d9 5f 2d |....2.v....E.._-| 0300: 70 ce be a5 87 ac 80 6a 00 00 00 81 00 00 00 81 |p......j........| 0310: 00 00 00 2b 44 00 63 33 66 31 63 61 32 39 32 34 |...+D.c3f1ca2924| 0320: 63 31 36 61 31 39 62 30 36 35 36 61 38 34 39 30 |c16a19b0656a8490| 0330: 30 65 35 30 34 65 35 62 30 61 65 63 32 64 0a 00 |0e504e5b0aec2d..| 0340: 00 00 8b 4d ec e9 c8 26 f6 94 90 50 7b 98 c6 38 |...M...&...P{..8| 0350: 3a 30 09 b2 95 83 7d 00 7d 8c 9d 88 84 13 25 f5 |:0....}.}.....%.| 0360: c6 b0 63 71 b3 5b 4e 8a 2b 1a 83 00 00 00 00 00 |..cq.[N.+.......| 0370: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 95 |................| 0380: 20 ee a7 81 bc ca 16 c1 e1 5a cc 0b a1 43 35 a0 | ........Z...C5.| 0390: e8 e5 ba 00 00 00 2b 00 00 00 ac 00 00 00 2b 45 |......+.......+E| 03a0: 00 39 63 36 66 64 30 33 35 30 61 36 63 30 64 30 |.9c6fd0350a6c0d0| 03b0: 63 34 39 64 34 61 39 63 35 30 31 37 63 66 30 37 |c49d4a9c5017cf07| 03c0: 30 34 33 66 35 34 65 35 38 0a 00 00 00 8b 36 5b |043f54e58.....6[| 03d0: 93 d5 7f df 48 14 e2 b5 91 1d 6b ac ff 2b 12 01 |....H.....k..+..| 03e0: 44 41 28 a5 84 c6 5e f1 21 f8 9e b6 6a b7 d0 bc |DA(...^.!...j...| 03f0: 15 3d 80 99 e7 ce 4d ec e9 c8 26 f6 94 90 50 7b |.=....M...&...P{| 0400: 98 c6 38 3a 30 09 b2 95 83 7d ee a1 37 46 79 9a |..8:0....}..7Fy.| 0410: 9e 0b fd 88 f2 9d 3c 2e 9d c9 38 9f 52 4f 00 00 |......<...8.RO..| 0420: 00 56 00 00 00 56 00 00 00 2b 46 00 32 32 62 66 |.V...V...+F.22bf| 0430: 63 66 64 36 32 61 32 31 61 33 32 38 37 65 
64 62 |cfd62a21a3287edb| 0440: 64 34 64 36 35 36 32 31 38 64 30 66 35 32 35 65 |d4d656218d0f525e| 0450: 64 37 36 61 0a 00 00 00 97 8b ee 48 ed c7 31 85 |d76a.......H..1.| 0460: 41 fc 00 13 ee 41 b0 89 27 6a 8c 24 bf 28 a5 84 |A....A..'j.$.(..| 0470: c6 5e f1 21 f8 9e b6 6a b7 d0 bc 15 3d 80 99 e7 |.^.!...j....=...| 0480: ce 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................| 0490: 00 00 00 00 00 02 de 42 19 6e be e4 2e f2 84 b6 |.......B.n......| 04a0: 78 0a 87 cd c9 6e 8e aa b6 00 00 00 2b 00 00 00 |x....n......+...| 04b0: 56 00 00 00 00 00 00 00 81 00 00 00 81 00 00 00 |V...............| 04c0: 2b 48 00 38 35 30 30 31 38 39 65 37 34 61 39 65 |+H.8500189e74a9e| 04d0: 30 34 37 35 65 38 32 32 30 39 33 62 63 37 64 62 |0475e822093bc7db| 04e0: 30 64 36 33 31 61 65 62 30 62 34 0a 00 00 00 00 |0d631aeb0b4.....| 04f0: 00 00 00 05 44 00 00 00 62 c3 f1 ca 29 24 c1 6a |....D...b...)$.j| 0500: 19 b0 65 6a 84 90 0e 50 4e 5b 0a ec 2d 00 00 00 |..ej...PN[..-...| 0510: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................| 0520: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................| 0530: 00 00 00 00 00 32 af 76 86 d4 03 cf 45 b5 d9 5f |.....2.v....E.._| 0540: 2d 70 ce be a5 87 ac 80 6a 00 00 00 00 00 00 00 |-p......j.......| 0550: 00 00 00 00 02 44 0a 00 00 00 00 00 00 00 05 45 |.....D.........E| 0560: 00 00 00 62 9c 6f d0 35 0a 6c 0d 0c 49 d4 a9 c5 |...b.o.5.l..I...| 0570: 01 7c f0 70 43 f5 4e 58 00 00 00 00 00 00 00 00 |.|.pC.NX........| 0580: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................| 0590: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................| 05a0: 95 20 ee a7 81 bc ca 16 c1 e1 5a cc 0b a1 43 35 |. ........Z...C5| 05b0: a0 e8 e5 ba 00 00 00 00 00 00 00 00 00 00 00 02 |................| 05c0: 45 0a 00 00 00 00 00 00 00 05 48 00 00 00 62 85 |E.........H...b.| 05d0: 00 18 9e 74 a9 e0 47 5e 82 20 93 bc 7d b0 d6 31 |...t..G^. 
..}..1| 05e0: ae b0 b4 00 00 00 00 00 00 00 00 00 00 00 00 00 |................| 05f0: 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................| 0600: 00 00 00 00 00 00 00 00 00 00 00 02 de 42 19 6e |.............B.n| 0610: be e4 2e f2 84 b6 78 0a 87 cd c9 6e 8e aa b6 00 |......x....n....| 0620: 00 00 00 00 00 00 00 00 00 00 02 48 0a 00 00 00 |...........H....| 0630: 00 00 00 00 00 00 00 00 00 00 00 00 00 |.............| $ hg debugbundle ../rev.hg2 Stream params: {} changegroup -- '{}' 32af7686d403cf45b5d95f2d70cebea587ac806a 9520eea781bcca16c1e15acc0ba14335a0e8e5ba eea13746799a9e0bfd88f29d3c2e9dc9389f524f 02de42196ebee42ef284b6780a87cdc96e8eaab6 $ hg unbundle ../rev.hg2 adding changesets adding manifests adding file changes added 0 changesets with 0 changes to 3 files (run 'hg update' to get a working copy) with reply $ hg bundle2 --rev '8+7+5+4' --reply ../rev-rr.hg2 $ hg unbundle2 ../rev-reply.hg2 < ../rev-rr.hg2 0 unread bytes addchangegroup return: 1 $ f --hexdump ../rev-reply.hg2 ../rev-reply.hg2: 0000: 48 47 32 30 00 00 00 00 00 00 00 2f 11 72 65 70 |HG20......./.rep| 0010: 6c 79 3a 63 68 61 6e 67 65 67 72 6f 75 70 00 00 |ly:changegroup..| 0020: 00 00 00 02 0b 01 06 01 69 6e 2d 72 65 70 6c 79 |........in-reply| 0030: 2d 74 6f 31 72 65 74 75 72 6e 31 00 00 00 00 00 |-to1return1.....| 0040: 00 00 1b 06 6f 75 74 70 75 74 00 00 00 01 00 01 |....output......| 0050: 0b 01 69 6e 2d 72 65 70 6c 79 2d 74 6f 31 00 00 |..in-reply-to1..| 0060: 00 64 61 64 64 69 6e 67 20 63 68 61 6e 67 65 73 |.dadding changes| 0070: 65 74 73 0a 61 64 64 69 6e 67 20 6d 61 6e 69 66 |ets.adding manif| 0080: 65 73 74 73 0a 61 64 64 69 6e 67 20 66 69 6c 65 |ests.adding file| 0090: 20 63 68 61 6e 67 65 73 0a 61 64 64 65 64 20 30 | changes.added 0| 00a0: 20 63 68 61 6e 67 65 73 65 74 73 20 77 69 74 68 | changesets with| 00b0: 20 30 20 63 68 61 6e 67 65 73 20 74 6f 20 33 20 | 0 changes to 3 | 00c0: 66 69 6c 65 73 0a 00 00 00 00 00 00 00 00 |files.........| Check handling of exception 
during generation. ---------------------------------------------- $ hg bundle2 --genraise > ../genfailed.hg2 abort: Someone set up us the bomb! [255] Should still be a valid bundle $ f --hexdump ../genfailed.hg2 ../genfailed.hg2: 0000: 48 47 32 30 00 00 00 00 00 00 00 0d 06 6f 75 74 |HG20.........out| 0010: 70 75 74 00 00 00 00 00 00 ff ff ff ff 00 00 00 |put.............| 0020: 48 0b 65 72 72 6f 72 3a 61 62 6f 72 74 00 00 00 |H.error:abort...| 0030: 00 01 00 07 2d 6d 65 73 73 61 67 65 75 6e 65 78 |....-messageunex| 0040: 70 65 63 74 65 64 20 65 72 72 6f 72 3a 20 53 6f |pected error: So| 0050: 6d 65 6f 6e 65 20 73 65 74 20 75 70 20 75 73 20 |meone set up us | 0060: 74 68 65 20 62 6f 6d 62 21 00 00 00 00 00 00 00 |the bomb!.......| 0070: 00 |.| And its handling on the other size raise a clean exception $ cat ../genfailed.hg2 | hg unbundle2 0 unread bytes abort: unexpected error: Someone set up us the bomb! [255] Test compression ================ Simple case where it just work: GZ ---------------------------------- $ hg bundle2 --compress GZ --rev '8+7+5+4' ../rev.hg2.bz $ f --hexdump ../rev.hg2.bz ../rev.hg2.bz: 0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress| 0010: 69 6f 6e 3d 47 5a 78 9c 95 94 7d 68 95 55 1c c7 |ion=GZx...}h.U..| 0020: 9f 3b 31 e8 ce fa c3 65 be a0 a4 b4 52 b9 29 e7 |.;1....e....R.).| 0030: f5 79 ce 89 fa 63 ed 5e 77 8b 9c c3 3f 2a 1c 68 |.y...c.^w...?*.h| 0040: cf 79 9b dd 6a ae b0 28 74 b8 e5 96 5b bb 86 61 |.y..j..(t...[..a| 0050: a3 15 6e 3a 71 c8 6a e8 a5 da 95 64 28 22 ce 69 |..n:q.j....d(".i| 0060: cd 06 59 34 28 2b 51 2a 58 c3 17 56 2a 9a 9d 67 |..Y4(+Q*X..V*..g| 0070: dc c6 35 9e c4 1d f8 9e 87 f3 9c f3 3b bf 0f bf |..5.........;...| 0080: 97 e3 38 ce f4 42 b9 d6 af ae d2 55 af ae 7b ad |..8..B.....U..{.| 0090: c6 c9 8d bb 8a ec b4 07 ed 7f fd ed d3 53 be 4e |.............S.N| 00a0: f4 0e af 59 52 73 ea 50 d7 96 9e ba d4 9a 1f 87 |...YRs.P........| 00b0: 9b 9f 1d e8 7a 6a 79 e9 cb 7f cf eb fe 7e d3 82 
|....zjy......~..| 00c0: ce 2f 36 38 21 23 cc 36 b7 b5 38 90 ab a1 21 92 |./68!#.6..8...!.| 00d0: 78 5a 0a 8a b1 31 0a 48 a6 29 92 4a 32 e6 1b e1 |xZ...1.H.).J2...| 00e0: 4a 85 b9 46 40 46 ed 61 63 b5 d6 aa 20 1e ac 5e |J..F@F.ac... ..^| 00f0: b0 0a ae 8a c4 03 c6 d6 f9 a3 7b eb fb 4e de 7f |..........{..N..| 0100: e4 97 55 5f 15 76 96 d2 5d bf 9d 3f 38 18 29 4c |..U_.v..]..?8.)L| 0110: 0f b7 5d 6e 9b b3 aa 7e c6 d5 15 5b f7 7c 52 f1 |..]n...~...[.|R.| 0120: 7c 73 18 63 98 6d 3e 23 51 5a 6a 2e 19 72 8d cb ||s.c.m>#QZj..r..| 0130: 09 07 14 78 82 33 e9 62 86 7d 0c 00 17 88 53 86 |...x.3.b.}....S.| 0140: 3d 75 0b 63 e2 16 c6 84 9d 76 8f 76 7a cb de fc |=u.c.....v.vz...| 0150: a8 a3 f0 46 d3 a5 f6 c7 96 b6 9f 60 3b 57 ae 28 |...F.......`;W.(| 0160: ce b2 8d e9 f4 3e 6f 66 53 dd e5 6b ad 67 be f9 |.....>ofS..k.g..| 0170: 72 ee 5f 8d 61 3c 61 b6 f9 8c d8 a5 82 63 45 3d |r._.a.| 0210: 58 dc 91 d8 40 e9 23 8e 88 84 ae 0f b9 00 2e b5 |X...@.#.........| 0220: 74 36 f3 40 53 40 34 15 c0 d7 12 8d e7 bb 65 f9 |t6.@S@4.......e.| 0230: c8 ef 03 0f ff f9 fe b6 8a 0d 6d fd ec 51 70 f7 |..........m..Qp.| 0240: a7 ad 9b 6b 9d da 74 7b 53 43 d1 43 63 fd 19 f9 |...k..t{SC.Cc...| 0250: ca 67 95 e5 ef c4 e6 6c 9e 44 e1 c5 ac 7a 82 6f |.g.....l.D...z.o| 0260: c2 e1 d2 b5 2d 81 29 f0 5d 09 6c 6f 10 ae 88 cf |....-.).].lo....| 0270: 25 05 d0 93 06 78 80 60 43 2d 10 1b 47 71 2b b7 |%....x.`C-..Gq+.| 0280: 7f bb e9 a7 e4 7d 67 7b df 9b f7 62 cf cd d8 f4 |.....}g{...b....| 0290: 48 bc 64 51 57 43 ff ea 8b 0b ae 74 64 53 07 86 |H.dQWC.....tdS..| 02a0: fa 66 3c 5e f7 e1 af a7 c2 90 ff a7 be 9e c9 29 |.f<^...........)| 02b0: b6 cc 41 48 18 69 94 8b 7c 04 7d 8c 98 a7 95 50 |..AH.i..|.}....P| 02c0: 44 d9 d0 20 c8 14 30 14 51 ad 6c 16 03 94 0f 5a |D.. 
..0.Q.l....Z| 02d0: 46 93 7f 1c 87 8d 25 d7 9d a2 d1 92 4c f3 c2 54 |F.....%.....L..T| 02e0: ba f8 70 18 ca 24 0a 29 96 43 71 f2 93 95 74 18 |..p..$.).Cq...t.| 02f0: b5 65 c4 b8 f6 6c 5c 34 20 1e d5 0c 21 c0 b1 90 |.e...l\4 ...!...| 0300: 9e 12 40 b9 18 fa 5a 00 41 a2 39 d3 a9 c1 73 21 |..@...Z.A.9...s!| 0310: 8e 5e 3c b9 b8 f8 48 6a 76 46 a7 1a b6 dd 5b 51 |.^<...HjvF....[Q| 0320: 5e 19 1d 59 12 c6 32 89 02 9a c0 8f 4f b8 0a ba |^..Y..2.....O...| 0330: 5e ec 58 37 44 a3 2f dd 33 ed c9 d3 dd c7 22 1b |^.X7D./.3.....".| 0340: 2f d4 94 8e 95 3f 77 a7 ae 6e f3 32 8d bb 4a 4c |/....?w..n.2..JL| 0350: b8 0a 5a 43 34 3a b3 3a d6 77 ff 5c b6 fa ad f9 |..ZC4:.:.w.\....| 0360: db fb 6a 33 df c1 7d 99 cf ef d4 d5 6d da 77 7c |..j3..}.....m.w|| 0370: 3b 19 fd af c5 3f f1 60 c3 17 |;....?.`..| $ hg debugbundle ../rev.hg2.bz Stream params: {'Compression': 'GZ'} changegroup -- '{}' 32af7686d403cf45b5d95f2d70cebea587ac806a 9520eea781bcca16c1e15acc0ba14335a0e8e5ba eea13746799a9e0bfd88f29d3c2e9dc9389f524f 02de42196ebee42ef284b6780a87cdc96e8eaab6 $ hg unbundle ../rev.hg2.bz adding changesets adding manifests adding file changes added 0 changesets with 0 changes to 3 files (run 'hg update' to get a working copy) Simple case where it just work: BZ ---------------------------------- $ hg bundle2 --compress BZ --rev '8+7+5+4' ../rev.hg2.bz $ f --hexdump ../rev.hg2.bz ../rev.hg2.bz: 0000: 48 47 32 30 00 00 00 0e 43 6f 6d 70 72 65 73 73 |HG20....Compress| 0010: 69 6f 6e 3d 42 5a 42 5a 68 39 31 41 59 26 53 59 |ion=BZBZh91AY&SY| 0020: a3 4b 18 3d 00 00 1a 7f ff ff bf 5f f6 ef ef 7f |.K.=......._....| 0030: f6 3f f7 d1 d9 ff ff f7 6e ff ff 6e f7 f6 bd df |.?......n..n....| 0040: b5 ab ff cf 67 f6 e7 7b f7 c0 02 d7 33 82 8b 51 |....g..{....3..Q| 0050: 04 a5 53 d5 3d 27 a0 99 18 4d 0d 34 00 d1 a1 e8 |..S.='...M.4....| 0060: 80 c8 7a 87 a9 a3 43 6a 3d 46 86 26 80 34 3d 40 |..z...Cj=F.&.4=@| 0070: c8 c9 b5 34 f4 8f 48 0f 51 ea 34 34 fd 4d aa 19 |...4..H.Q.44.M..| 0080: 03 40 0c 08 da 86 43 d4 f5 
0f 42 1e a0 f3 54 33 |.@....C...B...T3| 0090: 54 d3 13 4d 03 40 32 00 00 32 03 26 80 0d 00 0d |T..M.@2..2.&....| 00a0: 00 68 c8 c8 03 20 32 30 98 8c 80 00 00 03 4d 00 |.h... 20......M.| 00b0: c8 00 00 0d 00 00 22 99 a1 34 c2 64 a6 d5 34 1a |......"..4.d..4.| 00c0: 00 00 06 86 83 4d 07 a8 d1 a0 68 01 a0 00 00 00 |.....M....h.....| 00d0: 00 0d 06 80 00 00 00 0d 00 03 40 00 00 04 a4 a1 |..........@.....| 00e0: 4d a9 89 89 b4 9a 32 0c 43 46 86 87 a9 8d 41 9a |M.....2.CF....A.| 00f0: 98 46 9a 0d 31 32 1a 34 0d 0c 8d a2 0c 98 4d 06 |.F..12.4......M.| 0100: 8c 40 c2 60 8d 0d 0c 20 c9 89 fa a0 d0 d3 21 a1 |.@.`... ......!.| 0110: ea 34 d3 68 9e a6 d1 74 05 33 cb 66 96 93 28 64 |.4.h...t.3.f..(d| 0120: 40 91 22 ac 55 9b ea 40 7b 38 94 e2 f8 06 00 cb |@.".U..@{8......| 0130: 28 02 00 4d ab 40 24 10 43 18 cf 64 b4 06 83 0c |(..M.@$.C..d....| 0140: 34 6c b4 a3 d4 0a 0a e4 a8 5c 4e 23 c0 c9 7a 31 |4l.......\N#..z1| 0150: 97 87 77 7a 64 88 80 8e 60 97 20 93 0f 8e eb c4 |..wzd...`. .....| 0160: 62 a4 44 a3 52 20 b2 99 a9 2e e1 d7 29 4a 54 ac |b.D.R ......)JT.| 0170: 44 7a bb cc 04 3d e0 aa bd 6a 33 5e 9b a2 57 36 |Dz...=...j3^..W6| 0180: fa cb 45 bb 6d 3e c1 d9 d9 f5 83 69 8a d0 e0 e2 |..E.m>.....i....| 0190: e7 ae 90 55 24 da 3f ab 78 c0 4c b4 56 a3 9e a4 |...U$.?.x.L.V...| 01a0: af 9c 65 74 86 ec 6d dc 62 dc 33 ca c8 50 dd 9d |..et..m.b.3..P..| 01b0: 98 8e 9e 59 20 f3 f0 42 91 4a 09 f5 75 8d 3d a5 |...Y ..B.J..u.=.| 01c0: a5 15 cb 8d 10 63 b0 c2 2e b2 81 f7 c1 76 0e 53 |.....c.......v.S| 01d0: 6c 0e 46 73 b5 ae 67 f9 4c 0b 45 6b a8 32 2a 2f |l.Fs..g.L.Ek.2*/| 01e0: a2 54 a4 44 05 20 a1 38 d1 a4 c6 09 a8 2b 08 99 |.T.D. 
.8.....+..| 01f0: a4 14 ae 8d a3 e3 aa 34 27 d8 44 ca c3 5d 21 8b |.......4'.D..]!.| 0200: 1a 1e 97 29 71 2b 09 4a 4a 55 55 94 58 65 b2 bc |...)q+.JJUU.Xe..| 0210: f3 a5 90 26 36 76 67 7a 51 98 d6 8a 4a 99 50 b5 |...&6vgzQ...J.P.| 0220: 99 8f 94 21 17 a9 8b f3 ad 4c 33 d4 2e 40 c8 0c |...!.....L3..@..| 0230: 3b 90 53 39 db 48 02 34 83 48 d6 b3 99 13 d2 58 |;.S9.H.4.H.....X| 0240: 65 8e 71 ac a9 06 95 f2 c4 8e b4 08 6b d3 0c ae |e.q.........k...| 0250: d9 90 56 71 43 a7 a2 62 16 3e 50 63 d3 57 3c 2d |..VqC..b.>Pc.W<-| 0260: 9f 0f 34 05 08 d8 a6 4b 59 31 54 66 3a 45 0c 8a |..4....KY1Tf:E..| 0270: c7 90 3a f0 6a 83 1b f5 ca fb 80 2b 50 06 fb 51 |..:.j......+P..Q| 0280: 7e a6 a4 d4 81 44 82 21 54 00 5b 1a 30 83 62 a3 |~....D.!T.[.0.b.| 0290: 18 b6 24 19 1e 45 df 4d 5c db a6 af 5b ac 90 fa |..$..E.M\...[...| 02a0: 3e ed f9 ec 4c ba 36 ee d8 60 20 a7 c7 3b cb d1 |>...L.6..` ..;..| 02b0: 90 43 7d 27 16 50 5d ad f4 14 07 0b 90 5c cc 6b |.C}'.P]......\.k| 02c0: 8d 3f a6 88 f4 34 37 a8 cf 14 63 36 19 f7 3e 28 |.?...47...c6..>(| 02d0: de 99 e8 16 a4 9d 0d 40 a1 a7 24 52 14 a6 72 62 |.......@..$R..rb| 02e0: 59 5a ca 2d e5 51 90 78 88 d9 c6 c7 21 d0 f7 46 |YZ.-.Q.x....!..F| 02f0: b2 04 46 44 4e 20 9c 12 b1 03 4e 25 e0 a9 0c 58 |..FDN ....N%...X| 0300: 5b 1d 3c 93 20 01 51 de a9 1c 69 23 32 46 14 b4 |[.<. .Q...i#2F..| 0310: 90 db 17 98 98 50 03 90 29 aa 40 b0 13 d8 43 d2 |.....P..).@...C.| 0320: 5f c5 9d eb f3 f2 ad 41 e8 7a a9 ed a1 58 84 a6 |_......A.z...X..| 0330: 42 bf d6 fc 24 82 c1 20 32 26 4a 15 a6 1d 29 7f |B...$.. 
2&J...).| 0340: 7e f4 3d 07 bc 62 9a 5b ec 44 3d 72 1d 41 8b 5c |~.=..b.[.D=r.A.\| 0350: 80 de 0e 62 9a 2e f8 83 00 d5 07 a0 9c c6 74 98 |...b..........t.| 0360: 11 b2 5e a9 38 02 03 ee fd 86 5c f4 86 b3 ae da |..^.8.....\.....| 0370: 05 94 01 c5 c6 ea 18 e6 ba 2a ba b3 04 5c 96 89 |.........*...\..| 0380: 72 63 5b 10 11 f6 67 34 98 cb e4 c0 4e fa e6 99 |rc[...g4....N...| 0390: 19 6e 50 e8 26 8d 0c 17 e0 be ef e1 8e 02 6f 32 |.nP.&.........o2| 03a0: 82 dc 26 f8 a1 08 f3 8a 0d f3 c4 75 00 48 73 b8 |..&........u.Hs.| 03b0: be 3b 0d 7f d0 fd c7 78 96 ec e0 03 80 68 4d 8d |.;.....x.....hM.| 03c0: 43 8c d7 68 58 f9 50 f0 18 cb 21 58 1b 60 cd 1f |C..hX.P...!X.`..| 03d0: 84 36 2e 16 1f 0a f7 4e 8f eb df 01 2d c2 79 0b |.6.....N....-.y.| 03e0: f7 24 ea 0d e8 59 86 51 6e 1c 30 a3 ad 2f ee 8c |.$...Y.Qn.0../..| 03f0: 90 c8 84 d5 e8 34 c1 95 b2 c9 f6 4d 87 1c 7d 19 |.....4.....M..}.| 0400: d6 41 58 56 7a e0 6c ba 10 c7 e8 33 39 36 96 e7 |.AXVz.l....396..| 0410: d2 f9 59 9a 08 95 48 38 e7 0b b7 0a 24 67 c4 39 |..Y...H8....$g.9| 0420: 8b 43 88 57 9c 01 f5 61 b5 e1 27 41 7e af 83 fe |.C.W...a..'A~...| 0430: 2e e4 8a 70 a1 21 46 96 30 7a |...p.!F.0z| $ hg debugbundle ../rev.hg2.bz Stream params: {'Compression': 'BZ'} changegroup -- '{}' 32af7686d403cf45b5d95f2d70cebea587ac806a 9520eea781bcca16c1e15acc0ba14335a0e8e5ba eea13746799a9e0bfd88f29d3c2e9dc9389f524f 02de42196ebee42ef284b6780a87cdc96e8eaab6 $ hg unbundle ../rev.hg2.bz adding changesets adding manifests adding file changes added 0 changesets with 0 changes to 3 files (run 'hg update' to get a working copy) unknown compression while unbundling ----------------------------- $ hg bundle2 --param Compression=FooBarUnknown --rev '8+7+5+4' ../rev.hg2.bz $ cat ../rev.hg2.bz | hg statbundle2 abort: unknown parameters: Stream Parameter - Compression='FooBarUnknown' [255] $ hg unbundle ../rev.hg2.bz abort: ../rev.hg2.bz: unknown bundle feature, Stream Parameter - Compression='FooBarUnknown' (see 
https://mercurial-scm.org/wiki/BundleFeature for more information) [255] $ cd .. mercurial-3.7.3/tests/test-run-tests.py0000644000175000017500000000503512676531525017567 0ustar mpmmpm00000000000000"""test line matching with some failing examples and some which warn run-test.t only checks positive matches and can not see warnings (both by design) """ from __future__ import print_function import os, re # this is hack to make sure no escape characters are inserted into the output if 'TERM' in os.environ: del os.environ['TERM'] import doctest run_tests = __import__('run-tests') def prn(ex): m = ex.args[0] if isinstance(m, str): print(m) else: print(m.decode('utf-8')) def lm(expected, output): r"""check if output matches expected does it generally work? >>> lm(b'H*e (glob)\n', b'Here\n') True fail on bad test data >>> try: lm(b'a\n',b'a') ... except AssertionError as ex: print(ex) missing newline >>> try: lm(b'single backslash\n', b'single \backslash\n') ... except AssertionError as ex: prn(ex) single backslash or unknown char """ assert (expected.endswith(b'\n') and output.endswith(b'\n')), 'missing newline' assert not re.search(br'[^ \w\\/\r\n()*?]', expected + output), \ b'single backslash or unknown char' match = run_tests.TTest.linematch(expected, output) if isinstance(match, str): return 'special: ' + match elif isinstance(match, bytes): return 'special: ' + match.decode('utf-8') else: return bool(match) # do not return match object def wintests(): r"""test matching like running on windows enable windows matching on any os >>> _osaltsep = os.altsep >>> os.altsep = True valid match on windows >>> lm(b'g/a*/d (glob)\n', b'g\\abc/d\n') True direct matching, glob unnecessary >>> lm(b'g/b (glob)\n', b'g/b\n') 'special: -glob' missing glob >>> lm(b'/g/c/d/fg\n', b'\\g\\c\\d/fg\n') 'special: +glob' restore os.altsep >>> os.altsep = _osaltsep """ pass def otherostests(): r"""test matching like running on non-windows os disable windows matching on any os >>> _osaltsep = 
os.altsep >>> os.altsep = False backslash does not match slash >>> lm(b'h/a* (glob)\n', b'h\\ab\n') False direct matching glob can not be recognized >>> lm(b'h/b (glob)\n', b'h/b\n') True missing glob can not not be recognized >>> lm(b'/h/c/df/g/\n', b'\\h/c\\df/g\\\n') False restore os.altsep >>> os.altsep = _osaltsep """ pass if __name__ == '__main__': doctest.testmod() mercurial-3.7.3/tests/test-trusted.py.out0000644000175000017500000001017112676531525020120 0ustar mpmmpm00000000000000# same user, same group trusted global = /some/path local = /another/path untrusted . . global = /some/path . . local = /another/path # same user, different group trusted global = /some/path local = /another/path untrusted . . global = /some/path . . local = /another/path # different user, same group not trusting file .hg/hgrc from untrusted user abc, group bar trusted global = /some/path untrusted . . global = /some/path . . local = /another/path # different user, same group, but we trust the group trusted global = /some/path local = /another/path untrusted . . global = /some/path . . local = /another/path # different user, different group not trusting file .hg/hgrc from untrusted user abc, group def trusted global = /some/path untrusted . . global = /some/path . . local = /another/path # different user, different group, but we trust the user trusted global = /some/path local = /another/path untrusted . . global = /some/path . . local = /another/path # different user, different group, but we trust the group trusted global = /some/path local = /another/path untrusted . . global = /some/path . . local = /another/path # different user, different group, but we trust the user and the group trusted global = /some/path local = /another/path untrusted . . global = /some/path . . local = /another/path # we trust all users # different user, different group trusted global = /some/path local = /another/path untrusted . . global = /some/path . . 
local = /another/path # we trust all groups # different user, different group trusted global = /some/path local = /another/path untrusted . . global = /some/path . . local = /another/path # we trust all users and groups # different user, different group trusted global = /some/path local = /another/path untrusted . . global = /some/path . . local = /another/path # we don't get confused by users and groups with the same name # different user, different group not trusting file .hg/hgrc from untrusted user abc, group def trusted global = /some/path untrusted . . global = /some/path . . local = /another/path # list of user names # different user, different group, but we trust the user trusted global = /some/path local = /another/path untrusted . . global = /some/path . . local = /another/path # list of group names # different user, different group, but we trust the group trusted global = /some/path local = /another/path untrusted . . global = /some/path . . local = /another/path # Can't figure out the name of the user running this process # different user, different group not trusting file .hg/hgrc from untrusted user abc, group def trusted global = /some/path untrusted . . global = /some/path . . local = /another/path # prints debug warnings # different user, different group not trusting file .hg/hgrc from untrusted user abc, group def trusted ignoring untrusted configuration option paths.local = /another/path global = /some/path untrusted . . global = /some/path .ignoring untrusted configuration option paths.local = /another/path . local = /another/path # report_untrusted enabled without debug hides warnings # different user, different group trusted global = /some/path untrusted . . global = /some/path . . 
local = /another/path # report_untrusted enabled with debug shows warnings # different user, different group not trusting file .hg/hgrc from untrusted user abc, group def trusted ignoring untrusted configuration option paths.local = /another/path global = /some/path untrusted . . global = /some/path .ignoring untrusted configuration option paths.local = /another/path . local = /another/path # ui.readconfig sections quux # read trusted, untrusted, new ui, trusted not trusting file foobar from untrusted user abc, group def trusted: ignoring untrusted configuration option foobar.baz = quux None untrusted: quux # error handling # file doesn't exist # same user, same group # different user, different group # parse error # different user, different group not trusting file .hg/hgrc from untrusted user abc, group def ('foo', '.hg/hgrc:1') # same user, same group ('foo', '.hg/hgrc:1') mercurial-3.7.3/tests/test-mq-qgoto.t0000644000175000017500000000267012676531525017204 0ustar mpmmpm00000000000000 $ echo "[extensions]" >> $HGRCPATH $ echo "mq=" >> $HGRCPATH $ hg init a $ cd a $ echo a > a $ hg ci -Ama adding a $ hg qnew a.patch $ echo a >> a $ hg qrefresh $ hg qnew b.patch $ echo b > b $ hg add b $ hg qrefresh $ hg qnew c.patch $ echo c > c $ hg add c $ hg qrefresh $ hg qgoto a.patch popping c.patch popping b.patch now at: a.patch $ hg qgoto c.patch applying b.patch applying c.patch now at: c.patch $ hg qgoto b.patch popping c.patch now at: b.patch Using index: $ hg qgoto 0 popping b.patch now at: a.patch $ hg qgoto 2 applying b.patch applying c.patch now at: c.patch No warnings when using index ... 
and update from non-qtip and with pending changes in unrelated files: $ hg qnew bug314159 $ echo d >> c $ hg qrefresh $ hg qnew bug141421 $ echo e >> b $ hg qrefresh $ hg up -r bug314159 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo f >> a $ echo f >> b $ echo f >> c $ hg qgoto 1 abort: local changes found, qrefresh first [255] $ hg qgoto 1 -f popping bug141421 popping bug314159 popping c.patch now at: b.patch $ hg st M a M b ? c.orig $ hg up -qCr. $ hg qgoto 3 applying c.patch applying bug314159 now at: bug314159 Detect ambiguous non-index: $ hg qgoto 14 patch name "14" is ambiguous: bug314159 bug141421 abort: patch 14 not in series [255] $ cd .. mercurial-3.7.3/tests/test-revert-unknown.t0000644000175000017500000000047712676531525020447 0ustar mpmmpm00000000000000 $ hg init $ touch unknown $ touch a $ hg add a $ hg ci -m "1" $ touch b $ hg add b $ hg ci -m "2" Should show unknown $ hg status ? unknown $ hg revert -r 0 --all removing b Should show unknown and b removed $ hg status R b ? 
unknown Should show a and unknown $ ls a unknown mercurial-3.7.3/tests/test-commit.t0000644000175000017500000003707412676531525016736 0ustar mpmmpm00000000000000commit date test $ hg init test $ cd test $ echo foo > foo $ hg add foo $ cat > $TESTTMP/checkeditform.sh < env | grep HGEDITFORM > true > EOF $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg commit -m "" HGEDITFORM=commit.normal.normal abort: empty commit message [255] $ hg commit -d '0 0' -m commit-1 $ echo foo >> foo $ hg commit -d '1 4444444' -m commit-3 abort: impossible time zone offset: 4444444 [255] $ hg commit -d '1 15.1' -m commit-4 abort: invalid date: '1\t15.1' [255] $ hg commit -d 'foo bar' -m commit-5 abort: invalid date: 'foo bar' [255] $ hg commit -d ' 1 4444' -m commit-6 $ hg commit -d '111111111111 0' -m commit-7 abort: date exceeds 32 bits: 111111111111 [255] $ hg commit -d '-7654321 3600' -m commit-7 abort: negative date value: -7654321 [255] commit added file that has been deleted $ echo bar > bar $ hg add bar $ rm bar $ hg commit -m commit-8 nothing changed (1 missing files, see 'hg status') [1] $ hg commit -m commit-8-2 bar abort: bar: file not found! [255] $ hg -q revert -a --no-backup $ mkdir dir $ echo boo > dir/file $ hg add adding dir/file (glob) $ hg -v commit -m commit-9 dir committing files: dir/file committing manifest committing changelog committed changeset 2:d2a76177cb42 $ echo > dir.file $ hg add adding dir.file $ hg commit -m commit-10 dir dir.file abort: dir: no match under directory! [255] $ echo >> dir/file $ mkdir bleh $ mkdir dir2 $ cd bleh $ hg commit -m commit-11 . abort: bleh: no match under directory! [255] $ hg commit -m commit-12 ../dir ../dir2 abort: dir2: no match under directory! [255] $ hg -v commit -m commit-13 ../dir committing files: dir/file committing manifest committing changelog committed changeset 3:1cd62a2d8db5 $ cd .. 
$ hg commit -m commit-14 does-not-exist abort: does-not-exist: * (glob) [255] #if symlink $ ln -s foo baz $ hg commit -m commit-15 baz abort: baz: file not tracked! [255] #endif $ touch quux $ hg commit -m commit-16 quux abort: quux: file not tracked! [255] $ echo >> dir/file $ hg -v commit -m commit-17 dir/file committing files: dir/file committing manifest committing changelog committed changeset 4:49176991390e An empty date was interpreted as epoch origin $ echo foo >> foo $ hg commit -d '' -m commit-no-date $ hg tip --template '{date|isodate}\n' | grep '1970' [1] Make sure we do not obscure unknown requires file entries (issue2649) $ echo foo >> foo $ echo fake >> .hg/requires $ hg commit -m bla abort: repository requires features unknown to this Mercurial: fake! (see https://mercurial-scm.org/wiki/MissingRequirement for more information) [255] $ cd .. partial subdir commit test $ hg init test2 $ cd test2 $ mkdir foo $ echo foo > foo/foo $ mkdir bar $ echo bar > bar/bar $ hg add adding bar/bar (glob) adding foo/foo (glob) $ HGEDITOR=cat hg ci -e -m commit-subdir-1 foo commit-subdir-1 HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. HG: -- HG: user: test HG: branch 'default' HG: added foo/foo $ hg ci -m commit-subdir-2 bar subdir log 1 $ hg log -v foo changeset: 0:f97e73a25882 user: test date: Thu Jan 01 00:00:00 1970 +0000 files: foo/foo description: commit-subdir-1 subdir log 2 $ hg log -v bar changeset: 1:aa809156d50d tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 files: bar/bar description: commit-subdir-2 full log $ hg log -v changeset: 1:aa809156d50d tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 files: bar/bar description: commit-subdir-2 changeset: 0:f97e73a25882 user: test date: Thu Jan 01 00:00:00 1970 +0000 files: foo/foo description: commit-subdir-1 $ cd .. 
dot and subdir commit test $ hg init test3 $ echo commit-foo-subdir > commit-log-test $ cd test3 $ mkdir foo $ echo foo content > foo/plain-file $ hg add foo/plain-file $ HGEDITOR=cat hg ci --edit -l ../commit-log-test foo commit-foo-subdir HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. HG: -- HG: user: test HG: branch 'default' HG: added foo/plain-file $ echo modified foo content > foo/plain-file $ hg ci -m commit-foo-dot . full log $ hg log -v changeset: 1:95b38e3a5b2e tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 files: foo/plain-file description: commit-foo-dot changeset: 0:65d4e9386227 user: test date: Thu Jan 01 00:00:00 1970 +0000 files: foo/plain-file description: commit-foo-subdir subdir log $ cd foo $ hg log . changeset: 1:95b38e3a5b2e tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit-foo-dot changeset: 0:65d4e9386227 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: commit-foo-subdir $ cd .. $ cd .. Issue1049: Hg permits partial commit of merge without warning $ hg init issue1049 $ cd issue1049 $ echo a > a $ hg ci -Ama adding a $ echo a >> a $ hg ci -mb $ hg up 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo b >> a $ hg ci -mc created new head $ HGMERGE=true hg merge merging a 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) should fail because we are specifying a file name $ hg ci -mmerge a abort: cannot partially commit a merge (do not specify files or patterns) [255] should fail because we are specifying a pattern $ hg ci -mmerge -I a abort: cannot partially commit a merge (do not specify files or patterns) [255] should succeed $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg ci -mmerge --edit HGEDITFORM=commit.normal.merge $ cd .. 
test commit message content $ hg init commitmsg $ cd commitmsg $ echo changed > changed $ echo removed > removed $ hg book activebookmark $ hg ci -qAm init $ hg rm removed $ echo changed >> changed $ echo added > added $ hg add added $ HGEDITOR=cat hg ci -A HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. HG: -- HG: user: test HG: branch 'default' HG: bookmark 'activebookmark' HG: added added HG: changed changed HG: removed removed abort: empty commit message [255] test saving last-message.txt $ hg init sub $ echo a > sub/a $ hg -R sub add sub/a $ cat > sub/.hg/hgrc < [hooks] > precommit.test-saving-last-message = false > EOF $ echo 'sub = sub' > .hgsub $ hg add .hgsub $ cat > $TESTTMP/editor.sh < echo "==== before editing:" > cat \$1 > echo "====" > echo "test saving last-message.txt" >> \$1 > EOF $ rm -f .hg/last-message.txt $ HGEDITOR="sh $TESTTMP/editor.sh" hg commit -S -q ==== before editing: HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. 
HG: -- HG: user: test HG: branch 'default' HG: bookmark 'activebookmark' HG: subrepo sub HG: added .hgsub HG: added added HG: changed .hgsubstate HG: changed changed HG: removed removed ==== abort: precommit.test-saving-last-message hook exited with status 1 (in subrepo sub) [255] $ cat .hg/last-message.txt test saving last-message.txt test that '[committemplate] changeset' definition and commit log specific template keywords work well $ cat >> .hg/hgrc < [committemplate] > changeset.commit.normal = HG: this is "commit.normal" template > HG: {extramsg} > {if(activebookmark, > "HG: bookmark '{activebookmark}' is activated\n", > "HG: no bookmark is activated\n")}{subrepos % > "HG: subrepo '{subrepo}' is changed\n"} > > changeset.commit = HG: this is "commit" template > HG: {extramsg} > {if(activebookmark, > "HG: bookmark '{activebookmark}' is activated\n", > "HG: no bookmark is activated\n")}{subrepos % > "HG: subrepo '{subrepo}' is changed\n"} > > changeset = HG: this is customized commit template > HG: {extramsg} > {if(activebookmark, > "HG: bookmark '{activebookmark}' is activated\n", > "HG: no bookmark is activated\n")}{subrepos % > "HG: subrepo '{subrepo}' is changed\n"} > EOF $ hg init sub2 $ echo a > sub2/a $ hg -R sub2 add sub2/a $ echo 'sub2 = sub2' >> .hgsub $ HGEDITOR=cat hg commit -S -q HG: this is "commit.normal" template HG: Leave message empty to abort commit. HG: bookmark 'activebookmark' is activated HG: subrepo 'sub' is changed HG: subrepo 'sub2' is changed abort: empty commit message [255] $ cat >> .hg/hgrc < [committemplate] > changeset.commit.normal = > # now, "changeset.commit" should be chosen for "hg commit" > EOF $ hg bookmark --inactive activebookmark $ hg forget .hgsub $ HGEDITOR=cat hg commit -q HG: this is "commit" template HG: Leave message empty to abort commit. 
HG: no bookmark is activated abort: empty commit message [255] $ cat >> .hg/hgrc < [committemplate] > changeset.commit = > # now, "changeset" should be chosen for "hg commit" > EOF $ HGEDITOR=cat hg commit -q HG: this is customized commit template HG: Leave message empty to abort commit. HG: no bookmark is activated abort: empty commit message [255] $ cat >> .hg/hgrc < [committemplate] > changeset = {desc} > HG: mods={file_mods} > HG: adds={file_adds} > HG: dels={file_dels} > HG: files={files} > HG: > {splitlines(diff()) % 'HG: {line}\n' > }HG: > HG: mods={file_mods} > HG: adds={file_adds} > HG: dels={file_dels} > HG: files={files}\n > EOF $ hg status -amr M changed A added R removed $ HGEDITOR=cat hg commit -q -e -m "foo bar" changed foo bar HG: mods=changed HG: adds= HG: dels= HG: files=changed HG: HG: --- a/changed Thu Jan 01 00:00:00 1970 +0000 HG: +++ b/changed Thu Jan 01 00:00:00 1970 +0000 HG: @@ -1,1 +1,2 @@ HG: changed HG: +changed HG: HG: mods=changed HG: adds= HG: dels= HG: files=changed $ hg status -amr A added R removed $ hg parents --template "M {file_mods}\nA {file_adds}\nR {file_dels}\n" M changed A R $ hg rollback -q $ cat >> .hg/hgrc < [committemplate] > changeset = {desc} > HG: mods={file_mods} > HG: adds={file_adds} > HG: dels={file_dels} > HG: files={files} > HG: > {splitlines(diff("changed")) % 'HG: {line}\n' > }HG: > HG: mods={file_mods} > HG: adds={file_adds} > HG: dels={file_dels} > HG: files={files} > HG: > {splitlines(diff("added")) % 'HG: {line}\n' > }HG: > HG: mods={file_mods} > HG: adds={file_adds} > HG: dels={file_dels} > HG: files={files} > HG: > {splitlines(diff("removed")) % 'HG: {line}\n' > }HG: > HG: mods={file_mods} > HG: adds={file_adds} > HG: dels={file_dels} > HG: files={files}\n > EOF $ HGEDITOR=cat hg commit -q -e -m "foo bar" added removed foo bar HG: mods= HG: adds=added HG: dels=removed HG: files=added removed HG: HG: HG: mods= HG: adds=added HG: dels=removed HG: files=added removed HG: HG: --- /dev/null Thu Jan 01 
00:00:00 1970 +0000 HG: +++ b/added Thu Jan 01 00:00:00 1970 +0000 HG: @@ -0,0 +1,1 @@ HG: +added HG: HG: mods= HG: adds=added HG: dels=removed HG: files=added removed HG: HG: --- a/removed Thu Jan 01 00:00:00 1970 +0000 HG: +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 HG: @@ -1,1 +0,0 @@ HG: -removed HG: HG: mods= HG: adds=added HG: dels=removed HG: files=added removed $ hg status -amr M changed $ hg parents --template "M {file_mods}\nA {file_adds}\nR {file_dels}\n" M A added R removed $ hg rollback -q $ cat >> .hg/hgrc < # disable customizing for subsequent tests > [committemplate] > changeset = > EOF $ cd .. commit copy $ hg init dir2 $ cd dir2 $ echo bleh > bar $ hg add bar $ hg ci -m 'add bar' $ hg cp bar foo $ echo >> bar $ hg ci -m 'cp bar foo; change bar' $ hg debugrename foo foo renamed from bar:26d3ca0dfd18e44d796b564e38dd173c9668d3a9 $ hg debugindex bar rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 6 ..... 0 26d3ca0dfd18 000000000000 000000000000 (re) 1 6 7 ..... 1 d267bddd54f7 26d3ca0dfd18 000000000000 (re) Test making empty commits $ hg commit --config ui.allowemptycommit=True -m "empty commit" $ hg log -r . 
-v --stat changeset: 2:d809f3644287 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 description: empty commit verify pathauditor blocks evil filepaths $ cat > evil-commit.py < from mercurial import ui, hg, context, node > notrc = u".h\u200cg".encode('utf-8') + '/hgrc' > u = ui.ui() > r = hg.repository(u, '.') > def filectxfn(repo, memctx, path): > return context.memfilectx(repo, path, '[hooks]\nupdate = echo owned') > c = context.memctx(r, [r['tip'].node(), node.nullid], > 'evil', [notrc], filectxfn, 0) > r.commitctx(c) > EOF $ $PYTHON evil-commit.py #if windows $ hg co --clean tip abort: path contains illegal component: .h\xe2\x80\x8cg\\hgrc (esc) [255] #else $ hg co --clean tip abort: path contains illegal component: .h\xe2\x80\x8cg/hgrc (esc) [255] #endif $ hg rollback -f repository tip rolled back to revision 2 (undo commit) $ cat > evil-commit.py < from mercurial import ui, hg, context, node > notrc = "HG~1/hgrc" > u = ui.ui() > r = hg.repository(u, '.') > def filectxfn(repo, memctx, path): > return context.memfilectx(repo, path, '[hooks]\nupdate = echo owned') > c = context.memctx(r, [r['tip'].node(), node.nullid], > 'evil', [notrc], filectxfn, 0) > r.commitctx(c) > EOF $ $PYTHON evil-commit.py $ hg co --clean tip abort: path contains illegal component: HG~1/hgrc (glob) [255] $ hg rollback -f repository tip rolled back to revision 2 (undo commit) $ cat > evil-commit.py < from mercurial import ui, hg, context, node > notrc = "HG8B6C~2/hgrc" > u = ui.ui() > r = hg.repository(u, '.') > def filectxfn(repo, memctx, path): > return context.memfilectx(repo, path, '[hooks]\nupdate = echo owned') > c = context.memctx(r, [r['tip'].node(), node.nullid], > 'evil', [notrc], filectxfn, 0) > r.commitctx(c) > EOF $ $PYTHON evil-commit.py $ hg co --clean tip abort: path contains illegal component: HG8B6C~2/hgrc (glob) [255] # test that an unmodified commit template message aborts $ hg init unmodified_commit_template $ cd unmodified_commit_template $ echo foo > foo $ 
hg add foo $ hg commit -m "foo" $ cat >> .hg/hgrc < [committemplate] > changeset.commit = HI THIS IS NOT STRIPPED > HG: this is customized commit template > HG: {extramsg} > {if(activebookmark, > "HG: bookmark '{activebookmark}' is activated\n", > "HG: no bookmark is activated\n")}{subrepos % > "HG: subrepo '{subrepo}' is changed\n"} > EOF $ cat > $TESTTMP/notouching.sh < true > EOF $ echo foo2 > foo2 $ hg add foo2 $ HGEDITOR="sh $TESTTMP/notouching.sh" hg commit abort: commit message unchanged [255] $ cd .. mercurial-3.7.3/tests/test-bookmarks-pushpull.t0000644000175000017500000005640512676531525021307 0ustar mpmmpm00000000000000#require serve $ cat << EOF >> $HGRCPATH > [ui] > logtemplate={rev}:{node|short} {desc|firstline} > [phases] > publish=False > [experimental] > evolution=createmarkers,exchange > # drop me once bundle2 is the default, > # added to get test change early. > bundle2-exp = True > EOF initialize $ hg init a $ cd a $ echo 'test' > test $ hg commit -Am'test' adding test set bookmarks $ hg bookmark X $ hg bookmark Y $ hg bookmark Z import bookmark by name $ hg init ../b $ cd ../b $ hg book Y $ hg book * Y -1:000000000000 $ hg pull ../a pulling from ../a requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files adding remote bookmark X updating bookmark Y adding remote bookmark Z (run 'hg update' to get a working copy) $ hg bookmarks X 0:4e3505fd9583 * Y 0:4e3505fd9583 Z 0:4e3505fd9583 $ hg debugpushkey ../a namespaces bookmarks namespaces obsolete phases $ hg debugpushkey ../a bookmarks X 4e3505fd95835d721066b76e75dbb8cc554d7f77 Y 4e3505fd95835d721066b76e75dbb8cc554d7f77 Z 4e3505fd95835d721066b76e75dbb8cc554d7f77 delete the bookmark to re-pull it $ hg book -d X $ hg pull -B X ../a pulling from ../a no changes found adding remote bookmark X finally no-op pull $ hg pull -B X ../a pulling from ../a no changes found $ hg bookmark X 0:4e3505fd9583 * Y 0:4e3505fd9583 Z 0:4e3505fd9583 
export bookmark by name $ hg bookmark W $ hg bookmark foo $ hg bookmark foobar $ hg push -B W ../a pushing to ../a searching for changes no changes found exporting bookmark W [1] $ hg -R ../a bookmarks W -1:000000000000 X 0:4e3505fd9583 Y 0:4e3505fd9583 * Z 0:4e3505fd9583 delete a remote bookmark $ hg book -d W $ hg push -B W ../a pushing to ../a searching for changes no changes found deleting remote bookmark W [1] push/pull name that doesn't exist $ hg push -B badname ../a pushing to ../a searching for changes bookmark badname does not exist on the local or remote repository! no changes found [2] $ hg pull -B anotherbadname ../a pulling from ../a abort: remote bookmark anotherbadname not found! [255] divergent bookmarks $ cd ../a $ echo c1 > f1 $ hg ci -Am1 adding f1 $ hg book -f @ $ hg book -f X $ hg book @ 1:0d2164f0ce0d * X 1:0d2164f0ce0d Y 0:4e3505fd9583 Z 1:0d2164f0ce0d $ cd ../b $ hg up 1 files updated, 0 files merged, 0 files removed, 0 files unresolved updating bookmark foobar $ echo c2 > f2 $ hg ci -Am2 adding f2 $ hg book -if @ $ hg book -if X $ hg book @ 1:9b140be10808 X 1:9b140be10808 Y 0:4e3505fd9583 Z 0:4e3505fd9583 foo -1:000000000000 * foobar 1:9b140be10808 $ hg pull --config paths.foo=../a foo pulling from $TESTTMP/a (glob) searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) divergent bookmark @ stored as @foo divergent bookmark X stored as X@foo updating bookmark Z (run 'hg heads' to see heads, 'hg merge' to merge) $ hg book @ 1:9b140be10808 @foo 2:0d2164f0ce0d X 1:9b140be10808 X@foo 2:0d2164f0ce0d Y 0:4e3505fd9583 Z 2:0d2164f0ce0d foo -1:000000000000 * foobar 1:9b140be10808 (test that too many divergence of bookmark) $ python $TESTDIR/seq.py 1 100 | while read i; do hg bookmarks -r 000000000000 "X@${i}"; done $ hg pull ../a pulling from ../a searching for changes no changes found warning: failed to assign numbered name to divergent bookmark X divergent bookmark @ 
stored as @1 $ hg bookmarks | grep '^ X' | grep -v ':000000000000' X 1:9b140be10808 X@foo 2:0d2164f0ce0d (test that remotely diverged bookmarks are reused if they aren't changed) $ hg bookmarks | grep '^ @' @ 1:9b140be10808 @1 2:0d2164f0ce0d @foo 2:0d2164f0ce0d $ hg pull ../a pulling from ../a searching for changes no changes found warning: failed to assign numbered name to divergent bookmark X divergent bookmark @ stored as @1 $ hg bookmarks | grep '^ @' @ 1:9b140be10808 @1 2:0d2164f0ce0d @foo 2:0d2164f0ce0d $ python $TESTDIR/seq.py 1 100 | while read i; do hg bookmarks -d "X@${i}"; done $ hg bookmarks -d "@1" $ hg push -f ../a pushing to ../a searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) $ hg -R ../a book @ 1:0d2164f0ce0d * X 1:0d2164f0ce0d Y 0:4e3505fd9583 Z 1:0d2164f0ce0d explicit pull should overwrite the local version (issue4439) $ hg pull --config paths.foo=../a foo -B X pulling from $TESTTMP/a (glob) no changes found divergent bookmark @ stored as @foo importing bookmark X reinstall state for further testing: $ hg book -fr 9b140be10808 X revsets should not ignore divergent bookmarks $ hg bookmark -fr 1 Z $ hg log -r 'bookmark()' --template '{rev}:{node|short} {bookmarks}\n' 0:4e3505fd9583 Y 1:9b140be10808 @ X Z foobar 2:0d2164f0ce0d @foo X@foo $ hg log -r 'bookmark("X@foo")' --template '{rev}:{node|short} {bookmarks}\n' 2:0d2164f0ce0d @foo X@foo $ hg log -r 'bookmark("re:X@foo")' --template '{rev}:{node|short} {bookmarks}\n' 2:0d2164f0ce0d @foo X@foo update a remote bookmark from a non-head to a head $ hg up -q Y $ echo c3 > f2 $ hg ci -Am3 adding f2 created new head $ hg push ../a pushing to ../a searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) updating bookmark Y $ hg -R ../a book @ 1:0d2164f0ce0d * X 1:0d2164f0ce0d Y 3:f6fc62dde3c0 Z 1:0d2164f0ce0d update a bookmark in the middle of a 
client pulling changes $ cd .. $ hg clone -q a pull-race We want to use http because it is stateless and therefore more susceptible to race conditions $ hg -R pull-race serve -p $HGPORT -d --pid-file=pull-race.pid -E main-error.log $ cat pull-race.pid >> $DAEMON_PIDS $ hg clone -q http://localhost:$HGPORT/ pull-race2 $ cd pull-race $ hg up -q Y $ echo c4 > f2 $ hg ci -Am4 $ echo c5 > f3 $ cat < .hg/hgrc > [hooks] > outgoing.makecommit = hg ci -Am5; echo committed in pull-race > EOF (new config needs a server restart) $ cd .. $ killdaemons.py $ hg -R pull-race serve -p $HGPORT -d --pid-file=pull-race.pid -E main-error.log $ cat pull-race.pid >> $DAEMON_PIDS $ cd pull-race2 $ hg -R $TESTTMP/pull-race book @ 1:0d2164f0ce0d X 1:0d2164f0ce0d * Y 4:b0a5eff05604 Z 1:0d2164f0ce0d $ hg pull pulling from http://localhost:$HGPORT/ searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating bookmark Y (run 'hg update' to get a working copy) $ hg book * @ 1:0d2164f0ce0d X 1:0d2164f0ce0d Y 4:b0a5eff05604 Z 1:0d2164f0ce0d Update a bookmark right after the initial lookup -B (issue4689) $ echo c6 > ../pull-race/f3 # to be committed during the race $ cat < ../pull-race/.hg/hgrc > [hooks] > # If anything to commit, commit it right after the first key listing used > # during lookup. This makes the commit appear before the actual getbundle > # call. 
> listkeys.makecommit= ((hg st | grep -q M) && (hg commit -m race; echo commited in pull-race)) || exit 0 > EOF (new config need server restart) $ killdaemons.py $ hg -R ../pull-race serve -p $HGPORT -d --pid-file=../pull-race.pid -E main-error.log $ cat ../pull-race.pid >> $DAEMON_PIDS $ hg -R $TESTTMP/pull-race book @ 1:0d2164f0ce0d X 1:0d2164f0ce0d * Y 5:35d1ef0a8d1b Z 1:0d2164f0ce0d $ hg pull -B Y pulling from http://localhost:$HGPORT/ searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating bookmark Y (run 'hg update' to get a working copy) $ hg book * @ 1:0d2164f0ce0d X 1:0d2164f0ce0d Y 5:35d1ef0a8d1b Z 1:0d2164f0ce0d (done with this section of the test) $ killdaemons.py $ cd ../b diverging a remote bookmark fails $ hg up -q 4e3505fd9583 $ echo c4 > f2 $ hg ci -Am4 adding f2 created new head $ echo c5 > f2 $ hg ci -Am5 $ hg log -G @ 5:c922c0139ca0 5 | o 4:4efff6d98829 4 | | o 3:f6fc62dde3c0 3 |/ | o 2:0d2164f0ce0d 1 |/ | o 1:9b140be10808 2 |/ o 0:4e3505fd9583 test $ hg book -f Y $ cat < ../a/.hg/hgrc > [web] > push_ssl = false > allow_push = * > EOF $ hg -R ../a serve -p $HGPORT2 -d --pid-file=../hg2.pid $ cat ../hg2.pid >> $DAEMON_PIDS $ hg push http://localhost:$HGPORT2/ pushing to http://localhost:$HGPORT2/ searching for changes abort: push creates new remote head c922c0139ca0 with bookmark 'Y'! (merge or see "hg help push" for details about pushing new heads) [255] $ hg -R ../a book @ 1:0d2164f0ce0d * X 1:0d2164f0ce0d Y 3:f6fc62dde3c0 Z 1:0d2164f0ce0d Unrelated marker does not alter the decision $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb $ hg push http://localhost:$HGPORT2/ pushing to http://localhost:$HGPORT2/ searching for changes abort: push creates new remote head c922c0139ca0 with bookmark 'Y'! 
(merge or see "hg help push" for details about pushing new heads) [255] $ hg -R ../a book @ 1:0d2164f0ce0d * X 1:0d2164f0ce0d Y 3:f6fc62dde3c0 Z 1:0d2164f0ce0d Update to a successor works $ hg id --debug -r 3 f6fc62dde3c0771e29704af56ba4d8af77abcc2f $ hg id --debug -r 4 4efff6d98829d9c824c621afd6e3f01865f5439f $ hg id --debug -r 5 c922c0139ca03858f655e4a2af4dd02796a63969 tip Y $ hg debugobsolete f6fc62dde3c0771e29704af56ba4d8af77abcc2f cccccccccccccccccccccccccccccccccccccccc $ hg debugobsolete cccccccccccccccccccccccccccccccccccccccc 4efff6d98829d9c824c621afd6e3f01865f5439f $ hg push http://localhost:$HGPORT2/ pushing to http://localhost:$HGPORT2/ searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 2 changesets with 2 changes to 1 files (+1 heads) remote: 2 new obsolescence markers updating bookmark Y $ hg -R ../a book @ 1:0d2164f0ce0d * X 1:0d2164f0ce0d Y 5:c922c0139ca0 Z 1:0d2164f0ce0d hgweb $ cat < .hg/hgrc > [web] > push_ssl = false > allow_push = * > EOF $ hg serve -p $HGPORT -d --pid-file=../hg.pid -E errors.log $ cat ../hg.pid >> $DAEMON_PIDS $ cd ../a $ hg debugpushkey http://localhost:$HGPORT/ namespaces bookmarks namespaces obsolete phases $ hg debugpushkey http://localhost:$HGPORT/ bookmarks @ 9b140be1080824d768c5a4691a564088eede71f9 X 9b140be1080824d768c5a4691a564088eede71f9 Y c922c0139ca03858f655e4a2af4dd02796a63969 Z 9b140be1080824d768c5a4691a564088eede71f9 foo 0000000000000000000000000000000000000000 foobar 9b140be1080824d768c5a4691a564088eede71f9 $ hg out -B http://localhost:$HGPORT/ comparing with http://localhost:$HGPORT/ searching for changed bookmarks @ 0d2164f0ce0d X 0d2164f0ce0d Z 0d2164f0ce0d foo foobar $ hg push -B Z http://localhost:$HGPORT/ pushing to http://localhost:$HGPORT/ searching for changes no changes found updating bookmark Z [1] $ hg book -d Z $ hg in -B http://localhost:$HGPORT/ comparing with http://localhost:$HGPORT/ searching for changed bookmarks @ 9b140be10808 
X 9b140be10808 Z 0d2164f0ce0d foo 000000000000 foobar 9b140be10808 $ hg pull -B Z http://localhost:$HGPORT/ pulling from http://localhost:$HGPORT/ no changes found divergent bookmark @ stored as @1 divergent bookmark X stored as X@1 adding remote bookmark Z adding remote bookmark foo adding remote bookmark foobar $ hg clone http://localhost:$HGPORT/ cloned-bookmarks requesting all changes adding changesets adding manifests adding file changes added 5 changesets with 5 changes to 3 files (+2 heads) 2 new obsolescence markers updating to bookmark @ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R cloned-bookmarks bookmarks * @ 1:9b140be10808 X 1:9b140be10808 Y 4:c922c0139ca0 Z 2:0d2164f0ce0d foo -1:000000000000 foobar 1:9b140be10808 $ cd .. Test to show result of bookmarks comparision $ mkdir bmcomparison $ cd bmcomparison $ hg init source $ hg -R source debugbuilddag '+2*2*3*4' $ hg -R source log -G --template '{rev}:{node|short}' o 4:e7bd5218ca15 | | o 3:6100d3090acf |/ | o 2:fa942426a6fd |/ | o 1:66f7d451a68b |/ o 0:1ea73414a91b $ hg -R source bookmarks -r 0 SAME $ hg -R source bookmarks -r 0 ADV_ON_REPO1 $ hg -R source bookmarks -r 0 ADV_ON_REPO2 $ hg -R source bookmarks -r 0 DIFF_ADV_ON_REPO1 $ hg -R source bookmarks -r 0 DIFF_ADV_ON_REPO2 $ hg -R source bookmarks -r 1 DIVERGED $ hg clone -U source repo1 (test that incoming/outgoing exit with 1, if there is no bookmark to be exchanged) $ hg -R repo1 incoming -B comparing with $TESTTMP/bmcomparison/source searching for changed bookmarks no changed bookmarks found [1] $ hg -R repo1 outgoing -B comparing with $TESTTMP/bmcomparison/source searching for changed bookmarks no changed bookmarks found [1] $ hg -R repo1 bookmarks -f -r 1 ADD_ON_REPO1 $ hg -R repo1 bookmarks -f -r 2 ADV_ON_REPO1 $ hg -R repo1 bookmarks -f -r 3 DIFF_ADV_ON_REPO1 $ hg -R repo1 bookmarks -f -r 3 DIFF_DIVERGED $ hg -R repo1 -q --config extensions.mq= strip 4 $ hg -R repo1 log -G --template '{node|short} 
({bookmarks})' o 6100d3090acf (DIFF_ADV_ON_REPO1 DIFF_DIVERGED) | | o fa942426a6fd (ADV_ON_REPO1) |/ | o 66f7d451a68b (ADD_ON_REPO1 DIVERGED) |/ o 1ea73414a91b (ADV_ON_REPO2 DIFF_ADV_ON_REPO2 SAME) $ hg clone -U source repo2 $ hg -R repo2 bookmarks -f -r 1 ADD_ON_REPO2 $ hg -R repo2 bookmarks -f -r 1 ADV_ON_REPO2 $ hg -R repo2 bookmarks -f -r 2 DIVERGED $ hg -R repo2 bookmarks -f -r 4 DIFF_ADV_ON_REPO2 $ hg -R repo2 bookmarks -f -r 4 DIFF_DIVERGED $ hg -R repo2 -q --config extensions.mq= strip 3 $ hg -R repo2 log -G --template '{node|short} ({bookmarks})' o e7bd5218ca15 (DIFF_ADV_ON_REPO2 DIFF_DIVERGED) | | o fa942426a6fd (DIVERGED) |/ | o 66f7d451a68b (ADD_ON_REPO2 ADV_ON_REPO2) |/ o 1ea73414a91b (ADV_ON_REPO1 DIFF_ADV_ON_REPO1 SAME) (test that difference of bookmarks between repositories are fully shown) $ hg -R repo1 incoming -B repo2 -v comparing with repo2 searching for changed bookmarks ADD_ON_REPO2 66f7d451a68b added ADV_ON_REPO2 66f7d451a68b advanced DIFF_ADV_ON_REPO2 e7bd5218ca15 changed DIFF_DIVERGED e7bd5218ca15 changed DIVERGED fa942426a6fd diverged $ hg -R repo1 outgoing -B repo2 -v comparing with repo2 searching for changed bookmarks ADD_ON_REPO1 66f7d451a68b added ADD_ON_REPO2 deleted ADV_ON_REPO1 fa942426a6fd advanced DIFF_ADV_ON_REPO1 6100d3090acf advanced DIFF_ADV_ON_REPO2 1ea73414a91b changed DIFF_DIVERGED 6100d3090acf changed DIVERGED 66f7d451a68b diverged $ hg -R repo2 incoming -B repo1 -v comparing with repo1 searching for changed bookmarks ADD_ON_REPO1 66f7d451a68b added ADV_ON_REPO1 fa942426a6fd advanced DIFF_ADV_ON_REPO1 6100d3090acf changed DIFF_DIVERGED 6100d3090acf changed DIVERGED 66f7d451a68b diverged $ hg -R repo2 outgoing -B repo1 -v comparing with repo1 searching for changed bookmarks ADD_ON_REPO1 deleted ADD_ON_REPO2 66f7d451a68b added ADV_ON_REPO2 66f7d451a68b advanced DIFF_ADV_ON_REPO1 1ea73414a91b changed DIFF_ADV_ON_REPO2 e7bd5218ca15 advanced DIFF_DIVERGED e7bd5218ca15 changed DIVERGED fa942426a6fd diverged $ cd .. 
Pushing a bookmark should only push the changes required by that bookmark, not all outgoing changes: $ hg clone http://localhost:$HGPORT/ addmarks requesting all changes adding changesets adding manifests adding file changes added 5 changesets with 5 changes to 3 files (+2 heads) 2 new obsolescence markers updating to bookmark @ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd addmarks $ echo foo > foo $ hg add foo $ hg commit -m 'add foo' $ echo bar > bar $ hg add bar $ hg commit -m 'add bar' $ hg co "tip^" 0 files updated, 0 files merged, 1 files removed, 0 files unresolved (leaving bookmark @) $ hg book add-foo $ hg book -r tip add-bar Note: this push *must* push only a single changeset, as that's the point of this test. $ hg push -B add-foo --traceback pushing to http://localhost:$HGPORT/ searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files exporting bookmark add-foo pushing a new bookmark on a new head does not require -f if -B is specified $ hg up -q X $ hg book W $ echo c5 > f2 $ hg ci -Am5 created new head $ hg push -B W pushing to http://localhost:$HGPORT/ searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files (+1 heads) exporting bookmark W $ hg -R ../b id -r W cc978a373a53 tip W pushing an existing but divergent bookmark with -B still requires -f $ hg clone -q . r $ hg up -q X $ echo 1 > f2 $ hg ci -qAml $ cd r $ hg up -q X $ echo 2 > f2 $ hg ci -qAmr $ hg push -B X pushing to $TESTTMP/addmarks (glob) searching for changes remote has heads on branch 'default' that are not known locally: a2a606d9ff1b abort: push creates new remote head 54694f811df9 with bookmark 'X'! (pull and merge or see "hg help push" for details about pushing new heads) [255] $ cd .. 
Check summary output for incoming/outgoing bookmarks $ hg bookmarks -d X $ hg bookmarks -d Y $ hg summary --remote | grep '^remote:' remote: *, 2 incoming bookmarks, 1 outgoing bookmarks (glob) $ cd .. pushing an unchanged bookmark should result in no changes $ hg init unchanged-a $ hg init unchanged-b $ cd unchanged-a $ echo initial > foo $ hg commit -A -m initial adding foo $ hg bookmark @ $ hg push -B @ ../unchanged-b pushing to ../unchanged-b searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files exporting bookmark @ $ hg push -B @ ../unchanged-b pushing to ../unchanged-b searching for changes no changes found [1] Check hook preventing push (issue4455) ====================================== $ hg bookmarks * @ 0:55482a6fb4b1 $ hg log -G @ 0:55482a6fb4b1 initial $ hg init ../issue4455-dest $ hg push ../issue4455-dest # changesets only pushing to ../issue4455-dest searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files $ cat >> .hg/hgrc << EOF > [paths] > local=../issue4455-dest/ > ssh=ssh://user@dummy/issue4455-dest > http=http://localhost:$HGPORT/ > [ui] > ssh=python "$TESTDIR/dummyssh" > EOF $ cat >> ../issue4455-dest/.hg/hgrc << EOF > [hooks] > prepushkey=false > [web] > push_ssl = false > allow_push = * > EOF $ killdaemons.py $ hg -R ../issue4455-dest serve -p $HGPORT -d --pid-file=../issue4455.pid -E ../issue4455-error.log $ cat ../issue4455.pid >> $DAEMON_PIDS Local push ---------- $ hg push -B @ local pushing to $TESTTMP/issue4455-dest (glob) searching for changes no changes found pushkey-abort: prepushkey hook exited with status 1 abort: exporting bookmark @ failed! 
[255] $ hg -R ../issue4455-dest/ bookmarks no bookmarks set Using ssh --------- $ hg push -B @ ssh --config experimental.bundle2-exp=True pushing to ssh://user@dummy/issue4455-dest searching for changes no changes found remote: pushkey-abort: prepushkey hook exited with status 1 abort: exporting bookmark @ failed! [255] $ hg -R ../issue4455-dest/ bookmarks no bookmarks set $ hg push -B @ ssh --config experimental.bundle2-exp=False pushing to ssh://user@dummy/issue4455-dest searching for changes no changes found remote: pushkey-abort: prepushkey hook exited with status 1 exporting bookmark @ failed! [1] $ hg -R ../issue4455-dest/ bookmarks no bookmarks set Using http ---------- $ hg push -B @ http --config experimental.bundle2-exp=True pushing to http://localhost:$HGPORT/ searching for changes no changes found remote: pushkey-abort: prepushkey hook exited with status 1 abort: exporting bookmark @ failed! [255] $ hg -R ../issue4455-dest/ bookmarks no bookmarks set $ hg push -B @ http --config experimental.bundle2-exp=False pushing to http://localhost:$HGPORT/ searching for changes no changes found remote: pushkey-abort: prepushkey hook exited with status 1 exporting bookmark @ failed! [1] $ hg -R ../issue4455-dest/ bookmarks no bookmarks set mercurial-3.7.3/tests/test-mq-qrename.t0000644000175000017500000000350612676531525017502 0ustar mpmmpm00000000000000 $ echo "[extensions]" >> $HGRCPATH $ echo "mq=" >> $HGRCPATH $ hg init a $ cd a $ echo 'base' > base $ hg ci -Ambase adding base $ hg qnew -mmqbase mqbase $ hg qrename mqbase renamed $ mkdir .hg/patches/foo $ hg qrename renamed foo $ hg qseries foo/renamed $ ls .hg/patches/foo renamed $ mkdir .hg/patches/bar $ hg qrename foo/renamed bar $ hg qseries bar/renamed $ ls .hg/patches/bar renamed $ hg qrename bar/renamed baz $ hg qseries baz $ ls .hg/patches/baz .hg/patches/baz $ hg qrename baz new/dir $ hg qseries new/dir $ ls .hg/patches/new/dir .hg/patches/new/dir $ cd .. 
Test patch being renamed before committed: $ hg init b $ cd b $ hg qinit -c $ hg qnew x $ hg qrename y $ hg qcommit -m rename $ cd .. Test overlapping renames (issue2388) $ hg init c $ cd c $ hg qinit -c $ echo a > a $ hg add adding a $ hg qnew patcha $ echo b > b $ hg add adding b $ hg qnew patchb $ hg ci --mq -m c1 $ hg qrename patchb patchc $ hg qrename patcha patchb $ hg st --mq M patchb M series A patchc R patcha $ cd .. Test renames with mq repo (issue2097) $ hg init issue2097 $ cd issue2097 $ hg qnew p0 $ (cd .hg/patches && hg init) $ hg qren p0 p1 $ hg debugstate --mq $ hg ci --mq -mq0 nothing changed [1] $ cd .. Test renaming to a folded patch (issue3058) $ hg init issue3058 $ cd issue3058 $ hg init --mq $ echo a > a $ hg add a $ hg qnew adda $ echo b >> a $ hg qnew addb $ hg qpop popping addb now at: adda $ hg ci --mq -m "save mq" $ hg qfold addb $ hg qmv addb $ cat .hg/patches/addb # HG changeset patch # Parent 0000000000000000000000000000000000000000 diff -r 000000000000 a --- /dev/null * (glob) +++ b/a * (glob) @@ -0,0 +1,2 @@ +a +b $ cd .. 
mercurial-3.7.3/tests/test-export.t0000644000175000017500000001242712676531525016762 0ustar mpmmpm00000000000000 $ hg init repo $ cd repo $ touch foo $ hg add foo $ for i in 0 1 2 3 4 5 6 7 8 9 10 11; do > echo "foo-$i" >> foo > hg ci -m "foo-$i" > done $ for out in "%nof%N" "%%%H" "%b-%R" "%h" "%r" "%m"; do > echo > echo "# foo-$out.patch" > hg export -v -o "foo-$out.patch" 2:tip > done # foo-%nof%N.patch exporting patches: foo-01of10.patch foo-02of10.patch foo-03of10.patch foo-04of10.patch foo-05of10.patch foo-06of10.patch foo-07of10.patch foo-08of10.patch foo-09of10.patch foo-10of10.patch # foo-%%%H.patch exporting patches: foo-%617188a1c80f869a7b66c85134da88a6fb145f67.patch foo-%dd41a5ff707a5225204105611ba49cc5c229d55f.patch foo-%f95a5410f8664b6e1490a4af654e4b7d41a7b321.patch foo-%4346bcfde53b4d9042489078bcfa9c3e28201db2.patch foo-%afda8c3a009cc99449a05ad8aa4655648c4ecd34.patch foo-%35284ce2b6b99c9d2ac66268fe99e68e1974e1aa.patch foo-%9688c41894e6931305fa7165a37f6568050b4e9b.patch foo-%747d3c68f8ec44bb35816bfcd59aeb50b9654c2f.patch foo-%5f17a83f5fbd9414006a5e563eab4c8a00729efd.patch foo-%f3acbafac161ec68f1598af38f794f28847ca5d3.patch # foo-%b-%R.patch exporting patches: foo-repo-2.patch foo-repo-3.patch foo-repo-4.patch foo-repo-5.patch foo-repo-6.patch foo-repo-7.patch foo-repo-8.patch foo-repo-9.patch foo-repo-10.patch foo-repo-11.patch # foo-%h.patch exporting patches: foo-617188a1c80f.patch foo-dd41a5ff707a.patch foo-f95a5410f866.patch foo-4346bcfde53b.patch foo-afda8c3a009c.patch foo-35284ce2b6b9.patch foo-9688c41894e6.patch foo-747d3c68f8ec.patch foo-5f17a83f5fbd.patch foo-f3acbafac161.patch # foo-%r.patch exporting patches: foo-02.patch foo-03.patch foo-04.patch foo-05.patch foo-06.patch foo-07.patch foo-08.patch foo-09.patch foo-10.patch foo-11.patch # foo-%m.patch exporting patches: foo-foo_2.patch foo-foo_3.patch foo-foo_4.patch foo-foo_5.patch foo-foo_6.patch foo-foo_7.patch foo-foo_8.patch foo-foo_9.patch foo-foo_10.patch foo-foo_11.patch Doing it 
again clobbers the files rather than appending: $ hg export -v -o "foo-%m.patch" 2:3 exporting patches: foo-foo_2.patch foo-foo_3.patch $ grep HG foo-foo_2.patch | wc -l \s*1 (re) $ grep HG foo-foo_3.patch | wc -l \s*1 (re) Exporting 4 changesets to a file: $ hg export -o export_internal 1 2 3 4 $ grep HG export_internal | wc -l \s*4 (re) Doing it again clobbers the file rather than appending: $ hg export -o export_internal 1 2 3 4 $ grep HG export_internal | wc -l \s*4 (re) Exporting 4 changesets to stdout: $ hg export 1 2 3 4 | grep HG | wc -l \s*4 (re) Exporting revision -2 to a file: $ hg export -- -2 # HG changeset patch # User test # Date 0 0 # Thu Jan 01 00:00:00 1970 +0000 # Node ID 5f17a83f5fbd9414006a5e563eab4c8a00729efd # Parent 747d3c68f8ec44bb35816bfcd59aeb50b9654c2f foo-10 diff -r 747d3c68f8ec -r 5f17a83f5fbd foo --- a/foo Thu Jan 01 00:00:00 1970 +0000 +++ b/foo Thu Jan 01 00:00:00 1970 +0000 @@ -8,3 +8,4 @@ foo-7 foo-8 foo-9 +foo-10 No filename should be printed if stdout is specified explicitly: $ hg export -v 1 -o - exporting patch: # HG changeset patch # User test # Date 0 0 # Thu Jan 01 00:00:00 1970 +0000 # Node ID d1c9656e973cfb5aebd5499bbd2cb350e3b12266 # Parent 871558de6af2e8c244222f8eea69b782c94ce3df foo-1 diff -r 871558de6af2 -r d1c9656e973c foo --- a/foo Thu Jan 01 00:00:00 1970 +0000 +++ b/foo Thu Jan 01 00:00:00 1970 +0000 @@ -1,1 +1,2 @@ foo-0 +foo-1 Checking if only alphanumeric characters are used in the file name (%m option): $ echo "line" >> foo $ hg commit -m " !\"#$%&(,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_\`abcdefghijklmnopqrstuvwxyz{|}~" $ hg export -v -o %m.patch tip exporting patch: ____________0123456789_______ABCDEFGHIJKLMNOPQRSTUVWXYZ______abcdefghijklmnopqrstuvwxyz____.patch Catch exporting unknown revisions (especially empty revsets, see issue3353) $ hg export # HG changeset patch # User test # Date 0 0 # Thu Jan 01 00:00:00 1970 +0000 # Node ID 197ecd81a57f760b54f34a58817ad5b04991fa47 # Parent 
f3acbafac161ec68f1598af38f794f28847ca5d3 !"#$%&(,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~ diff -r f3acbafac161 -r 197ecd81a57f foo --- a/foo Thu Jan 01 00:00:00 1970 +0000 +++ b/foo Thu Jan 01 00:00:00 1970 +0000 @@ -10,3 +10,4 @@ foo-9 foo-10 foo-11 +line $ hg export "" hg: parse error: empty query [255] $ hg export 999 abort: unknown revision '999'! [255] $ hg export "not all()" abort: export requires at least one changeset [255] Check for color output $ cat <> $HGRCPATH > [color] > mode = ansi > [extensions] > color = > EOF $ hg export --color always --nodates tip # HG changeset patch # User test # Date 0 0 # Thu Jan 01 00:00:00 1970 +0000 # Node ID * (glob) # Parent * (glob) !"#$%&(,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~ \x1b[0;1mdiff -r f3acbafac161 -r 197ecd81a57f foo\x1b[0m (esc) \x1b[0;31;1m--- a/foo\x1b[0m (esc) \x1b[0;32;1m+++ b/foo\x1b[0m (esc) \x1b[0;35m@@ -10,3 +10,4 @@\x1b[0m (esc) foo-9 foo-10 foo-11 \x1b[0;32m+line\x1b[0m (esc) $ cd .. mercurial-3.7.3/tests/test-mq.t0000644000175000017500000007573112676531525016065 0ustar mpmmpm00000000000000 $ checkundo() > { > if [ -f .hg/store/undo ]; then > echo ".hg/store/undo still exists after $1" > fi > } $ cat <> $HGRCPATH > [extensions] > mq = > [mq] > plain = true > EOF help $ hg help mq mq extension - manage a stack of patches This extension lets you work with a stack of patches in a Mercurial repository. It manages two stacks of patches - all known patches, and applied patches (subset of known patches). Known patches are represented as patch files in the .hg/patches directory. Applied patches are both patch files and changesets. 
Common tasks (use 'hg help command' for more details): create new patch qnew import existing patch qimport print patch series qseries print applied patches qapplied add known patch to applied stack qpush remove patch from applied stack qpop refresh contents of top applied patch qrefresh By default, mq will automatically use git patches when required to avoid losing file mode changes, copy records, binary files or empty files creations or deletions. This behavior can be configured with: [mq] git = auto/keep/yes/no If set to 'keep', mq will obey the [diff] section configuration while preserving existing git patches upon qrefresh. If set to 'yes' or 'no', mq will override the [diff] section and always generate git or regular patches, possibly losing data in the second case. It may be desirable for mq changesets to be kept in the secret phase (see 'hg help phases'), which can be enabled with the following setting: [mq] secret = True You will by default be managing a patch queue named "patches". You can create other, independent patch queues with the 'hg qqueue' command. If the working directory contains uncommitted files, qpush, qpop and qgoto abort immediately. If -f/--force is used, the changes are discarded. Setting: [mq] keepchanges = True make them behave as if --keep-changes were passed, and non-conflicting local changes will be tolerated and preserved. If incompatible options such as -f/--force or --exact are passed, this setting is ignored. This extension used to provide a strip command. This command now lives in the strip extension. 
list of commands: qapplied print the patches already applied qclone clone main and patch repository at same time qdelete remove patches from queue qdiff diff of the current patch and subsequent modifications qfinish move applied patches into repository history qfold fold the named patches into the current patch qgoto push or pop patches until named patch is at top of stack qguard set or print guards for a patch qheader print the header of the topmost or specified patch qimport import a patch or existing changeset qnew create a new patch qnext print the name of the next pushable patch qpop pop the current patch off the stack qprev print the name of the preceding applied patch qpush push the next patch onto the stack qqueue manage multiple patch queues qrefresh update the current patch qrename rename a patch qselect set or print guarded patches to push qseries print the entire series file qtop print the name of the current patch qunapplied print the patches not yet applied (use "hg help -v mq" to show built-in aliases and global options) $ hg init a $ cd a $ echo a > a $ hg ci -Ama adding a $ hg clone . ../k updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ mkdir b $ echo z > b/z $ hg ci -Ama adding b/z qinit $ hg qinit $ cd .. $ hg init b -R qinit $ hg -R b qinit $ hg init c qinit -c $ hg --cwd c qinit -c $ hg -R c/.hg/patches st A .hgignore A series qinit; qinit -c $ hg init d $ cd d $ hg qinit $ hg qinit -c qinit -c should create both files if they don't exist $ cat .hg/patches/.hgignore ^\.hg ^\.mq syntax: glob status guards $ cat .hg/patches/series $ hg qinit -c abort: repository $TESTTMP/d/.hg/patches already exists! (glob) [255] $ cd .. 
$ echo '% qinit; ; qinit -c' % qinit; ; qinit -c $ hg init e $ cd e $ hg qnew A $ checkundo qnew $ echo foo > foo $ hg phase -r qbase 0: draft $ hg add foo $ hg qrefresh $ hg phase -r qbase 0: draft $ hg qnew B $ echo >> foo $ hg qrefresh $ echo status >> .hg/patches/.hgignore $ echo bleh >> .hg/patches/.hgignore $ hg qinit -c adding .hg/patches/A (glob) adding .hg/patches/B (glob) $ hg -R .hg/patches status A .hgignore A A A B A series qinit -c shouldn't touch these files if they already exist $ cat .hg/patches/.hgignore status bleh $ cat .hg/patches/series A B add an untracked file $ echo >> .hg/patches/flaf status --mq with color (issue2096) $ hg status --mq --config extensions.color= --config color.mode=ansi --color=always \x1b[0;32;1mA \x1b[0m\x1b[0;32;1m.hgignore\x1b[0m (esc) \x1b[0;32;1mA \x1b[0m\x1b[0;32;1mA\x1b[0m (esc) \x1b[0;32;1mA \x1b[0m\x1b[0;32;1mB\x1b[0m (esc) \x1b[0;32;1mA \x1b[0m\x1b[0;32;1mseries\x1b[0m (esc) \x1b[0;35;1;4m? \x1b[0m\x1b[0;35;1;4mflaf\x1b[0m (esc) try the --mq option on a command provided by an extension $ hg purge --mq --verbose --config extensions.purge= removing file flaf $ cd .. #if no-outer-repo init --mq without repo $ mkdir f $ cd f $ hg init --mq abort: there is no Mercurial repository here (.hg not found) [255] $ cd .. #endif init --mq with repo path $ hg init g $ hg init --mq g $ test -d g/.hg/patches/.hg init --mq with nonexistent directory $ hg init --mq nonexistentdir abort: repository nonexistentdir not found! 
[255] init --mq with bundle (non "local") $ hg -R a bundle --all a.bundle >/dev/null $ hg init --mq a.bundle abort: only a local queue repository may be initialized [255] $ cd a $ hg qnew -m 'foo bar' test.patch $ echo '# comment' > .hg/patches/series.tmp $ echo >> .hg/patches/series.tmp # empty line $ cat .hg/patches/series >> .hg/patches/series.tmp $ mv .hg/patches/series.tmp .hg/patches/series qrefresh $ echo a >> a $ hg qrefresh $ cat .hg/patches/test.patch foo bar diff -r [a-f0-9]* a (re) --- a/a\t(?P.*) (re) \+\+\+ b/a\t(?P.*) (re) @@ -1,1 +1,2 @@ a +a empty qrefresh $ hg qrefresh -X a revision: $ hg diff -r -2 -r -1 patch: $ cat .hg/patches/test.patch foo bar working dir diff: $ hg diff --nodates -q --- a/a +++ b/a @@ -1,1 +1,2 @@ a +a restore things $ hg qrefresh $ checkundo qrefresh qpop $ hg qpop popping test.patch patch queue now empty $ checkundo qpop qpush with dump of tag cache Dump the tag cache to ensure that it has exactly one head after qpush. $ rm -f .hg/cache/tags2-visible $ hg tags > /dev/null .hg/cache/tags2-visible (pre qpush): $ cat .hg/cache/tags2-visible 1 [\da-f]{40} (re) $ hg qpush applying test.patch now at: test.patch $ hg phase -r qbase 2: draft $ hg tags > /dev/null .hg/cache/tags2-visible (post qpush): $ cat .hg/cache/tags2-visible 2 [\da-f]{40} (re) $ checkundo qpush $ cd .. pop/push outside repo $ hg -R a qpop popping test.patch patch queue now empty $ hg -R a qpush applying test.patch now at: test.patch $ cd a $ hg qnew test2.patch qrefresh in subdir $ cd b $ echo a > a $ hg add a $ hg qrefresh pop/push -a in subdir $ hg qpop -a popping test2.patch popping test.patch patch queue now empty $ hg --traceback qpush -a applying test.patch applying test2.patch now at: test2.patch setting columns & formatted tests truncating (issue1912) $ COLUMNS=4 hg qseries --config ui.formatted=true test.patch test2.patch $ COLUMNS=20 hg qseries --config ui.formatted=true -vs 0 A test.patch: f... 
1 A test2.patch: $ hg qpop popping test2.patch now at: test.patch $ hg qseries -vs 0 A test.patch: foo bar 1 U test2.patch: $ hg sum | grep mq mq: 1 applied, 1 unapplied $ hg qpush applying test2.patch now at: test2.patch $ hg sum | grep mq mq: 2 applied $ hg qapplied test.patch test2.patch $ hg qtop test2.patch prev $ hg qapp -1 test.patch next $ hg qunapp -1 all patches applied [1] $ hg qpop popping test2.patch now at: test.patch commit should fail $ hg commit abort: cannot commit over an applied mq patch [255] push should fail if draft $ hg push ../../k pushing to ../../k abort: source has mq patches applied [255] import should fail $ hg st . $ echo foo >> ../a $ hg diff > ../../import.diff $ hg revert --no-backup ../a $ hg import ../../import.diff abort: cannot import over an applied patch [255] $ hg st import --no-commit should succeed $ hg import --no-commit ../../import.diff applying ../../import.diff $ hg st M a $ hg revert --no-backup ../a qunapplied $ hg qunapplied test2.patch qpush/qpop with index $ hg qnew test1b.patch $ echo 1b > 1b $ hg add 1b $ hg qrefresh $ hg qpush 2 applying test2.patch now at: test2.patch $ hg qpop 0 popping test2.patch popping test1b.patch now at: test.patch $ hg qpush test.patch+1 applying test1b.patch now at: test1b.patch $ hg qpush test.patch+2 applying test2.patch now at: test2.patch $ hg qpop test2.patch-1 popping test2.patch now at: test1b.patch $ hg qpop test2.patch-2 popping test1b.patch now at: test.patch $ hg qpush test1b.patch+1 applying test1b.patch applying test2.patch now at: test2.patch qpush --move $ hg qpop -a popping test2.patch popping test1b.patch popping test.patch patch queue now empty $ hg qguard test1b.patch -- -negguard $ hg qguard test2.patch -- +posguard $ hg qpush --move test2.patch # can't move guarded patch cannot push 'test2.patch' - guarded by '+posguard' [1] $ hg qselect posguard number of unguarded, unapplied patches has changed from 2 to 3 $ hg qpush --move test2.patch # move to front applying 
test2.patch now at: test2.patch $ hg qpush --move test1b.patch # negative guard unselected applying test1b.patch now at: test1b.patch $ hg qpush --move test.patch # noop move applying test.patch now at: test.patch $ hg qseries -v 0 A test2.patch 1 A test1b.patch 2 A test.patch $ hg qpop -a popping test.patch popping test1b.patch popping test2.patch patch queue now empty cleaning up $ hg qselect --none guards deactivated number of unguarded, unapplied patches has changed from 3 to 2 $ hg qguard --none test1b.patch $ hg qguard --none test2.patch $ hg qpush --move test.patch applying test.patch now at: test.patch $ hg qpush --move test1b.patch applying test1b.patch now at: test1b.patch $ hg qpush --move bogus # nonexistent patch abort: patch bogus not in series [255] $ hg qpush --move # no patch abort: please specify the patch to move [255] $ hg qpush --move test.patch # already applied abort: cannot push to a previous patch: test.patch [255] $ sed '2i\ > # make qtip index different in series and fullseries > ' `hg root`/.hg/patches/series > $TESTTMP/sedtmp $ cp $TESTTMP/sedtmp `hg root`/.hg/patches/series $ cat `hg root`/.hg/patches/series # comment # make qtip index different in series and fullseries test.patch test1b.patch test2.patch $ hg qpush --move test2.patch applying test2.patch now at: test2.patch series after move $ cat `hg root`/.hg/patches/series # comment # make qtip index different in series and fullseries test.patch test1b.patch test2.patch pop, qapplied, qunapplied $ hg qseries -v 0 A test.patch 1 A test1b.patch 2 A test2.patch qapplied -1 test.patch $ hg qapplied -1 test.patch only one patch applied [1] qapplied -1 test1b.patch $ hg qapplied -1 test1b.patch test.patch qapplied -1 test2.patch $ hg qapplied -1 test2.patch test1b.patch qapplied -1 $ hg qapplied -1 test1b.patch qapplied $ hg qapplied test.patch test1b.patch test2.patch qapplied test1b.patch $ hg qapplied test1b.patch test.patch test1b.patch qunapplied -1 $ hg qunapplied -1 all patches 
applied [1] qunapplied $ hg qunapplied popping $ hg qpop popping test2.patch now at: test1b.patch qunapplied -1 $ hg qunapplied -1 test2.patch qunapplied $ hg qunapplied test2.patch qunapplied test2.patch $ hg qunapplied test2.patch qunapplied -1 test2.patch $ hg qunapplied -1 test2.patch all patches applied [1] popping -a $ hg qpop -a popping test1b.patch popping test.patch patch queue now empty qapplied $ hg qapplied qapplied -1 $ hg qapplied -1 no patches applied [1] $ hg qpush applying test.patch now at: test.patch push should succeed $ hg qpop -a popping test.patch patch queue now empty $ hg push ../../k pushing to ../../k searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files we want to start with some patches applied $ hg qpush -a applying test.patch applying test1b.patch applying test2.patch now at: test2.patch % pops all patches and succeeds $ hg qpop -a popping test2.patch popping test1b.patch popping test.patch patch queue now empty % does nothing and succeeds $ hg qpop -a no patches applied % fails - nothing else to pop $ hg qpop no patches applied [1] % pushes a patch and succeeds $ hg qpush applying test.patch now at: test.patch % pops a patch and succeeds $ hg qpop popping test.patch patch queue now empty % pushes up to test1b.patch and succeeds $ hg qpush test1b.patch applying test.patch applying test1b.patch now at: test1b.patch % does nothing and succeeds $ hg qpush test1b.patch qpush: test1b.patch is already at the top % does nothing and succeeds $ hg qpop test1b.patch qpop: test1b.patch is already at the top % fails - can't push to this patch $ hg qpush test.patch abort: cannot push to a previous patch: test.patch [255] % fails - can't pop to this patch $ hg qpop test2.patch abort: patch test2.patch is not applied [255] % pops up to test.patch and succeeds $ hg qpop test.patch popping test1b.patch now at: test.patch % pushes all patches and succeeds $ hg qpush -a applying 
test1b.patch applying test2.patch now at: test2.patch % does nothing and succeeds $ hg qpush -a all patches are currently applied % fails - nothing else to push $ hg qpush patch series already fully applied [1] % does nothing and succeeds $ hg qpush test2.patch qpush: test2.patch is already at the top strip $ cd ../../b $ echo x>x $ hg ci -Ama adding x $ hg strip tip 0 files updated, 0 files merged, 1 files removed, 0 files unresolved saved backup bundle to $TESTTMP/b/.hg/strip-backup/*-backup.hg (glob) $ hg unbundle .hg/strip-backup/* adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (run 'hg update' to get a working copy) strip with local changes, should complain $ hg up 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo y>y $ hg add y $ hg strip tip abort: local changes found [255] --force strip with local changes $ hg strip -f tip 0 files updated, 0 files merged, 1 files removed, 0 files unresolved saved backup bundle to $TESTTMP/b/.hg/strip-backup/770eb8fce608-0ddcae0f-backup.hg (glob) $ cd .. cd b; hg qrefresh $ hg init refresh $ cd refresh $ echo a > a $ hg ci -Ama adding a $ hg qnew -mfoo foo $ echo a >> a $ hg qrefresh $ mkdir b $ cd b $ echo f > f $ hg add f $ hg qrefresh $ cat ../.hg/patches/foo foo diff -r cb9a9f314b8b a --- a/a\t(?P.*) (re) \+\+\+ b/a\t(?P.*) (re) @@ -1,1 +1,2 @@ a +a diff -r cb9a9f314b8b b/f --- /dev/null\t(?P.*) (re) \+\+\+ b/b/f\t(?P.*) (re) @@ -0,0 +1,1 @@ +f hg qrefresh . $ hg qrefresh . $ cat ../.hg/patches/foo foo diff -r cb9a9f314b8b b/f --- /dev/null\t(?P.*) (re) \+\+\+ b/b/f\t(?P.*) (re) @@ -0,0 +1,1 @@ +f $ hg status M a qpush failure $ cd .. 
$ hg qrefresh $ hg qnew -mbar bar $ echo foo > foo $ echo bar > bar $ hg add foo bar $ hg qrefresh $ hg qpop -a popping bar popping foo patch queue now empty $ echo bar > foo $ hg qpush -a applying foo applying bar file foo already exists 1 out of 1 hunks FAILED -- saving rejects to file foo.rej patch failed, unable to continue (try -v) patch failed, rejects left in working directory errors during apply, please fix and qrefresh bar [2] $ hg st ? foo ? foo.rej mq tags $ hg log --template '{rev} {tags}\n' -r qparent:qtip 0 qparent 1 foo qbase 2 bar qtip tip mq revset $ hg log -r 'mq()' --template '{rev}\n' 1 2 $ hg help revsets | grep -i mq "mq()" Changesets managed by MQ. bad node in status $ hg qpop popping bar now at: foo $ hg strip -qn tip $ hg tip changeset: 0:cb9a9f314b8b tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a $ hg branches default 0:cb9a9f314b8b $ hg qpop no patches applied [1] $ cd .. git patches $ cat >>$HGRCPATH < [diff] > git = True > EOF $ hg init git $ cd git $ hg qinit $ hg qnew -m'new file' new $ echo foo > new #if execbit $ chmod +x new #endif $ hg add new $ hg qrefresh #if execbit $ cat .hg/patches/new new file diff --git a/new b/new new file mode 100755 --- /dev/null +++ b/new @@ -0,0 +1,1 @@ +foo #else $ cat .hg/patches/new new file diff --git a/new b/new new file mode 100644 --- /dev/null +++ b/new @@ -0,0 +1,1 @@ +foo #endif $ hg qnew -m'copy file' copy $ hg cp new copy $ hg qrefresh $ cat .hg/patches/copy copy file diff --git a/new b/copy copy from new copy to copy $ hg qpop popping copy now at: new $ hg qpush applying copy now at: copy $ hg qdiff diff --git a/new b/copy copy from new copy to copy $ cat >>$HGRCPATH < [diff] > git = False > EOF $ hg qdiff --git diff --git a/new b/copy copy from new copy to copy $ cd .. empty lines in status $ hg init emptystatus $ cd emptystatus $ hg qinit $ printf '\n\n' > .hg/patches/status $ hg qser $ cd .. 
bad line in status (without ":") $ hg init badstatus $ cd badstatus $ hg qinit $ printf 'babar has no colon in this line\n' > .hg/patches/status $ hg qser malformated mq status line: ['babar has no colon in this line'] $ cd .. test file addition in slow path $ hg init slow $ cd slow $ hg qinit $ echo foo > foo $ hg add foo $ hg ci -m 'add foo' $ hg qnew bar $ echo bar > bar $ hg add bar $ hg mv foo baz $ hg qrefresh --git $ hg up -C 0 1 files updated, 0 files merged, 2 files removed, 0 files unresolved $ echo >> foo $ hg ci -m 'change foo' created new head $ hg up -C 1 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg qrefresh --git $ cat .hg/patches/bar diff --git a/bar b/bar new file mode 100644 --- /dev/null +++ b/bar @@ -0,0 +1,1 @@ +bar diff --git a/foo b/baz rename from foo rename to baz $ hg log -v --template '{rev} {file_copies}\n' -r . 2 baz (foo) $ hg qrefresh --git $ cat .hg/patches/bar diff --git a/bar b/bar new file mode 100644 --- /dev/null +++ b/bar @@ -0,0 +1,1 @@ +bar diff --git a/foo b/baz rename from foo rename to baz $ hg log -v --template '{rev} {file_copies}\n' -r . 2 baz (foo) $ hg qrefresh $ grep 'diff --git' .hg/patches/bar diff --git a/bar b/bar diff --git a/foo b/baz test file move chains in the slow path $ hg up -C 1 1 files updated, 0 files merged, 2 files removed, 0 files unresolved $ echo >> foo $ hg ci -m 'change foo again' $ hg up -C 2 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg mv bar quux $ hg mv baz bleh $ hg qrefresh --git $ cat .hg/patches/bar diff --git a/foo b/bleh rename from foo rename to bleh diff --git a/quux b/quux new file mode 100644 --- /dev/null +++ b/quux @@ -0,0 +1,1 @@ +bar $ hg log -v --template '{rev} {file_copies}\n' -r . 
3 bleh (foo) $ hg mv quux fred $ hg mv bleh barney $ hg qrefresh --git $ cat .hg/patches/bar diff --git a/foo b/barney rename from foo rename to barney diff --git a/fred b/fred new file mode 100644 --- /dev/null +++ b/fred @@ -0,0 +1,1 @@ +bar $ hg log -v --template '{rev} {file_copies}\n' -r . 3 barney (foo) refresh omitting an added file $ hg qnew baz $ echo newfile > newfile $ hg add newfile $ hg qrefresh $ hg st -A newfile C newfile $ hg qrefresh -X newfile $ hg st -A newfile A newfile $ hg revert newfile $ rm newfile $ hg qpop popping baz now at: bar test qdel/qrm $ hg qdel baz $ echo p >> .hg/patches/series $ hg qrm p $ hg qser bar create a git patch $ echo a > alexander $ hg add alexander $ hg qnew -f --git addalexander $ grep diff .hg/patches/addalexander diff --git a/alexander b/alexander create a git binary patch $ cat > writebin.py < import sys > path = sys.argv[1] > open(path, 'wb').write('BIN\x00ARY') > EOF $ python writebin.py bucephalus $ python "$TESTDIR/md5sum.py" bucephalus 8ba2a2f3e77b55d03051ff9c24ad65e7 bucephalus $ hg add bucephalus $ hg qnew -f --git addbucephalus $ grep diff .hg/patches/addbucephalus diff --git a/bucephalus b/bucephalus check binary patches can be popped and pushed $ hg qpop popping addbucephalus now at: addalexander $ test -f bucephalus && echo % bucephalus should not be there [1] $ hg qpush applying addbucephalus now at: addbucephalus $ test -f bucephalus $ python "$TESTDIR/md5sum.py" bucephalus 8ba2a2f3e77b55d03051ff9c24ad65e7 bucephalus strip again $ cd .. 
$ hg init strip $ cd strip $ touch foo $ hg add foo $ hg ci -m 'add foo' $ echo >> foo $ hg ci -m 'change foo 1' $ hg up -C 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo 1 >> foo $ hg ci -m 'change foo 2' created new head $ HGMERGE=true hg merge merging foo 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m merge $ hg log changeset: 3:99615015637b tag: tip parent: 2:20cbbe65cff7 parent: 1:d2871fc282d4 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: merge changeset: 2:20cbbe65cff7 parent: 0:53245c60e682 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: change foo 2 changeset: 1:d2871fc282d4 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: change foo 1 changeset: 0:53245c60e682 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add foo $ hg strip 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved saved backup bundle to $TESTTMP/strip/.hg/strip-backup/*-backup.hg (glob) $ checkundo strip $ hg log changeset: 1:20cbbe65cff7 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: change foo 2 changeset: 0:53245c60e682 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add foo $ cd .. qclone $ qlog() > { > echo 'main repo:' > hg log --template ' rev {rev}: {desc}\n' > echo 'patch repo:' > hg -R .hg/patches log --template ' rev {rev}: {desc}\n' > } $ hg init qclonesource $ cd qclonesource $ echo foo > foo $ hg add foo $ hg ci -m 'add foo' $ hg qinit $ hg qnew patch1 $ echo bar >> foo $ hg qrefresh -m 'change foo' $ cd .. repo with unversioned patch dir $ hg qclone qclonesource failure abort: versioned patch repository not found (see init --mq) [255] $ cd qclonesource $ hg qinit -c adding .hg/patches/patch1 (glob) $ hg qci -m checkpoint $ qlog main repo: rev 1: change foo rev 0: add foo patch repo: rev 0: checkpoint $ cd .. 
repo with patches applied $ hg qclone qclonesource qclonedest updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd qclonedest $ qlog main repo: rev 0: add foo patch repo: rev 0: checkpoint $ cd .. repo with patches unapplied $ cd qclonesource $ hg qpop -a popping patch1 patch queue now empty $ qlog main repo: rev 0: add foo patch repo: rev 0: checkpoint $ cd .. $ hg qclone qclonesource qclonedest2 updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd qclonedest2 $ qlog main repo: rev 0: add foo patch repo: rev 0: checkpoint $ cd .. Issue1033: test applying on an empty file $ hg init empty $ cd empty $ touch a $ hg ci -Am addempty adding a $ echo a > a $ hg qnew -f -e changea $ hg qpop popping changea patch queue now empty $ hg qpush applying changea now at: changea $ cd .. 
test qpush with --force, issue1087 $ hg init forcepush $ cd forcepush $ echo hello > hello.txt $ echo bye > bye.txt $ hg ci -Ama adding bye.txt adding hello.txt $ hg qnew -d '0 0' empty $ hg qpop popping empty patch queue now empty $ echo world >> hello.txt qpush should fail, local changes $ hg qpush abort: local changes found [255] apply force, should not discard changes with empty patch $ hg qpush -f applying empty patch empty is empty now at: empty $ hg diff --config diff.nodates=True diff -r d58265112590 hello.txt --- a/hello.txt +++ b/hello.txt @@ -1,1 +1,2 @@ hello +world $ hg qdiff --config diff.nodates=True diff -r 9ecee4f634e3 hello.txt --- a/hello.txt +++ b/hello.txt @@ -1,1 +1,2 @@ hello +world $ hg log -l1 -p changeset: 1:d58265112590 tag: empty tag: qbase tag: qtip tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: imported patch empty $ hg qref -d '0 0' $ hg qpop popping empty patch queue now empty $ echo universe >> hello.txt $ echo universe >> bye.txt qpush should fail, local changes $ hg qpush abort: local changes found [255] apply force, should discard changes in hello, but not bye $ hg qpush -f --verbose --config 'ui.origbackuppath=.hg/origbackups' applying empty creating directory: $TESTTMP/forcepush/.hg/origbackups (glob) saving current version of hello.txt as $TESTTMP/forcepush/.hg/origbackups/hello.txt.orig (glob) patching file hello.txt committing files: hello.txt committing manifest committing changelog now at: empty $ hg st M bye.txt $ hg diff --config diff.nodates=True diff -r ba252371dbc1 bye.txt --- a/bye.txt +++ b/bye.txt @@ -1,1 +1,2 @@ bye +universe $ hg qdiff --config diff.nodates=True diff -r 9ecee4f634e3 bye.txt --- a/bye.txt +++ b/bye.txt @@ -1,1 +1,2 @@ bye +universe diff -r 9ecee4f634e3 hello.txt --- a/hello.txt +++ b/hello.txt @@ -1,1 +1,3 @@ hello +world +universe test that the previous call to qpush with -f (--force) and --config actually put the orig files out of the working copy $ ls .hg/origbackups 
hello.txt.orig test popping revisions not in working dir ancestry $ hg qseries -v 0 A empty $ hg up qparent 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg qpop popping empty patch queue now empty $ cd .. $ hg init deletion-order $ cd deletion-order $ touch a $ hg ci -Aqm0 $ hg qnew rename-dir $ hg rm a $ hg qrefresh $ mkdir a b $ touch a/a b/b $ hg add -q a b $ hg qrefresh test popping must remove files added in subdirectories first $ hg qpop popping rename-dir patch queue now empty $ cd .. test case preservation through patch pushing especially on case insensitive filesystem $ hg init casepreserve $ cd casepreserve $ hg qnew add-file1 $ echo a > TeXtFiLe.TxT $ hg add TeXtFiLe.TxT $ hg qrefresh $ hg qnew add-file2 $ echo b > AnOtHeRFiLe.TxT $ hg add AnOtHeRFiLe.TxT $ hg qrefresh $ hg qnew modify-file $ echo c >> AnOtHeRFiLe.TxT $ hg qrefresh $ hg qapplied add-file1 add-file2 modify-file $ hg qpop -a popping modify-file popping add-file2 popping add-file1 patch queue now empty this qpush causes problems below, if case preservation on case insensitive filesystem is not enough: (1) unexpected "adding ..." messages are shown (2) patching fails in modification of (1) files $ hg qpush -a applying add-file1 applying add-file2 applying modify-file now at: modify-file Proper phase default with mq: 1. 
mq.secret=false $ rm .hg/store/phaseroots $ hg phase 'qparent::' -1: public 0: draft 1: draft 2: draft $ echo '[mq]' >> $HGRCPATH $ echo 'secret=true' >> $HGRCPATH $ rm -f .hg/store/phaseroots $ hg phase 'qparent::' -1: public 0: secret 1: secret 2: secret Test that qfinish change phase when mq.secret=true $ hg qfinish qbase patch add-file1 finalized without changeset message $ hg phase 'all()' 0: draft 1: secret 2: secret Test that qfinish respect phases.new-commit setting $ echo '[phases]' >> $HGRCPATH $ echo 'new-commit=secret' >> $HGRCPATH $ hg qfinish qbase patch add-file2 finalized without changeset message $ hg phase 'all()' 0: draft 1: secret 2: secret (restore env for next test) $ sed -e 's/new-commit=secret//' $HGRCPATH > $TESTTMP/sedtmp $ cp $TESTTMP/sedtmp $HGRCPATH $ hg qimport -r 1 --name add-file2 Test that qfinish preserve phase when mq.secret=false $ sed -e 's/secret=true/secret=false/' $HGRCPATH > $TESTTMP/sedtmp $ cp $TESTTMP/sedtmp $HGRCPATH $ hg qfinish qbase patch add-file2 finalized without changeset message $ hg phase 'all()' 0: draft 1: secret 2: secret Test that secret mq patch does not break hgweb $ cat > hgweb.cgi < from mercurial import demandimport; demandimport.enable() > from mercurial.hgweb import hgweb > from mercurial.hgweb import wsgicgi > import cgitb > cgitb.enable() > app = hgweb('.', 'test') > wsgicgi.launch(app) > HGWEB $ . "$TESTDIR/cgienv" #if msys $ PATH_INFO=//tags; export PATH_INFO #else $ PATH_INFO=/tags; export PATH_INFO #endif $ QUERY_STRING='style=raw' $ python hgweb.cgi | grep '^tip' tip [0-9a-f]{40} (re) $ cd .. Test interaction with revset (issue4426) $ hg init issue4426 $ cd issue4426 $ echo a > a $ hg ci -Am a adding a $ echo a >> a $ hg ci -m a $ echo a >> a $ hg ci -m a $ hg qimport -r 0:: reimport things $ hg qimport -r 1:: abort: revision 2 is already managed [255] $ cd .. 
mercurial-3.7.3/tests/test-rebase-obsolete.t0000644000175000017500000004551412676531525020517 0ustar mpmmpm00000000000000========================== Test rebase with obsolete ========================== Enable obsolete $ cat >> $HGRCPATH << EOF > [ui] > logtemplate= {rev}:{node|short} {desc|firstline} > [experimental] > evolution=createmarkers,allowunstable > [phases] > publish=False > [extensions] > rebase= > EOF Setup rebase canonical repo $ hg init base $ cd base $ hg unbundle "$TESTDIR/bundles/rebase.hg" adding changesets adding manifests adding file changes added 8 changesets with 7 changes to 7 files (+2 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg up tip 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -G @ 7:02de42196ebe H | | o 6:eea13746799a G |/| o | 5:24b6387c8c8c F | | | o 4:9520eea781bc E |/ | o 3:32af7686d403 D | | | o 2:5fddd98957c8 C | | | o 1:42ccdea3bb16 B |/ o 0:cd010b8cd998 A $ cd .. simple rebase --------------------------------- $ hg clone base simple updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd simple $ hg up 32af7686d403 3 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg rebase -d eea13746799a rebasing 1:42ccdea3bb16 "B" rebasing 2:5fddd98957c8 "C" rebasing 3:32af7686d403 "D" $ hg log -G @ 10:8eeb3c33ad33 D | o 9:2327fea05063 C | o 8:e4e5be0395b2 B | | o 7:02de42196ebe H | | o | 6:eea13746799a G |\| | o 5:24b6387c8c8c F | | o | 4:9520eea781bc E |/ o 0:cd010b8cd998 A $ hg log --hidden -G @ 10:8eeb3c33ad33 D | o 9:2327fea05063 C | o 8:e4e5be0395b2 B | | o 7:02de42196ebe H | | o | 6:eea13746799a G |\| | o 5:24b6387c8c8c F | | o | 4:9520eea781bc E |/ | x 3:32af7686d403 D | | | x 2:5fddd98957c8 C | | | x 1:42ccdea3bb16 B |/ o 0:cd010b8cd998 A $ hg debugobsolete 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 e4e5be0395b2cbd471ed22a26b1b6a1a0658a794 0 (*) {'user': 'test'} (glob) 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 
2327fea05063f39961b14cb69435a9898dc9a245 0 (*) {'user': 'test'} (glob) 32af7686d403cf45b5d95f2d70cebea587ac806a 8eeb3c33ad33d452c89e5dcf611c347f978fb42b 0 (*) {'user': 'test'} (glob) $ cd .. empty changeset --------------------------------- $ hg clone base empty updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd empty $ hg up eea13746799a 1 files updated, 0 files merged, 1 files removed, 0 files unresolved We make a copy of both the first changeset in the rebased and some other in the set. $ hg graft 42ccdea3bb16 32af7686d403 grafting 1:42ccdea3bb16 "B" grafting 3:32af7686d403 "D" $ hg rebase -s 42ccdea3bb16 -d . rebasing 1:42ccdea3bb16 "B" note: rebase of 1:42ccdea3bb16 created no changes to commit rebasing 2:5fddd98957c8 "C" rebasing 3:32af7686d403 "D" note: rebase of 3:32af7686d403 created no changes to commit $ hg log -G o 10:5ae4c968c6ac C | @ 9:08483444fef9 D | o 8:8877864f1edb B | | o 7:02de42196ebe H | | o | 6:eea13746799a G |\| | o 5:24b6387c8c8c F | | o | 4:9520eea781bc E |/ o 0:cd010b8cd998 A $ hg log --hidden -G o 10:5ae4c968c6ac C | @ 9:08483444fef9 D | o 8:8877864f1edb B | | o 7:02de42196ebe H | | o | 6:eea13746799a G |\| | o 5:24b6387c8c8c F | | o | 4:9520eea781bc E |/ | x 3:32af7686d403 D | | | x 2:5fddd98957c8 C | | | x 1:42ccdea3bb16 B |/ o 0:cd010b8cd998 A $ hg debugobsolete 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (*) {'user': 'test'} (glob) 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (*) {'user': 'test'} (glob) 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (*) {'user': 'test'} (glob) More complex case were part of the rebase set were already rebased $ hg rebase --rev 'desc(D)' --dest 'desc(H)' rebasing 9:08483444fef9 "D" $ hg debugobsolete 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (*) {'user': 'test'} (glob) 
5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (*) {'user': 'test'} (glob) 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (*) {'user': 'test'} (glob) 08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (*) {'user': 'test'} (glob) $ hg log -G @ 11:4596109a6a43 D | | o 10:5ae4c968c6ac C | | | x 9:08483444fef9 D | | | o 8:8877864f1edb B | | o | 7:02de42196ebe H | | | o 6:eea13746799a G |/| o | 5:24b6387c8c8c F | | | o 4:9520eea781bc E |/ o 0:cd010b8cd998 A $ hg rebase --source 'desc(B)' --dest 'tip' --config experimental.rebaseskipobsolete=True rebasing 8:8877864f1edb "B" note: not rebasing 9:08483444fef9 "D", already in destination as 11:4596109a6a43 "D" rebasing 10:5ae4c968c6ac "C" $ hg debugobsolete 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 0 {cd010b8cd998f3981a5a8115f94f8da4ab506089} (*) {'user': 'test'} (glob) 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 5ae4c968c6aca831df823664e706c9d4aa34473d 0 (*) {'user': 'test'} (glob) 32af7686d403cf45b5d95f2d70cebea587ac806a 0 {5fddd98957c8a54a4d436dfe1da9d87f21a1b97b} (*) {'user': 'test'} (glob) 08483444fef91d6224f6655ee586a65d263ad34c 4596109a6a4328c398bde3a4a3b6737cfade3003 0 (*) {'user': 'test'} (glob) 8877864f1edb05d0e07dc4ba77b67a80a7b86672 462a34d07e599b87ea08676a449373fe4e2e1347 0 (*) {'user': 'test'} (glob) 5ae4c968c6aca831df823664e706c9d4aa34473d 98f6af4ee9539e14da4465128f894c274900b6e5 0 (*) {'user': 'test'} (glob) $ hg log --rev 'divergent()' $ hg log -G o 13:98f6af4ee953 C | o 12:462a34d07e59 B | @ 11:4596109a6a43 D | o 7:02de42196ebe H | | o 6:eea13746799a G |/| o | 5:24b6387c8c8c F | | | o 4:9520eea781bc E |/ o 0:cd010b8cd998 A $ hg log --style default --debug -r 4596109a6a4328c398bde3a4a3b6737cfade3003 changeset: 11:4596109a6a4328c398bde3a4a3b6737cfade3003 phase: draft parent: 7:02de42196ebee42ef284b6780a87cdc96e8eaab6 parent: -1:0000000000000000000000000000000000000000 manifest: 
11:a91006e3a02f1edf631f7018e6e5684cf27dd905 user: Nicolas Dumazet date: Sat Apr 30 15:24:48 2011 +0200 files+: D extra: branch=default extra: rebase_source=08483444fef91d6224f6655ee586a65d263ad34c extra: source=32af7686d403cf45b5d95f2d70cebea587ac806a description: D $ hg up -qr 'desc(G)' $ hg graft 4596109a6a4328c398bde3a4a3b6737cfade3003 grafting 11:4596109a6a43 "D" $ hg up -qr 'desc(E)' $ hg rebase -s tip -d . rebasing 14:9e36056a46e3 "D" (tip) $ hg log --style default --debug -r tip changeset: 15:627d4614809036ba22b9e7cb31638ddc06ab99ab tag: tip phase: draft parent: 4:9520eea781bcca16c1e15acc0ba14335a0e8e5ba parent: -1:0000000000000000000000000000000000000000 manifest: 15:648e8ede73ae3e497d093d3a4c8fcc2daa864f42 user: Nicolas Dumazet date: Sat Apr 30 15:24:48 2011 +0200 files+: D extra: branch=default extra: intermediate-source=4596109a6a4328c398bde3a4a3b6737cfade3003 extra: rebase_source=9e36056a46e37c9776168c7375734eebc70e294f extra: source=32af7686d403cf45b5d95f2d70cebea587ac806a description: D $ cd .. 
collapse rebase --------------------------------- $ hg clone base collapse updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd collapse $ hg rebase -s 42ccdea3bb16 -d eea13746799a --collapse rebasing 1:42ccdea3bb16 "B" rebasing 2:5fddd98957c8 "C" rebasing 3:32af7686d403 "D" $ hg log -G o 8:4dc2197e807b Collapsed revision | | @ 7:02de42196ebe H | | o | 6:eea13746799a G |\| | o 5:24b6387c8c8c F | | o | 4:9520eea781bc E |/ o 0:cd010b8cd998 A $ hg log --hidden -G o 8:4dc2197e807b Collapsed revision | | @ 7:02de42196ebe H | | o | 6:eea13746799a G |\| | o 5:24b6387c8c8c F | | o | 4:9520eea781bc E |/ | x 3:32af7686d403 D | | | x 2:5fddd98957c8 C | | | x 1:42ccdea3bb16 B |/ o 0:cd010b8cd998 A $ hg id --debug -r tip 4dc2197e807bae9817f09905b50ab288be2dbbcf tip $ hg debugobsolete 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (*) {'user': 'test'} (glob) 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (*) {'user': 'test'} (glob) 32af7686d403cf45b5d95f2d70cebea587ac806a 4dc2197e807bae9817f09905b50ab288be2dbbcf 0 (*) {'user': 'test'} (glob) $ cd .. Rebase set has hidden descendants --------------------------------- We rebase a changeset which has a hidden changeset. The hidden changeset must not be rebased. 
$ hg clone base hidden updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd hidden $ hg rebase -s 5fddd98957c8 -d eea13746799a rebasing 2:5fddd98957c8 "C" rebasing 3:32af7686d403 "D" $ hg rebase -s 42ccdea3bb16 -d 02de42196ebe rebasing 1:42ccdea3bb16 "B" $ hg log -G o 10:7c6027df6a99 B | | o 9:cf44d2f5a9f4 D | | | o 8:e273c5e7d2d2 C | | @ | 7:02de42196ebe H | | | o 6:eea13746799a G |/| o | 5:24b6387c8c8c F | | | o 4:9520eea781bc E |/ o 0:cd010b8cd998 A $ hg log --hidden -G o 10:7c6027df6a99 B | | o 9:cf44d2f5a9f4 D | | | o 8:e273c5e7d2d2 C | | @ | 7:02de42196ebe H | | | o 6:eea13746799a G |/| o | 5:24b6387c8c8c F | | | o 4:9520eea781bc E |/ | x 3:32af7686d403 D | | | x 2:5fddd98957c8 C | | | x 1:42ccdea3bb16 B |/ o 0:cd010b8cd998 A $ hg debugobsolete 5fddd98957c8a54a4d436dfe1da9d87f21a1b97b e273c5e7d2d29df783dce9f9eaa3ac4adc69c15d 0 (*) {'user': 'test'} (glob) 32af7686d403cf45b5d95f2d70cebea587ac806a cf44d2f5a9f4297a62be94cbdd3dff7c7dc54258 0 (*) {'user': 'test'} (glob) 42ccdea3bb16d28e1848c95fe2e44c000f3f21b1 7c6027df6a99d93f461868e5433f63bde20b6dfb 0 (*) {'user': 'test'} (glob) Test that rewriting leaving instability behind is allowed --------------------------------------------------------------------- $ hg log -r 'children(8)' 9:cf44d2f5a9f4 D (no-eol) $ hg rebase -r 8 rebasing 8:e273c5e7d2d2 "C" $ hg log -G o 11:0d8f238b634c C | o 10:7c6027df6a99 B | | o 9:cf44d2f5a9f4 D | | | x 8:e273c5e7d2d2 C | | @ | 7:02de42196ebe H | | | o 6:eea13746799a G |/| o | 5:24b6387c8c8c F | | | o 4:9520eea781bc E |/ o 0:cd010b8cd998 A Test multiple root handling ------------------------------------ $ hg rebase --dest 4 --rev '7+11+9' rebasing 7:02de42196ebe "H" rebasing 9:cf44d2f5a9f4 "D" not rebasing ignored 10:7c6027df6a99 "B" rebasing 11:0d8f238b634c "C" (tip) $ hg log -G o 14:1e8370e38cca C | | o 13:102b4c1d889b D | | @ | 12:bfe264faf697 H |/ | o 10:7c6027df6a99 B | | | x 7:02de42196ebe H | | +---o 6:eea13746799a G | |/ | o 
5:24b6387c8c8c F | | o | 4:9520eea781bc E |/ o 0:cd010b8cd998 A $ cd .. test on rebase dropping a merge (setup) $ hg init dropmerge $ cd dropmerge $ hg unbundle "$TESTDIR/bundles/rebase.hg" adding changesets adding manifests adding file changes added 8 changesets with 7 changes to 7 files (+2 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg up 3 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge 7 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m 'M' $ echo I > I $ hg add I $ hg ci -m I $ hg log -G @ 9:4bde274eefcf I | o 8:53a6a128b2b7 M |\ | o 7:02de42196ebe H | | | | o 6:eea13746799a G | |/| | o | 5:24b6387c8c8c F | | | | | o 4:9520eea781bc E | |/ o | 3:32af7686d403 D | | o | 2:5fddd98957c8 C | | o | 1:42ccdea3bb16 B |/ o 0:cd010b8cd998 A (actual test) $ hg rebase --dest 6 --rev '((desc(H) + desc(D))::) - desc(M)' rebasing 3:32af7686d403 "D" rebasing 7:02de42196ebe "H" not rebasing ignored 8:53a6a128b2b7 "M" rebasing 9:4bde274eefcf "I" (tip) $ hg log -G @ 12:acd174b7ab39 I | o 11:6c11a6218c97 H | | o 10:b5313c85b22e D |/ | o 8:53a6a128b2b7 M | |\ | | x 7:02de42196ebe H | | | o---+ 6:eea13746799a G | | | | | o 5:24b6387c8c8c F | | | o---+ 4:9520eea781bc E / / x | 3:32af7686d403 D | | o | 2:5fddd98957c8 C | | o | 1:42ccdea3bb16 B |/ o 0:cd010b8cd998 A Test hidden changesets in the rebase set (issue4504) $ hg up --hidden 9 3 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo J > J $ hg add J $ hg commit -m J $ hg debugobsolete `hg log --rev . -T '{node}'` $ hg rebase --rev .~1::. 
--dest 'max(desc(D))' --traceback rebasing 9:4bde274eefcf "I" rebasing 13:06edfc82198f "J" (tip) $ hg log -G @ 15:5ae8a643467b J | o 14:9ad579b4a5de I | | o 12:acd174b7ab39 I | | | o 11:6c11a6218c97 H | | o | 10:b5313c85b22e D |/ | o 8:53a6a128b2b7 M | |\ | | x 7:02de42196ebe H | | | o---+ 6:eea13746799a G | | | | | o 5:24b6387c8c8c F | | | o---+ 4:9520eea781bc E / / x | 3:32af7686d403 D | | o | 2:5fddd98957c8 C | | o | 1:42ccdea3bb16 B |/ o 0:cd010b8cd998 A $ hg up 14 -C 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo "K" > K $ hg add K $ hg commit --amend -m "K" $ echo "L" > L $ hg add L $ hg commit -m "L" $ hg up '.^' 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo "M" > M $ hg add M $ hg commit --amend -m "M" $ hg log -G @ 20:bfaedf8eb73b M | | o 18:97219452e4bd L | | | x 17:fc37a630c901 K |/ | o 15:5ae8a643467b J | | | x 14:9ad579b4a5de I |/ | o 12:acd174b7ab39 I | | | o 11:6c11a6218c97 H | | o | 10:b5313c85b22e D |/ | o 8:53a6a128b2b7 M | |\ | | x 7:02de42196ebe H | | | o---+ 6:eea13746799a G | | | | | o 5:24b6387c8c8c F | | | o---+ 4:9520eea781bc E / / x | 3:32af7686d403 D | | o | 2:5fddd98957c8 C | | o | 1:42ccdea3bb16 B |/ o 0:cd010b8cd998 A $ hg rebase -s 14 -d 18 --config experimental.rebaseskipobsolete=True note: not rebasing 14:9ad579b4a5de "I", already in destination as 17:fc37a630c901 "K" rebasing 15:5ae8a643467b "J" $ cd .. 
Skip obsolete changeset even with multiple hops ----------------------------------------------- setup $ hg init obsskip $ cd obsskip $ cat << EOF >> .hg/hgrc > [experimental] > rebaseskipobsolete = True > [extensions] > strip = > EOF $ echo A > A $ hg add A $ hg commit -m A $ echo B > B $ hg add B $ hg commit -m B0 $ hg commit --amend -m B1 $ hg commit --amend -m B2 $ hg up --hidden 'desc(B0)' 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo C > C $ hg add C $ hg commit -m C Rebase finds its way in a chain of marker $ hg rebase -d 'desc(B2)' note: not rebasing 1:a8b11f55fb19 "B0", already in destination as 3:261e70097290 "B2" rebasing 4:212cb178bcbb "C" (tip) Even when the chain include missing node $ hg up --hidden 'desc(B0)' 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo D > D $ hg add D $ hg commit -m D $ hg --hidden strip -r 'desc(B1)' saved backup bundle to $TESTTMP/obsskip/.hg/strip-backup/86f6414ccda7-b1c452ee-backup.hg (glob) $ hg rebase -d 'desc(B2)' note: not rebasing 1:a8b11f55fb19 "B0", already in destination as 2:261e70097290 "B2" rebasing 5:1a79b7535141 "D" (tip) $ hg up 4 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo "O" > O $ hg add O $ hg commit -m O $ echo "P" > P $ hg add P $ hg commit -m P $ hg log -G @ 8:8d47583e023f P | o 7:360bbaa7d3ce O | | o 6:9c48361117de D | | o | 4:ff2c4d47b71d C |/ o 2:261e70097290 B2 | o 0:4a2df7238c3b A $ hg debugobsolete `hg log -r 7 -T '{node}\n'` --config experimental.evolution=all $ hg rebase -d 6 -r "4::" rebasing 4:ff2c4d47b71d "C" note: not rebasing 7:360bbaa7d3ce "O", it has no successor rebasing 8:8d47583e023f "P" (tip) If all the changeset to be rebased are obsolete and present in the destination, we should display a friendly error message $ hg log -G @ 10:121d9e3bc4c6 P | o 9:4be60e099a77 C | o 6:9c48361117de D | o 2:261e70097290 B2 | o 0:4a2df7238c3b A $ hg up 9 0 files updated, 0 files merged, 1 files removed, 0 files 
unresolved $ echo "non-relevant change" > nonrelevant $ hg add nonrelevant $ hg commit -m nonrelevant created new head $ hg debugobsolete `hg log -r 11 -T '{node}\n'` --config experimental.evolution=all $ hg rebase -r . -d 10 abort: all requested changesets have equivalents or were marked as obsolete (to force the rebase, set the config experimental.rebaseskipobsolete to False) [255] If a rebase is going to create divergence, it should abort $ hg log -G @ 11:f44da1f4954c nonrelevant | | o 10:121d9e3bc4c6 P |/ o 9:4be60e099a77 C | o 6:9c48361117de D | o 2:261e70097290 B2 | o 0:4a2df7238c3b A $ hg up 9 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo "john" > doe $ hg add doe $ hg commit -m "john doe" created new head $ hg up 10 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo "foo" > bar $ hg add bar $ hg commit --amend -m "10'" $ hg up 10 --hidden 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo "bar" > foo $ hg add foo $ hg commit -m "bar foo" $ hg log -G @ 15:73568ab6879d bar foo | | o 14:77d874d096a2 10' | | | | o 12:3eb461388009 john doe | |/ x | 10:121d9e3bc4c6 P |/ o 9:4be60e099a77 C | o 6:9c48361117de D | o 2:261e70097290 B2 | o 0:4a2df7238c3b A $ hg summary parent: 15:73568ab6879d tip bar foo branch: default commit: (clean) update: 2 new changesets, 3 branch heads (merge) phases: 8 draft unstable: 1 changesets $ hg rebase -s 10 -d 12 abort: this rebase will cause divergence (to force the rebase please set rebase.allowdivergence=True) [255] $ hg log -G @ 15:73568ab6879d bar foo | | o 14:77d874d096a2 10' | | | | o 12:3eb461388009 john doe | |/ x | 10:121d9e3bc4c6 P |/ o 9:4be60e099a77 C | o 6:9c48361117de D | o 2:261e70097290 B2 | o 0:4a2df7238c3b A With rebase.allowdivergence=True, rebase can create divergence $ hg rebase -s 10 -d 12 --config rebase.allowdivergence=True rebasing 10:121d9e3bc4c6 "P" rebasing 15:73568ab6879d "bar foo" (tip) $ hg summary parent: 17:61bd55f69bc4 tip 
bar foo branch: default commit: (clean) update: 1 new changesets, 2 branch heads (merge) phases: 8 draft divergent: 2 changesets mercurial-3.7.3/tests/test-acl.t0000644000175000017500000022507212676531525016202 0ustar mpmmpm00000000000000 > do_push() > { > user=$1 > shift > echo "Pushing as user $user" > echo 'hgrc = """' > sed -n '/\[[ha]/,$p' b/.hg/hgrc | grep -v fakegroups.py > echo '"""' > if test -f acl.config; then > echo 'acl.config = """' > cat acl.config > echo '"""' > fi > # On AIX /etc/profile sets LOGNAME read-only. So > # LOGNAME=$user hg --cws a --debug push ../b > # fails with "This variable is read only." > # Use env to work around this. > env LOGNAME=$user hg --cwd a --debug push ../b > hg --cwd b rollback > hg --cwd b --quiet tip > echo > } > init_config() > { > cat > fakegroups.py < from hgext import acl > def fakegetusers(ui, group): > try: > return acl._getusersorig(ui, group) > except: > return ["fred", "betty"] > acl._getusersorig = acl._getusers > acl._getusers = fakegetusers > EOF > rm -f acl.config > cat > $config < [hooks] > pretxnchangegroup.acl = python:hgext.acl.hook > [acl] > sources = push > [extensions] > f=`pwd`/fakegroups.py > EOF > } $ cat << EOF >> $HGRCPATH > [experimental] > # drop me once bundle2 is the default, > # added to get test change early. > bundle2-exp = True > EOF $ hg init a $ cd a $ mkdir foo foo/Bar quux $ echo 'in foo' > foo/file.txt $ echo 'in foo/Bar' > foo/Bar/file.txt $ echo 'in quux' > quux/file.py $ hg add -q $ hg ci -m 'add files' -d '1000000 0' $ echo >> foo/file.txt $ hg ci -m 'change foo/file' -d '1000001 0' $ echo >> foo/Bar/file.txt $ hg ci -m 'change foo/Bar/file' -d '1000002 0' $ echo >> quux/file.py $ hg ci -m 'change quux/file' -d '1000003 0' $ hg tip --quiet 3:911600dab2ae $ cd .. 
$ hg clone -r 0 a b adding changesets adding manifests adding file changes added 1 changesets with 3 changes to 3 files updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ config=b/.hg/hgrc Extension disabled for lack of a hook $ do_push fred Pushing as user fred hgrc = """ """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" listing keys for "bookmarks" 3 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 4 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae adding manifests adding file changes adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 3 changesets with 3 changes to 3 files bundle2-input-part: total payload size 1606 bundle2-input-part: "pushkey" (params: 4 mandatory) supported pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955" bundle2-input-bundle: 3 parts total updating the branch cache bundle2-output-bundle: "HG20", 2 parts total bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload bundle2-input-bundle: 
with-transaction bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported bundle2-input-bundle: 1 parts total listing keys for "phases" repository tip rolled back to revision 0 (undo push) 0:6675d58eff77 $ echo '[hooks]' >> $config $ echo 'pretxnchangegroup.acl = python:hgext.acl.hook' >> $config Extension disabled for lack of acl.sources $ do_push fred Pushing as user fred hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" invalid branchheads cache (served): tip differs listing keys for "bookmarks" 3 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 4 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae adding manifests adding file changes adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 3 changesets with 3 changes to 3 files calling hook pretxnchangegroup.acl: hgext.acl.hook acl: changes have source "push" - skipping bundle2-input-part: total payload size 1606 bundle2-input-part: "pushkey" (params: 4 mandatory) supported pushing key 
for "phases:911600dab2ae7a9baff75958b84fe606851ce955" bundle2-input-bundle: 3 parts total updating the branch cache bundle2-output-bundle: "HG20", 2 parts total bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported bundle2-input-bundle: 1 parts total listing keys for "phases" repository tip rolled back to revision 0 (undo push) 0:6675d58eff77 No [acl.allow]/[acl.deny] $ echo '[acl]' >> $config $ echo 'sources = push' >> $config $ do_push fred Pushing as user fred hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook [acl] sources = push """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" invalid branchheads cache (served): tip differs listing keys for "bookmarks" 3 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 4 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae adding manifests adding file changes adding foo/Bar/file.txt revisions 
adding foo/file.txt revisions adding quux/file.py revisions added 3 changesets with 3 changes to 3 files calling hook pretxnchangegroup.acl: hgext.acl.hook acl: checking access for user "fred" acl: acl.allow.branches not enabled acl: acl.deny.branches not enabled acl: acl.allow not enabled acl: acl.deny not enabled acl: branch access granted: "ef1ea85a6374" on branch "default" acl: path access granted: "ef1ea85a6374" acl: branch access granted: "f9cafe1212c8" on branch "default" acl: path access granted: "f9cafe1212c8" acl: branch access granted: "911600dab2ae" on branch "default" acl: path access granted: "911600dab2ae" bundle2-input-part: total payload size 1606 bundle2-input-part: "pushkey" (params: 4 mandatory) supported pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955" bundle2-input-bundle: 3 parts total updating the branch cache bundle2-output-bundle: "HG20", 2 parts total bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported bundle2-input-bundle: 1 parts total listing keys for "phases" repository tip rolled back to revision 0 (undo push) 0:6675d58eff77 Empty [acl.allow] $ echo '[acl.allow]' >> $config $ do_push fred Pushing as user fred hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook [acl] sources = push [acl.allow] """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" invalid branchheads cache (served): tip differs listing keys for "bookmarks" 3 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 
bundle2-output-bundle: "HG20", 4 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae adding manifests adding file changes adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 3 changesets with 3 changes to 3 files calling hook pretxnchangegroup.acl: hgext.acl.hook acl: checking access for user "fred" acl: acl.allow.branches not enabled acl: acl.deny.branches not enabled acl: acl.allow enabled, 0 entries for user fred acl: acl.deny not enabled acl: branch access granted: "ef1ea85a6374" on branch "default" error: pretxnchangegroup.acl hook failed: acl: user "fred" not allowed on "foo/file.txt" (changeset "ef1ea85a6374") bundle2-input-part: total payload size 1606 bundle2-input-bundle: 3 parts total transaction abort! 
rollback completed abort: acl: user "fred" not allowed on "foo/file.txt" (changeset "ef1ea85a6374") no rollback information available 0:6675d58eff77 fred is allowed inside foo/ $ echo 'foo/** = fred' >> $config $ do_push fred Pushing as user fred hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook [acl] sources = push [acl.allow] foo/** = fred """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" invalid branchheads cache (served): tip differs listing keys for "bookmarks" 3 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 4 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae adding manifests adding file changes adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 3 changesets with 3 changes to 3 files calling hook pretxnchangegroup.acl: hgext.acl.hook acl: checking access for user "fred" acl: acl.allow.branches not enabled acl: acl.deny.branches not enabled acl: acl.allow enabled, 1 entries for user fred acl: acl.deny not enabled acl: branch access granted: "ef1ea85a6374" on branch "default" acl: path access granted: "ef1ea85a6374" acl: branch access granted: "f9cafe1212c8" on 
branch "default" acl: path access granted: "f9cafe1212c8" acl: branch access granted: "911600dab2ae" on branch "default" error: pretxnchangegroup.acl hook failed: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae") bundle2-input-part: total payload size 1606 bundle2-input-bundle: 3 parts total transaction abort! rollback completed abort: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae") no rollback information available 0:6675d58eff77 Empty [acl.deny] $ echo '[acl.deny]' >> $config $ do_push barney Pushing as user barney hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook [acl] sources = push [acl.allow] foo/** = fred [acl.deny] """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" invalid branchheads cache (served): tip differs listing keys for "bookmarks" 3 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 4 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae adding manifests adding file changes adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 3 changesets with 3 changes to 3 files calling hook pretxnchangegroup.acl: hgext.acl.hook 
acl: checking access for user "barney" acl: acl.allow.branches not enabled acl: acl.deny.branches not enabled acl: acl.allow enabled, 0 entries for user barney acl: acl.deny enabled, 0 entries for user barney acl: branch access granted: "ef1ea85a6374" on branch "default" error: pretxnchangegroup.acl hook failed: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374") bundle2-input-part: total payload size 1606 bundle2-input-bundle: 3 parts total transaction abort! rollback completed abort: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374") no rollback information available 0:6675d58eff77 fred is allowed inside foo/, but not foo/bar/ (case matters) $ echo 'foo/bar/** = fred' >> $config $ do_push fred Pushing as user fred hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook [acl] sources = push [acl.allow] foo/** = fred [acl.deny] foo/bar/** = fred """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" invalid branchheads cache (served): tip differs listing keys for "bookmarks" 3 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 4 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae adding 
manifests adding file changes adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 3 changesets with 3 changes to 3 files calling hook pretxnchangegroup.acl: hgext.acl.hook acl: checking access for user "fred" acl: acl.allow.branches not enabled acl: acl.deny.branches not enabled acl: acl.allow enabled, 1 entries for user fred acl: acl.deny enabled, 1 entries for user fred acl: branch access granted: "ef1ea85a6374" on branch "default" acl: path access granted: "ef1ea85a6374" acl: branch access granted: "f9cafe1212c8" on branch "default" acl: path access granted: "f9cafe1212c8" acl: branch access granted: "911600dab2ae" on branch "default" error: pretxnchangegroup.acl hook failed: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae") bundle2-input-part: total payload size 1606 bundle2-input-bundle: 3 parts total transaction abort! rollback completed abort: acl: user "fred" not allowed on "quux/file.py" (changeset "911600dab2ae") no rollback information available 0:6675d58eff77 fred is allowed inside foo/, but not foo/Bar/ $ echo 'foo/Bar/** = fred' >> $config $ do_push fred Pushing as user fred hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook [acl] sources = push [acl.allow] foo/** = fred [acl.deny] foo/bar/** = fred foo/Bar/** = fred """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" invalid branchheads cache (served): tip differs listing keys for "bookmarks" 3 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 4 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: 
"pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae adding manifests adding file changes adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 3 changesets with 3 changes to 3 files calling hook pretxnchangegroup.acl: hgext.acl.hook acl: checking access for user "fred" acl: acl.allow.branches not enabled acl: acl.deny.branches not enabled acl: acl.allow enabled, 1 entries for user fred acl: acl.deny enabled, 2 entries for user fred acl: branch access granted: "ef1ea85a6374" on branch "default" acl: path access granted: "ef1ea85a6374" acl: branch access granted: "f9cafe1212c8" on branch "default" error: pretxnchangegroup.acl hook failed: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8") bundle2-input-part: total payload size 1606 bundle2-input-bundle: 3 parts total transaction abort! 
rollback completed abort: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8") no rollback information available 0:6675d58eff77 $ echo 'barney is not mentioned => not allowed anywhere' barney is not mentioned => not allowed anywhere $ do_push barney Pushing as user barney hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook [acl] sources = push [acl.allow] foo/** = fred [acl.deny] foo/bar/** = fred foo/Bar/** = fred """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" invalid branchheads cache (served): tip differs listing keys for "bookmarks" 3 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 4 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae adding manifests adding file changes adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 3 changesets with 3 changes to 3 files calling hook pretxnchangegroup.acl: hgext.acl.hook acl: checking access for user "barney" acl: acl.allow.branches not enabled acl: acl.deny.branches not enabled acl: acl.allow enabled, 0 entries for user barney acl: acl.deny enabled, 0 entries for user barney acl: branch access granted: 
"ef1ea85a6374" on branch "default" error: pretxnchangegroup.acl hook failed: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374") bundle2-input-part: total payload size 1606 bundle2-input-bundle: 3 parts total transaction abort! rollback completed abort: acl: user "barney" not allowed on "foo/file.txt" (changeset "ef1ea85a6374") no rollback information available 0:6675d58eff77 barney is allowed everywhere $ echo '[acl.allow]' >> $config $ echo '** = barney' >> $config $ do_push barney Pushing as user barney hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook [acl] sources = push [acl.allow] foo/** = fred [acl.deny] foo/bar/** = fred foo/Bar/** = fred [acl.allow] ** = barney """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" invalid branchheads cache (served): tip differs listing keys for "bookmarks" 3 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 4 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae adding manifests adding file changes adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 3 changesets with 3 changes to 3 files calling hook 
pretxnchangegroup.acl: hgext.acl.hook acl: checking access for user "barney" acl: acl.allow.branches not enabled acl: acl.deny.branches not enabled acl: acl.allow enabled, 1 entries for user barney acl: acl.deny enabled, 0 entries for user barney acl: branch access granted: "ef1ea85a6374" on branch "default" acl: path access granted: "ef1ea85a6374" acl: branch access granted: "f9cafe1212c8" on branch "default" acl: path access granted: "f9cafe1212c8" acl: branch access granted: "911600dab2ae" on branch "default" acl: path access granted: "911600dab2ae" bundle2-input-part: total payload size 1606 bundle2-input-part: "pushkey" (params: 4 mandatory) supported pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955" bundle2-input-bundle: 3 parts total updating the branch cache bundle2-output-bundle: "HG20", 2 parts total bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported bundle2-input-bundle: 1 parts total listing keys for "phases" repository tip rolled back to revision 0 (undo push) 0:6675d58eff77 wilma can change files with a .txt extension $ echo '**/*.txt = wilma' >> $config $ do_push wilma Pushing as user wilma hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook [acl] sources = push [acl.allow] foo/** = fred [acl.deny] foo/bar/** = fred foo/Bar/** = fred [acl.allow] ** = barney **/*.txt = wilma """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" invalid branchheads cache (served): tip differs listing keys for "bookmarks" 3 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 4 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae adding manifests adding file changes adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 3 changesets with 3 changes to 3 files calling hook pretxnchangegroup.acl: hgext.acl.hook acl: checking access for user "wilma" acl: acl.allow.branches not enabled acl: acl.deny.branches not enabled acl: acl.allow enabled, 1 entries for user wilma acl: acl.deny enabled, 0 entries for user wilma acl: branch access granted: "ef1ea85a6374" on branch "default" acl: path access granted: "ef1ea85a6374" acl: branch access granted: "f9cafe1212c8" on branch "default" acl: path access granted: "f9cafe1212c8" acl: branch access granted: "911600dab2ae" on branch "default" error: pretxnchangegroup.acl hook failed: acl: user "wilma" not allowed on "quux/file.py" (changeset "911600dab2ae") bundle2-input-part: total payload size 1606 bundle2-input-bundle: 3 parts total transaction abort! 
rollback completed abort: acl: user "wilma" not allowed on "quux/file.py" (changeset "911600dab2ae") no rollback information available 0:6675d58eff77 file specified by acl.config does not exist $ echo '[acl]' >> $config $ echo 'config = ../acl.config' >> $config $ do_push barney Pushing as user barney hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook [acl] sources = push [acl.allow] foo/** = fred [acl.deny] foo/bar/** = fred foo/Bar/** = fred [acl.allow] ** = barney **/*.txt = wilma [acl] config = ../acl.config """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" invalid branchheads cache (served): tip differs listing keys for "bookmarks" 3 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 4 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae adding manifests adding file changes adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 3 changesets with 3 changes to 3 files calling hook pretxnchangegroup.acl: hgext.acl.hook acl: checking access for user "barney" error: pretxnchangegroup.acl hook raised an exception: [Errno 2] No such file or directory: '../acl.config' 
bundle2-input-part: total payload size 1606 bundle2-input-bundle: 3 parts total transaction abort! rollback completed abort: No such file or directory: ../acl.config no rollback information available 0:6675d58eff77 betty is allowed inside foo/ by a acl.config file $ echo '[acl.allow]' >> acl.config $ echo 'foo/** = betty' >> acl.config $ do_push betty Pushing as user betty hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook [acl] sources = push [acl.allow] foo/** = fred [acl.deny] foo/bar/** = fred foo/Bar/** = fred [acl.allow] ** = barney **/*.txt = wilma [acl] config = ../acl.config """ acl.config = """ [acl.allow] foo/** = betty """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" invalid branchheads cache (served): tip differs listing keys for "bookmarks" 3 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 4 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae adding manifests adding file changes adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 3 changesets with 3 changes to 3 files calling hook pretxnchangegroup.acl: hgext.acl.hook acl: checking access for user "betty" acl: 
acl.allow.branches not enabled acl: acl.deny.branches not enabled acl: acl.allow enabled, 1 entries for user betty acl: acl.deny enabled, 0 entries for user betty acl: branch access granted: "ef1ea85a6374" on branch "default" acl: path access granted: "ef1ea85a6374" acl: branch access granted: "f9cafe1212c8" on branch "default" acl: path access granted: "f9cafe1212c8" acl: branch access granted: "911600dab2ae" on branch "default" error: pretxnchangegroup.acl hook failed: acl: user "betty" not allowed on "quux/file.py" (changeset "911600dab2ae") bundle2-input-part: total payload size 1606 bundle2-input-bundle: 3 parts total transaction abort! rollback completed abort: acl: user "betty" not allowed on "quux/file.py" (changeset "911600dab2ae") no rollback information available 0:6675d58eff77 acl.config can set only [acl.allow]/[acl.deny] $ echo '[hooks]' >> acl.config $ echo 'changegroup.acl = false' >> acl.config $ do_push barney Pushing as user barney hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook [acl] sources = push [acl.allow] foo/** = fred [acl.deny] foo/bar/** = fred foo/Bar/** = fred [acl.allow] ** = barney **/*.txt = wilma [acl] config = ../acl.config """ acl.config = """ [acl.allow] foo/** = betty [hooks] changegroup.acl = false """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" invalid branchheads cache (served): tip differs listing keys for "bookmarks" 3 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 4 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: 
with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae adding manifests adding file changes adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 3 changesets with 3 changes to 3 files calling hook pretxnchangegroup.acl: hgext.acl.hook acl: checking access for user "barney" acl: acl.allow.branches not enabled acl: acl.deny.branches not enabled acl: acl.allow enabled, 1 entries for user barney acl: acl.deny enabled, 0 entries for user barney acl: branch access granted: "ef1ea85a6374" on branch "default" acl: path access granted: "ef1ea85a6374" acl: branch access granted: "f9cafe1212c8" on branch "default" acl: path access granted: "f9cafe1212c8" acl: branch access granted: "911600dab2ae" on branch "default" acl: path access granted: "911600dab2ae" bundle2-input-part: total payload size 1606 bundle2-input-part: "pushkey" (params: 4 mandatory) supported pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955" bundle2-input-bundle: 3 parts total updating the branch cache bundle2-output-bundle: "HG20", 2 parts total bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported bundle2-input-bundle: 1 parts total listing keys for "phases" repository tip rolled back to revision 0 (undo push) 0:6675d58eff77 asterisk $ init_config asterisk test $ echo '[acl.allow]' >> $config $ echo "** = fred" >> $config fred is always allowed $ do_push fred 
Pushing as user fred hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook [acl] sources = push [extensions] [acl.allow] ** = fred """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" invalid branchheads cache (served): tip differs listing keys for "bookmarks" 3 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 4 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae adding manifests adding file changes adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 3 changesets with 3 changes to 3 files calling hook pretxnchangegroup.acl: hgext.acl.hook acl: checking access for user "fred" acl: acl.allow.branches not enabled acl: acl.deny.branches not enabled acl: acl.allow enabled, 1 entries for user fred acl: acl.deny not enabled acl: branch access granted: "ef1ea85a6374" on branch "default" acl: path access granted: "ef1ea85a6374" acl: branch access granted: "f9cafe1212c8" on branch "default" acl: path access granted: "f9cafe1212c8" acl: branch access granted: "911600dab2ae" on branch "default" acl: path access granted: "911600dab2ae" bundle2-input-part: total payload size 1606 
bundle2-input-part: "pushkey" (params: 4 mandatory) supported pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955" bundle2-input-bundle: 3 parts total updating the branch cache bundle2-output-bundle: "HG20", 2 parts total bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported bundle2-input-bundle: 1 parts total listing keys for "phases" repository tip rolled back to revision 0 (undo push) 0:6675d58eff77 $ echo '[acl.deny]' >> $config $ echo "foo/Bar/** = *" >> $config no one is allowed inside foo/Bar/ $ do_push fred Pushing as user fred hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook [acl] sources = push [extensions] [acl.allow] ** = fred [acl.deny] foo/Bar/** = * """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" invalid branchheads cache (served): tip differs listing keys for "bookmarks" 3 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 4 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add 
changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae adding manifests adding file changes adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 3 changesets with 3 changes to 3 files calling hook pretxnchangegroup.acl: hgext.acl.hook acl: checking access for user "fred" acl: acl.allow.branches not enabled acl: acl.deny.branches not enabled acl: acl.allow enabled, 1 entries for user fred acl: acl.deny enabled, 1 entries for user fred acl: branch access granted: "ef1ea85a6374" on branch "default" acl: path access granted: "ef1ea85a6374" acl: branch access granted: "f9cafe1212c8" on branch "default" error: pretxnchangegroup.acl hook failed: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8") bundle2-input-part: total payload size 1606 bundle2-input-bundle: 3 parts total transaction abort! rollback completed abort: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8") no rollback information available 0:6675d58eff77 Groups $ init_config OS-level groups $ echo '[acl.allow]' >> $config $ echo "** = @group1" >> $config @group1 is always allowed $ do_push fred Pushing as user fred hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook [acl] sources = push [extensions] [acl.allow] ** = @group1 """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" invalid branchheads cache (served): tip differs listing keys for "bookmarks" 3 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 4 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 
4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae adding manifests adding file changes adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 3 changesets with 3 changes to 3 files calling hook pretxnchangegroup.acl: hgext.acl.hook acl: checking access for user "fred" acl: acl.allow.branches not enabled acl: acl.deny.branches not enabled acl: "group1" not defined in [acl.groups] acl: acl.allow enabled, 1 entries for user fred acl: acl.deny not enabled acl: branch access granted: "ef1ea85a6374" on branch "default" acl: path access granted: "ef1ea85a6374" acl: branch access granted: "f9cafe1212c8" on branch "default" acl: path access granted: "f9cafe1212c8" acl: branch access granted: "911600dab2ae" on branch "default" acl: path access granted: "911600dab2ae" bundle2-input-part: total payload size 1606 bundle2-input-part: "pushkey" (params: 4 mandatory) supported pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955" bundle2-input-bundle: 3 parts total updating the branch cache bundle2-output-bundle: "HG20", 2 parts total bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported bundle2-input-bundle: 1 parts total listing keys for "phases" repository tip rolled back to revision 0 (undo push) 0:6675d58eff77 $ echo '[acl.deny]' >> $config $ echo "foo/Bar/** = @group1" >> $config @group 
is allowed inside anything but foo/Bar/ $ do_push fred Pushing as user fred hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook [acl] sources = push [extensions] [acl.allow] ** = @group1 [acl.deny] foo/Bar/** = @group1 """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" invalid branchheads cache (served): tip differs listing keys for "bookmarks" 3 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 bundle2-output-bundle: "HG20", 4 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae adding manifests adding file changes adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 3 changesets with 3 changes to 3 files calling hook pretxnchangegroup.acl: hgext.acl.hook acl: checking access for user "fred" acl: acl.allow.branches not enabled acl: acl.deny.branches not enabled acl: "group1" not defined in [acl.groups] acl: acl.allow enabled, 1 entries for user fred acl: "group1" not defined in [acl.groups] acl: acl.deny enabled, 1 entries for user fred acl: branch access granted: "ef1ea85a6374" on branch "default" acl: path access granted: "ef1ea85a6374" acl: branch access granted: "f9cafe1212c8" on branch "default" error: 
pretxnchangegroup.acl hook failed: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8") bundle2-input-part: total payload size 1606 bundle2-input-bundle: 3 parts total transaction abort! rollback completed abort: acl: user "fred" denied on "foo/Bar/file.txt" (changeset "f9cafe1212c8") no rollback information available 0:6675d58eff77 Invalid group Disable the fakegroups trick to get real failures $ grep -v fakegroups $config > config.tmp $ mv config.tmp $config $ echo '[acl.allow]' >> $config $ echo "** = @unlikelytoexist" >> $config $ do_push fred 2>&1 | grep unlikelytoexist ** = @unlikelytoexist acl: "unlikelytoexist" not defined in [acl.groups] error: pretxnchangegroup.acl hook failed: group 'unlikelytoexist' is undefined abort: group 'unlikelytoexist' is undefined Branch acl tests setup $ init_config $ cd b $ hg up 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg branch foobar marked working directory as branch foobar (branches are permanent and global, did you want a bookmark?) $ hg commit -m 'create foobar' $ echo 'foo contents' > abc.txt $ hg add abc.txt $ hg commit -m 'foobar contents' $ cd .. $ hg --cwd a pull ../b pulling from ../b searching for changes adding changesets adding manifests adding file changes added 2 changesets with 1 changes to 1 files (+1 heads) (run 'hg heads' to see heads) Create additional changeset on foobar branch $ cd a $ hg up -C foobar 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo 'foo contents2' > abc.txt $ hg commit -m 'foobar contents2' $ cd .. 
No branch acls specified $ do_push astro Pushing as user astro hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook [acl] sources = push [extensions] """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" listing keys for "bookmarks" 4 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae add changeset e8fc755d4d82 adding manifests adding file changes adding abc.txt revisions adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 4 changesets with 4 changes to 4 files (+1 heads) calling hook pretxnchangegroup.acl: hgext.acl.hook acl: checking access for user "astro" acl: acl.allow.branches not enabled acl: acl.deny.branches not enabled acl: acl.allow not enabled acl: acl.deny not enabled acl: branch access granted: "ef1ea85a6374" on branch "default" acl: path access granted: "ef1ea85a6374" acl: branch access granted: "f9cafe1212c8" on branch "default" acl: path access granted: "f9cafe1212c8" acl: branch access granted: 
"911600dab2ae" on branch "default" acl: path access granted: "911600dab2ae" acl: branch access granted: "e8fc755d4d82" on branch "foobar" acl: path access granted: "e8fc755d4d82" bundle2-input-part: total payload size 2101 bundle2-input-part: "pushkey" (params: 4 mandatory) supported pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955" bundle2-input-part: "pushkey" (params: 4 mandatory) supported pushing key for "phases:e8fc755d4d8217ee5b0c2bb41558c40d43b92c01" bundle2-input-bundle: 4 parts total updating the branch cache bundle2-output-bundle: "HG20", 3 parts total bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported bundle2-input-bundle: 2 parts total listing keys for "phases" repository tip rolled back to revision 2 (undo push) 2:fb35475503ef Branch acl deny test $ echo "[acl.deny.branches]" >> $config $ echo "foobar = *" >> $config $ do_push astro Pushing as user astro hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook [acl] sources = push [extensions] [acl.deny.branches] foobar = * """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" listing keys for "bookmarks" 4 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" 
streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae add changeset e8fc755d4d82 adding manifests adding file changes adding abc.txt revisions adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 4 changesets with 4 changes to 4 files (+1 heads) calling hook pretxnchangegroup.acl: hgext.acl.hook acl: checking access for user "astro" acl: acl.allow.branches not enabled acl: acl.deny.branches enabled, 1 entries for user astro acl: acl.allow not enabled acl: acl.deny not enabled acl: branch access granted: "ef1ea85a6374" on branch "default" acl: path access granted: "ef1ea85a6374" acl: branch access granted: "f9cafe1212c8" on branch "default" acl: path access granted: "f9cafe1212c8" acl: branch access granted: "911600dab2ae" on branch "default" acl: path access granted: "911600dab2ae" error: pretxnchangegroup.acl hook failed: acl: user "astro" denied on branch "foobar" (changeset "e8fc755d4d82") bundle2-input-part: total payload size 2101 bundle2-input-bundle: 4 parts total transaction abort! 
rollback completed abort: acl: user "astro" denied on branch "foobar" (changeset "e8fc755d4d82") no rollback information available 2:fb35475503ef Branch acl empty allow test $ init_config $ echo "[acl.allow.branches]" >> $config $ do_push astro Pushing as user astro hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook [acl] sources = push [extensions] [acl.allow.branches] """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" listing keys for "bookmarks" 4 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae add changeset e8fc755d4d82 adding manifests adding file changes adding abc.txt revisions adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 4 changesets with 4 changes to 4 files (+1 heads) calling hook pretxnchangegroup.acl: hgext.acl.hook acl: checking access for user "astro" acl: acl.allow.branches enabled, 0 entries for user astro acl: acl.deny.branches not enabled acl: acl.allow not enabled acl: acl.deny not enabled 
error: pretxnchangegroup.acl hook failed: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374") bundle2-input-part: total payload size 2101 bundle2-input-bundle: 4 parts total transaction abort! rollback completed abort: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374") no rollback information available 2:fb35475503ef Branch acl allow other $ init_config $ echo "[acl.allow.branches]" >> $config $ echo "* = george" >> $config $ do_push astro Pushing as user astro hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook [acl] sources = push [extensions] [acl.allow.branches] * = george """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" listing keys for "bookmarks" 4 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae add changeset e8fc755d4d82 adding manifests adding file changes adding abc.txt revisions adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 4 changesets with 4 
changes to 4 files (+1 heads) calling hook pretxnchangegroup.acl: hgext.acl.hook acl: checking access for user "astro" acl: acl.allow.branches enabled, 0 entries for user astro acl: acl.deny.branches not enabled acl: acl.allow not enabled acl: acl.deny not enabled error: pretxnchangegroup.acl hook failed: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374") bundle2-input-part: total payload size 2101 bundle2-input-bundle: 4 parts total transaction abort! rollback completed abort: acl: user "astro" not allowed on branch "default" (changeset "ef1ea85a6374") no rollback information available 2:fb35475503ef $ do_push george Pushing as user george hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook [acl] sources = push [extensions] [acl.allow.branches] * = george """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" listing keys for "bookmarks" 4 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae add changeset e8fc755d4d82 adding manifests adding file 
changes adding abc.txt revisions adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 4 changesets with 4 changes to 4 files (+1 heads) calling hook pretxnchangegroup.acl: hgext.acl.hook acl: checking access for user "george" acl: acl.allow.branches enabled, 1 entries for user george acl: acl.deny.branches not enabled acl: acl.allow not enabled acl: acl.deny not enabled acl: branch access granted: "ef1ea85a6374" on branch "default" acl: path access granted: "ef1ea85a6374" acl: branch access granted: "f9cafe1212c8" on branch "default" acl: path access granted: "f9cafe1212c8" acl: branch access granted: "911600dab2ae" on branch "default" acl: path access granted: "911600dab2ae" acl: branch access granted: "e8fc755d4d82" on branch "foobar" acl: path access granted: "e8fc755d4d82" bundle2-input-part: total payload size 2101 bundle2-input-part: "pushkey" (params: 4 mandatory) supported pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955" bundle2-input-part: "pushkey" (params: 4 mandatory) supported pushing key for "phases:e8fc755d4d8217ee5b0c2bb41558c40d43b92c01" bundle2-input-bundle: 4 parts total updating the branch cache bundle2-output-bundle: "HG20", 3 parts total bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported bundle2-input-bundle: 2 parts total listing keys for "phases" repository tip rolled back to revision 2 (undo push) 2:fb35475503ef Branch acl conflicting allow asterisk ends up applying to all branches and allowing george to push foobar into the remote $ init_config $ 
echo "[acl.allow.branches]" >> $config $ echo "foobar = astro" >> $config $ echo "* = george" >> $config $ do_push george Pushing as user george hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook [acl] sources = push [extensions] [acl.allow.branches] foobar = astro * = george """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" listing keys for "bookmarks" 4 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae add changeset e8fc755d4d82 adding manifests adding file changes adding abc.txt revisions adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 4 changesets with 4 changes to 4 files (+1 heads) calling hook pretxnchangegroup.acl: hgext.acl.hook acl: checking access for user "george" acl: acl.allow.branches enabled, 1 entries for user george acl: acl.deny.branches not enabled acl: acl.allow not enabled acl: acl.deny not enabled acl: branch access granted: "ef1ea85a6374" on branch "default" acl: path access granted: 
"ef1ea85a6374" acl: branch access granted: "f9cafe1212c8" on branch "default" acl: path access granted: "f9cafe1212c8" acl: branch access granted: "911600dab2ae" on branch "default" acl: path access granted: "911600dab2ae" acl: branch access granted: "e8fc755d4d82" on branch "foobar" acl: path access granted: "e8fc755d4d82" bundle2-input-part: total payload size 2101 bundle2-input-part: "pushkey" (params: 4 mandatory) supported pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955" bundle2-input-part: "pushkey" (params: 4 mandatory) supported pushing key for "phases:e8fc755d4d8217ee5b0c2bb41558c40d43b92c01" bundle2-input-bundle: 4 parts total updating the branch cache bundle2-output-bundle: "HG20", 3 parts total bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported bundle2-input-bundle: 2 parts total listing keys for "phases" repository tip rolled back to revision 2 (undo push) 2:fb35475503ef Branch acl conflicting deny $ init_config $ echo "[acl.deny.branches]" >> $config $ echo "foobar = astro" >> $config $ echo "default = astro" >> $config $ echo "* = george" >> $config $ do_push george Pushing as user george hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook [acl] sources = push [extensions] [acl.deny.branches] foobar = astro default = astro * = george """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" listing keys for "bookmarks" 4 changesets found list of changesets: 
ef1ea85a6374b77d6da9dcda9541f498f2d17df7 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae add changeset e8fc755d4d82 adding manifests adding file changes adding abc.txt revisions adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 4 changesets with 4 changes to 4 files (+1 heads) calling hook pretxnchangegroup.acl: hgext.acl.hook acl: checking access for user "george" acl: acl.allow.branches not enabled acl: acl.deny.branches enabled, 1 entries for user george acl: acl.allow not enabled acl: acl.deny not enabled error: pretxnchangegroup.acl hook failed: acl: user "george" denied on branch "default" (changeset "ef1ea85a6374") bundle2-input-part: total payload size 2101 bundle2-input-bundle: 4 parts total transaction abort! 
rollback completed abort: acl: user "george" denied on branch "default" (changeset "ef1ea85a6374") no rollback information available 2:fb35475503ef User 'astro' must not be denied $ init_config $ echo "[acl.deny.branches]" >> $config $ echo "default = !astro" >> $config $ do_push astro Pushing as user astro hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook [acl] sources = push [extensions] [acl.deny.branches] default = !astro """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" listing keys for "bookmarks" 4 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae add changeset e8fc755d4d82 adding manifests adding file changes adding abc.txt revisions adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 4 changesets with 4 changes to 4 files (+1 heads) calling hook pretxnchangegroup.acl: hgext.acl.hook acl: checking access for user "astro" acl: acl.allow.branches not enabled acl: acl.deny.branches enabled, 0 entries for user 
astro acl: acl.allow not enabled acl: acl.deny not enabled acl: branch access granted: "ef1ea85a6374" on branch "default" acl: path access granted: "ef1ea85a6374" acl: branch access granted: "f9cafe1212c8" on branch "default" acl: path access granted: "f9cafe1212c8" acl: branch access granted: "911600dab2ae" on branch "default" acl: path access granted: "911600dab2ae" acl: branch access granted: "e8fc755d4d82" on branch "foobar" acl: path access granted: "e8fc755d4d82" bundle2-input-part: total payload size 2101 bundle2-input-part: "pushkey" (params: 4 mandatory) supported pushing key for "phases:911600dab2ae7a9baff75958b84fe606851ce955" bundle2-input-part: "pushkey" (params: 4 mandatory) supported pushing key for "phases:e8fc755d4d8217ee5b0c2bb41558c40d43b92c01" bundle2-input-bundle: 4 parts total updating the branch cache bundle2-output-bundle: "HG20", 3 parts total bundle2-output-part: "reply:changegroup" (advisory) (params: 0 advisory) empty payload bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload bundle2-output-part: "reply:pushkey" (params: 0 advisory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "reply:changegroup" (advisory) (params: 0 advisory) supported bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported bundle2-input-part: "reply:pushkey" (params: 0 advisory) supported bundle2-input-bundle: 2 parts total listing keys for "phases" repository tip rolled back to revision 2 (undo push) 2:fb35475503ef Non-astro users must be denied $ do_push george Pushing as user george hgrc = """ [hooks] pretxnchangegroup.acl = python:hgext.acl.hook [acl] sources = push [extensions] [acl.deny.branches] default = !astro """ pushing to ../b query 1; heads searching for changes all remote heads known locally listing keys for "phases" checking for updated bookmarks listing keys for "bookmarks" listing keys for "bookmarks" 4 changesets found list of changesets: ef1ea85a6374b77d6da9dcda9541f498f2d17df7 
f9cafe1212c8c6fa1120d14a556e18cc44ff8bdd 911600dab2ae7a9baff75958b84fe606851ce955 e8fc755d4d8217ee5b0c2bb41558c40d43b92c01 bundle2-output-bundle: "HG20", 5 parts total bundle2-output-part: "replycaps" 155 bytes payload bundle2-output-part: "check:heads" streamed payload bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-output-part: "pushkey" (params: 4 mandatory) empty payload bundle2-input-bundle: with-transaction bundle2-input-part: "replycaps" supported bundle2-input-part: total payload size 155 bundle2-input-part: "check:heads" supported bundle2-input-part: total payload size 20 bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset ef1ea85a6374 add changeset f9cafe1212c8 add changeset 911600dab2ae add changeset e8fc755d4d82 adding manifests adding file changes adding abc.txt revisions adding foo/Bar/file.txt revisions adding foo/file.txt revisions adding quux/file.py revisions added 4 changesets with 4 changes to 4 files (+1 heads) calling hook pretxnchangegroup.acl: hgext.acl.hook acl: checking access for user "george" acl: acl.allow.branches not enabled acl: acl.deny.branches enabled, 1 entries for user george acl: acl.allow not enabled acl: acl.deny not enabled error: pretxnchangegroup.acl hook failed: acl: user "george" denied on branch "default" (changeset "ef1ea85a6374") bundle2-input-part: total payload size 2101 bundle2-input-bundle: 4 parts total transaction abort! rollback completed abort: acl: user "george" denied on branch "default" (changeset "ef1ea85a6374") no rollback information available 2:fb35475503ef mercurial-3.7.3/tests/test-bundle2-pushback.t0000644000175000017500000000527012676531525020570 0ustar mpmmpm00000000000000 $ cat > bundle2.py << EOF > """A small extension to test bundle2 pushback parts. 
> Current bundle2 implementation doesn't provide a way to generate those > parts, so they must be created by extensions. > """ > from mercurial import bundle2, pushkey, exchange, util > def _newhandlechangegroup(op, inpart): > """This function wraps the changegroup part handler for getbundle. > It issues an additional pushkey part to send a new > bookmark back to the client""" > result = bundle2.handlechangegroup(op, inpart) > if 'pushback' in op.reply.capabilities: > params = {'namespace': 'bookmarks', > 'key': 'new-server-mark', > 'old': '', > 'new': 'tip'} > encodedparams = [(k, pushkey.encode(v)) for (k,v) in params.items()] > op.reply.newpart('pushkey', mandatoryparams=encodedparams) > else: > op.reply.newpart('output', data='pushback not enabled') > return result > _newhandlechangegroup.params = bundle2.handlechangegroup.params > bundle2.parthandlermapping['changegroup'] = _newhandlechangegroup > EOF $ cat >> $HGRCPATH < [ui] > ssh = python "$TESTDIR/dummyssh" > username = nobody > > [alias] > tglog = log -G -T "{desc} [{phase}:{node|short}]" > EOF Set up server repository $ hg init server $ cd server $ echo c0 > f0 $ hg commit -Am 0 adding f0 Set up client repository $ cd .. 
$ hg clone ssh://user@dummy/server client -q $ cd client Enable extension $ cat >> $HGRCPATH < [extensions] > bundle2=$TESTTMP/bundle2.py > [experimental] > bundle2-exp = True > EOF Without config $ cd ../client $ echo c1 > f1 $ hg commit -Am 1 adding f1 $ hg push pushing to ssh://user@dummy/server searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files remote: pushback not enabled $ hg bookmark no bookmarks set $ cd ../server $ hg tglog o 1 [public:2b9c7234e035] | @ 0 [public:6cee5c8f3e5b] With config $ cd ../client $ echo '[experimental]' >> .hg/hgrc $ echo 'bundle2.pushback = True' >> .hg/hgrc $ echo c2 > f2 $ hg commit -Am 2 adding f2 $ hg push pushing to ssh://user@dummy/server searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files $ hg bookmark new-server-mark 2:0a76dfb2e179 $ cd ../server $ hg tglog o 2 [public:0a76dfb2e179] | o 1 [public:2b9c7234e035] | @ 0 [public:6cee5c8f3e5b] mercurial-3.7.3/tests/test-eol-hook.t0000644000175000017500000001370212676531525017153 0ustar mpmmpm00000000000000Test the EOL hook $ hg init main $ cat > main/.hg/hgrc < [hooks] > pretxnchangegroup = python:hgext.eol.hook > EOF $ hg clone main fork updating to branch default 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd fork Create repo $ cat > .hgeol < [patterns] > mixed.txt = BIN > crlf.txt = CRLF > **.txt = native > EOF $ hg add .hgeol $ hg commit -m 'Commit .hgeol' $ printf "first\nsecond\nthird\n" > a.txt $ hg add a.txt $ hg commit -m 'LF a.txt' $ hg push ../main pushing to ../main searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files $ printf "first\r\nsecond\r\nthird\n" > a.txt $ hg commit -m 'CRLF a.txt' $ hg push ../main pushing to ../main searching for changes adding changesets adding 
manifests adding file changes added 1 changesets with 1 changes to 1 files error: pretxnchangegroup hook failed: end-of-line check failed: a.txt in a8ee6548cd86 should not have CRLF line endings transaction abort! rollback completed abort: end-of-line check failed: a.txt in a8ee6548cd86 should not have CRLF line endings [255] $ printf "first\nsecond\nthird\n" > a.txt $ hg commit -m 'LF a.txt (fixed)' $ hg push ../main pushing to ../main searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files $ printf "first\nsecond\nthird\n" > crlf.txt $ hg add crlf.txt $ hg commit -m 'LF crlf.txt' $ hg push ../main pushing to ../main searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files error: pretxnchangegroup hook failed: end-of-line check failed: crlf.txt in 004ba2132725 should not have LF line endings transaction abort! rollback completed abort: end-of-line check failed: crlf.txt in 004ba2132725 should not have LF line endings [255] $ printf "first\r\nsecond\r\nthird\r\n" > crlf.txt $ hg commit -m 'CRLF crlf.txt (fixed)' $ hg push ../main pushing to ../main searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files $ printf "first\r\nsecond" > b.txt $ hg add b.txt $ hg commit -m 'CRLF b.txt' $ hg push ../main pushing to ../main searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files error: pretxnchangegroup hook failed: end-of-line check failed: b.txt in fbcf9b1025f5 should not have CRLF line endings transaction abort! 
rollback completed abort: end-of-line check failed: b.txt in fbcf9b1025f5 should not have CRLF line endings [255] $ hg up -r -2 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ printf "some\nother\nfile" > c.txt $ hg add c.txt $ hg commit -m "LF c.txt, b.txt doesn't exist here" created new head $ hg push -f ../main pushing to ../main searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files (+1 heads) error: pretxnchangegroup hook failed: end-of-line check failed: b.txt in fbcf9b1025f5 should not have CRLF line endings transaction abort! rollback completed abort: end-of-line check failed: b.txt in fbcf9b1025f5 should not have CRLF line endings [255] Test checkheadshook alias $ cat > ../main/.hg/hgrc < [hooks] > pretxnchangegroup = python:hgext.eol.checkheadshook > EOF $ hg push -f ../main pushing to ../main searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files (+1 heads) error: pretxnchangegroup hook failed: end-of-line check failed: b.txt in fbcf9b1025f5 should not have CRLF line endings transaction abort! 
rollback completed abort: end-of-line check failed: b.txt in fbcf9b1025f5 should not have CRLF line endings [255] We can fix the head and push again $ hg up 6 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ printf "first\nsecond" > b.txt $ hg ci -m "remove CRLF from b.txt" $ hg push -f ../main pushing to ../main searching for changes adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 2 files (+1 heads) $ hg -R ../main rollback repository tip rolled back to revision 5 (undo push) Test it still fails with checkallhook $ cat > ../main/.hg/hgrc < [hooks] > pretxnchangegroup = python:hgext.eol.checkallhook > EOF $ hg push -f ../main pushing to ../main searching for changes adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 2 files (+1 heads) error: pretxnchangegroup hook failed: end-of-line check failed: b.txt in fbcf9b1025f5 should not have CRLF line endings transaction abort! rollback completed abort: end-of-line check failed: b.txt in fbcf9b1025f5 should not have CRLF line endings [255] But we can push the clean head $ hg push -r7 -f ../main pushing to ../main searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files Test multiple files/revisions output $ printf "another\r\nbad\r\none" > d.txt $ hg add d.txt $ hg ci -m "add d.txt" $ hg push -f ../main pushing to ../main searching for changes adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 2 files (+1 heads) error: pretxnchangegroup hook failed: end-of-line check failed: b.txt in fbcf9b1025f5 should not have CRLF line endings d.txt in a7040e68714f should not have CRLF line endings transaction abort! rollback completed abort: end-of-line check failed: b.txt in fbcf9b1025f5 should not have CRLF line endings d.txt in a7040e68714f should not have CRLF line endings [255] $ cd .. 
mercurial-3.7.3/tests/fakepatchtime.py0000644000175000017500000000174712676531525017461 0ustar mpmmpm00000000000000# extension to emulate invoking 'patch.internalpatch()' at the time # specified by '[fakepatchtime] fakenow' from __future__ import absolute_import from mercurial import ( extensions, patch as patchmod, util, ) def internalpatch(orig, ui, repo, patchobj, strip, prefix='', files=None, eolmode='strict', similarity=0): if files is None: files = set() r = orig(ui, repo, patchobj, strip, prefix=prefix, files=files, eolmode=eolmode, similarity=similarity) fakenow = ui.config('fakepatchtime', 'fakenow') if fakenow: # parsing 'fakenow' in YYYYmmddHHMM format makes comparison between # 'fakenow' value and 'touch -t YYYYmmddHHMM' argument easy fakenow = util.parsedate(fakenow, ['%Y%m%d%H%M'])[0] for f in files: repo.wvfs.utime(f, (fakenow, fakenow)) return r def extsetup(ui): extensions.wrapfunction(patchmod, 'internalpatch', internalpatch) mercurial-3.7.3/tests/test-execute-bit.t0000644000175000017500000000103512676531525017650 0ustar mpmmpm00000000000000#require execbit $ hg init $ echo a > a $ hg ci -Am'not executable' adding a $ chmod +x a $ hg ci -m'executable' $ hg id 79abf14474dc tip Make sure we notice the change of mode if the cached size == -1: $ hg rm a $ hg revert -r 0 a $ hg debugstate n 0 -1 unset a $ hg status M a $ hg up 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg id d69afc33ff8a $ test -x a && echo executable -- bad || echo not executable -- good not executable -- good mercurial-3.7.3/tests/test-conflict.t0000644000175000017500000001236612676531525017244 0ustar mpmmpm00000000000000 $ hg init $ cat << EOF > a > Small Mathematical Series. > One > Two > Three > Four > Five > Hop we are done. > EOF $ hg add a $ hg commit -m ancestor $ cat << EOF > a > Small Mathematical Series. > 1 > 2 > 3 > 4 > 5 > Hop we are done. 
> EOF $ hg commit -m branch1 $ hg co 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat << EOF > a > Small Mathematical Series. > 1 > 2 > 3 > 6 > 8 > Hop we are done. > EOF $ hg commit -m branch2 created new head $ hg merge 1 merging a warning: conflicts while merging a! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ hg id 618808747361+c0c68e4fe667+ tip $ cat a Small Mathematical Series. <<<<<<< local: 618808747361 - test: branch2 1 2 3 6 8 ======= 1 2 3 4 5 >>>>>>> other: c0c68e4fe667 - test: branch1 Hop we are done. $ hg status M a ? a.orig Verify custom conflict markers $ hg up -q --clean . $ printf "\n[ui]\nmergemarkertemplate={author} {rev}\n" >> .hg/hgrc $ hg merge 1 merging a warning: conflicts while merging a! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ cat a Small Mathematical Series. <<<<<<< local: test 2 1 2 3 6 8 ======= 1 2 3 4 5 >>>>>>> other: test 1 Hop we are done. Verify line splitting of custom conflict marker which causes multiple lines $ hg up -q --clean . $ cat >> .hg/hgrc < [ui] > mergemarkertemplate={author} {rev}\nfoo\nbar\nbaz > EOF $ hg -q merge 1 warning: conflicts while merging a! (edit, then use 'hg resolve --mark') [1] $ cat a Small Mathematical Series. <<<<<<< local: test 2 1 2 3 6 8 ======= 1 2 3 4 5 >>>>>>> other: test 1 Hop we are done. Verify line trimming of custom conflict marker using multi-byte characters $ hg up -q --clean . 
$ python < fp = open('logfile', 'w') > fp.write('12345678901234567890123456789012345678901234567890' + > '1234567890') # there are 5 more columns for 80 columns > > # 2 x 4 = 8 columns, but 3 x 4 = 12 bytes > fp.write(u'\u3042\u3044\u3046\u3048'.encode('utf-8')) > > fp.close() > EOF $ hg add logfile $ hg --encoding utf-8 commit --logfile logfile $ cat >> .hg/hgrc < [ui] > mergemarkertemplate={desc|firstline} > EOF $ hg -q --encoding utf-8 merge 1 warning: conflicts while merging a! (edit, then use 'hg resolve --mark') [1] $ cat a Small Mathematical Series. <<<<<<< local: 123456789012345678901234567890123456789012345678901234567890\xe3\x81\x82... (esc) 1 2 3 6 8 ======= 1 2 3 4 5 >>>>>>> other: branch1 Hop we are done. Verify basic conflict markers $ hg up -q --clean 2 $ printf "\n[ui]\nmergemarkers=basic\n" >> .hg/hgrc $ hg merge 1 merging a warning: conflicts while merging a! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ cat a Small Mathematical Series. <<<<<<< local 1 2 3 6 8 ======= 1 2 3 4 5 >>>>>>> other Hop we are done. internal:merge3 $ hg up -q --clean . $ hg merge 1 --tool internal:merge3 merging a warning: conflicts while merging a! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ cat a Small Mathematical Series. <<<<<<< local 1 2 3 6 8 ||||||| base One Two Three Four Five ======= 1 2 3 4 5 >>>>>>> other Hop we are done. 
Add some unconflicting changes on each head, to make sure we really are merging, unlike :local and :other $ hg up -C 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ printf "\n\nEnd of file\n" >> a $ hg ci -m "Add some stuff at the end" $ hg up -r 1 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ printf "Start of file\n\n\n" > tmp $ cat a >> tmp $ mv tmp a $ hg ci -m "Add some stuff at the beginning" Now test :merge-other and :merge-local $ hg merge merging a warning: conflicts while merging a! (edit, then use 'hg resolve --mark') 1 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ hg resolve --tool :merge-other a merging a (no more unresolved files) $ cat a Start of file Small Mathematical Series. 1 2 3 6 8 Hop we are done. End of file $ hg up -C 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge --tool :merge-local merging a 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat a Start of file Small Mathematical Series. 1 2 3 4 5 Hop we are done. End of file mercurial-3.7.3/tests/test-rebase-scenario-global.t0000644000175000017500000003240412676531525021736 0ustar mpmmpm00000000000000 $ cat >> $HGRCPATH < [extensions] > rebase= > > [phases] > publish=False > > [alias] > tglog = log -G --template "{rev}: '{desc}' {branches}\n" > EOF $ hg init a $ cd a $ hg unbundle "$TESTDIR/bundles/rebase.hg" adding changesets adding manifests adding file changes added 8 changesets with 7 changes to 7 files (+2 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg up tip 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd .. Rebasing D onto H - simple rebase: (this also tests that editor is invoked if '--edit' is specified) $ hg clone -q -u . 
a a1 $ cd a1 $ hg tglog @ 7: 'H' | | o 6: 'G' |/| o | 5: 'F' | | | o 4: 'E' |/ | o 3: 'D' | | | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' $ hg status --rev "3^1" --rev 3 A D $ HGEDITOR=cat hg rebase -s 3 -d 7 --edit rebasing 3:32af7686d403 "D" D HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. HG: -- HG: user: Nicolas Dumazet HG: branch 'default' HG: added D saved backup bundle to $TESTTMP/a1/.hg/strip-backup/32af7686d403-6f7dface-backup.hg (glob) $ hg tglog o 7: 'D' | @ 6: 'H' | | o 5: 'G' |/| o | 4: 'F' | | | o 3: 'E' |/ | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' $ cd .. D onto F - intermediate point: (this also tests that editor is not invoked if '--edit' is not specified) $ hg clone -q -u . a a2 $ cd a2 $ HGEDITOR=cat hg rebase -s 3 -d 5 rebasing 3:32af7686d403 "D" saved backup bundle to $TESTTMP/a2/.hg/strip-backup/32af7686d403-6f7dface-backup.hg (glob) $ hg tglog o 7: 'D' | | @ 6: 'H' |/ | o 5: 'G' |/| o | 4: 'F' | | | o 3: 'E' |/ | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' $ cd .. E onto H - skip of G: $ hg clone -q -u . a a3 $ cd a3 $ hg rebase -s 4 -d 7 rebasing 4:9520eea781bc "E" rebasing 6:eea13746799a "G" note: rebase of 6:eea13746799a created no changes to commit saved backup bundle to $TESTTMP/a3/.hg/strip-backup/9520eea781bc-fcd8edd4-backup.hg (glob) $ hg tglog o 6: 'E' | @ 5: 'H' | o 4: 'F' | | o 3: 'D' | | | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' $ cd .. F onto E - rebase of a branching point (skip G): $ hg clone -q -u . a a4 $ cd a4 $ hg rebase -s 5 -d 4 rebasing 5:24b6387c8c8c "F" rebasing 6:eea13746799a "G" note: rebase of 6:eea13746799a created no changes to commit rebasing 7:02de42196ebe "H" (tip) saved backup bundle to $TESTTMP/a4/.hg/strip-backup/24b6387c8c8c-c3fe765d-backup.hg (glob) $ hg tglog @ 6: 'H' | o 5: 'F' | o 4: 'E' | | o 3: 'D' | | | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' $ cd .. G onto H - merged revision having a parent in ancestors of target: $ hg clone -q -u . 
a a5 $ cd a5 $ hg rebase -s 6 -d 7 rebasing 6:eea13746799a "G" saved backup bundle to $TESTTMP/a5/.hg/strip-backup/eea13746799a-883828ed-backup.hg (glob) $ hg tglog o 7: 'G' |\ | @ 6: 'H' | | | o 5: 'F' | | o | 4: 'E' |/ | o 3: 'D' | | | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' $ cd .. F onto B - G maintains E as parent: $ hg clone -q -u . a a6 $ cd a6 $ hg rebase -s 5 -d 1 rebasing 5:24b6387c8c8c "F" rebasing 6:eea13746799a "G" rebasing 7:02de42196ebe "H" (tip) saved backup bundle to $TESTTMP/a6/.hg/strip-backup/24b6387c8c8c-c3fe765d-backup.hg (glob) $ hg tglog @ 7: 'H' | | o 6: 'G' |/| o | 5: 'F' | | | o 4: 'E' | | | | o 3: 'D' | | | +---o 2: 'C' | | o | 1: 'B' |/ o 0: 'A' $ cd .. These will fail (using --source): G onto F - rebase onto an ancestor: $ hg clone -q -u . a a7 $ cd a7 $ hg rebase -s 6 -d 5 nothing to rebase [1] F onto G - rebase onto a descendant: $ hg rebase -s 5 -d 6 abort: source is ancestor of destination [255] G onto B - merge revision with both parents not in ancestors of target: $ hg rebase -s 6 -d 1 rebasing 6:eea13746799a "G" abort: cannot use revision 6 as base, result would have 3 parents [255] These will abort gracefully (using --base): G onto G - rebase onto same changeset: $ hg rebase -b 6 -d 6 nothing to rebase - eea13746799a is both "base" and destination [1] G onto F - rebase onto an ancestor: $ hg rebase -b 6 -d 5 nothing to rebase [1] F onto G - rebase onto a descendant: $ hg rebase -b 5 -d 6 nothing to rebase - "base" 24b6387c8c8c is already an ancestor of destination eea13746799a [1] C onto A - rebase onto an ancestor: $ hg rebase -d 0 -s 2 rebasing 2:5fddd98957c8 "C" rebasing 3:32af7686d403 "D" saved backup bundle to $TESTTMP/a7/.hg/strip-backup/5fddd98957c8-f9244fa1-backup.hg (glob) $ hg tglog o 7: 'D' | o 6: 'C' | | @ 5: 'H' | | | | o 4: 'G' | |/| | o | 3: 'F' |/ / | o 2: 'E' |/ | o 1: 'B' |/ o 0: 'A' Check rebasing public changeset $ hg pull --config phases.publish=True -q -r 6 . 
# update phase of 6 $ hg rebase -d 0 -b 6 nothing to rebase [1] $ hg rebase -d 5 -b 6 abort: can't rebase public changeset e1c4361dd923 (see "hg help phases" for details) [255] $ hg rebase -d 5 -b 6 --keep rebasing 6:e1c4361dd923 "C" rebasing 7:c9659aac0000 "D" (tip) Check rebasing mutable changeset Source phase greater or equal to destination phase: new changeset get the phase of source: $ hg id -n 5 $ hg rebase -s9 -d0 rebasing 9:2b23e52411f4 "D" (tip) saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2b23e52411f4-f942decf-backup.hg (glob) $ hg id -n # check we updated back to parent 5 $ hg log --template "{phase}\n" -r 9 draft $ hg rebase -s9 -d1 rebasing 9:2cb10d0cfc6c "D" (tip) saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2cb10d0cfc6c-ddb0f256-backup.hg (glob) $ hg log --template "{phase}\n" -r 9 draft $ hg phase --force --secret 9 $ hg rebase -s9 -d0 rebasing 9:c5b12b67163a "D" (tip) saved backup bundle to $TESTTMP/a7/.hg/strip-backup/c5b12b67163a-4e372053-backup.hg (glob) $ hg log --template "{phase}\n" -r 9 secret $ hg rebase -s9 -d1 rebasing 9:2a0524f868ac "D" (tip) saved backup bundle to $TESTTMP/a7/.hg/strip-backup/2a0524f868ac-cefd8574-backup.hg (glob) $ hg log --template "{phase}\n" -r 9 secret Source phase lower than destination phase: new changeset get the phase of destination: $ hg rebase -s8 -d9 rebasing 8:6d4f22462821 "C" saved backup bundle to $TESTTMP/a7/.hg/strip-backup/6d4f22462821-3441f70b-backup.hg (glob) $ hg log --template "{phase}\n" -r 'rev(9)' secret $ cd .. Test for revset We need a bit different graph All destination are B $ hg init ah $ cd ah $ hg unbundle "$TESTDIR/bundles/rebase-revset.hg" adding changesets adding manifests adding file changes added 9 changesets with 9 changes to 9 files (+2 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg tglog o 8: 'I' | o 7: 'H' | o 6: 'G' | | o 5: 'F' | | | o 4: 'E' |/ o 3: 'D' | o 2: 'C' | | o 1: 'B' |/ o 0: 'A' $ cd .. 
Simple case with keep: Source on have two descendant heads but ask for one $ hg clone -q -u . ah ah1 $ cd ah1 $ hg rebase -r '2::8' -d 1 abort: can't remove original changesets with unrebased descendants (use --keep to keep original changesets) [255] $ hg rebase -r '2::8' -d 1 -k rebasing 2:c9e50f6cdc55 "C" rebasing 3:ffd453c31098 "D" rebasing 6:3d8a618087a7 "G" rebasing 7:72434a4e60b0 "H" rebasing 8:479ddb54a924 "I" (tip) $ hg tglog o 13: 'I' | o 12: 'H' | o 11: 'G' | o 10: 'D' | o 9: 'C' | | o 8: 'I' | | | o 7: 'H' | | | o 6: 'G' | | | | o 5: 'F' | | | | | o 4: 'E' | |/ | o 3: 'D' | | | o 2: 'C' | | o | 1: 'B' |/ o 0: 'A' $ cd .. Base on have one descendant heads we ask for but common ancestor have two $ hg clone -q -u . ah ah2 $ cd ah2 $ hg rebase -r '3::8' -d 1 abort: can't remove original changesets with unrebased descendants (use --keep to keep original changesets) [255] $ hg rebase -r '3::8' -d 1 --keep rebasing 3:ffd453c31098 "D" rebasing 6:3d8a618087a7 "G" rebasing 7:72434a4e60b0 "H" rebasing 8:479ddb54a924 "I" (tip) $ hg tglog o 12: 'I' | o 11: 'H' | o 10: 'G' | o 9: 'D' | | o 8: 'I' | | | o 7: 'H' | | | o 6: 'G' | | | | o 5: 'F' | | | | | o 4: 'E' | |/ | o 3: 'D' | | | o 2: 'C' | | o | 1: 'B' |/ o 0: 'A' $ cd .. rebase subset $ hg clone -q -u . ah ah3 $ cd ah3 $ hg rebase -r '3::7' -d 1 abort: can't remove original changesets with unrebased descendants (use --keep to keep original changesets) [255] $ hg rebase -r '3::7' -d 1 --keep rebasing 3:ffd453c31098 "D" rebasing 6:3d8a618087a7 "G" rebasing 7:72434a4e60b0 "H" $ hg tglog o 11: 'H' | o 10: 'G' | o 9: 'D' | | o 8: 'I' | | | o 7: 'H' | | | o 6: 'G' | | | | o 5: 'F' | | | | | o 4: 'E' | |/ | o 3: 'D' | | | o 2: 'C' | | o | 1: 'B' |/ o 0: 'A' $ cd .. rebase subset with multiple head $ hg clone -q -u . 
ah ah4 $ cd ah4 $ hg rebase -r '3::(7+5)' -d 1 abort: can't remove original changesets with unrebased descendants (use --keep to keep original changesets) [255] $ hg rebase -r '3::(7+5)' -d 1 --keep rebasing 3:ffd453c31098 "D" rebasing 4:c01897464e7f "E" rebasing 5:41bfcc75ed73 "F" rebasing 6:3d8a618087a7 "G" rebasing 7:72434a4e60b0 "H" $ hg tglog o 13: 'H' | o 12: 'G' | | o 11: 'F' | | | o 10: 'E' |/ o 9: 'D' | | o 8: 'I' | | | o 7: 'H' | | | o 6: 'G' | | | | o 5: 'F' | | | | | o 4: 'E' | |/ | o 3: 'D' | | | o 2: 'C' | | o | 1: 'B' |/ o 0: 'A' $ cd .. More advanced tests rebase on ancestor with revset $ hg clone -q -u . ah ah5 $ cd ah5 $ hg rebase -r '6::' -d 2 rebasing 6:3d8a618087a7 "G" rebasing 7:72434a4e60b0 "H" rebasing 8:479ddb54a924 "I" (tip) saved backup bundle to $TESTTMP/ah5/.hg/strip-backup/3d8a618087a7-b4f73f31-backup.hg (glob) $ hg tglog o 8: 'I' | o 7: 'H' | o 6: 'G' | | o 5: 'F' | | | o 4: 'E' | | | o 3: 'D' |/ o 2: 'C' | | o 1: 'B' |/ o 0: 'A' $ cd .. rebase with multiple root. We rebase E and G on B We would expect heads are I, F if it was supported $ hg clone -q -u . ah ah6 $ cd ah6 $ hg rebase -r '(4+6)::' -d 1 rebasing 4:c01897464e7f "E" rebasing 5:41bfcc75ed73 "F" rebasing 6:3d8a618087a7 "G" rebasing 7:72434a4e60b0 "H" rebasing 8:479ddb54a924 "I" (tip) saved backup bundle to $TESTTMP/ah6/.hg/strip-backup/3d8a618087a7-aae93a24-backup.hg (glob) $ hg tglog o 8: 'I' | o 7: 'H' | o 6: 'G' | | o 5: 'F' | | | o 4: 'E' |/ | o 3: 'D' | | | o 2: 'C' | | o | 1: 'B' |/ o 0: 'A' $ cd .. More complex rebase with multiple roots each root have a different common ancestor with the destination and this is a detach (setup) $ hg clone -q -u . 
a a8 $ cd a8 $ echo I > I $ hg add I $ hg commit -m I $ hg up 4 1 files updated, 0 files merged, 3 files removed, 0 files unresolved $ echo I > J $ hg add J $ hg commit -m J created new head $ echo I > K $ hg add K $ hg commit -m K $ hg tglog @ 10: 'K' | o 9: 'J' | | o 8: 'I' | | | o 7: 'H' | | +---o 6: 'G' | |/ | o 5: 'F' | | o | 4: 'E' |/ | o 3: 'D' | | | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' (actual test) $ hg rebase --dest 'desc(G)' --rev 'desc(K) + desc(I)' rebasing 8:e7ec4e813ba6 "I" rebasing 10:23a4ace37988 "K" (tip) saved backup bundle to $TESTTMP/a8/.hg/strip-backup/23a4ace37988-b06984b3-backup.hg (glob) $ hg log --rev 'children(desc(G))' changeset: 9:adb617877056 parent: 6:eea13746799a user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: I changeset: 10:882431a34a0e tag: tip parent: 6:eea13746799a user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: K $ hg tglog @ 10: 'K' | | o 9: 'I' |/ | o 8: 'J' | | | | o 7: 'H' | | | o---+ 6: 'G' |/ / | o 5: 'F' | | o | 4: 'E' |/ | o 3: 'D' | | | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' Test that rebase is not confused by $CWD disappearing during rebase (issue4121) $ cd .. $ hg init cwd-vanish $ cd cwd-vanish $ touch initial-file $ hg add initial-file $ hg commit -m 'initial commit' $ touch dest-file $ hg add dest-file $ hg commit -m 'dest commit' $ hg up 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ touch other-file $ hg add other-file $ hg commit -m 'first source commit' created new head $ mkdir subdir $ cd subdir $ touch subfile $ hg add subfile $ hg commit -m 'second source with subdir' $ hg rebase -b . -d 1 --traceback rebasing 2:779a07b1b7a0 "first source commit" rebasing 3:a7d6f3a00bf3 "second source with subdir" (tip) saved backup bundle to $TESTTMP/cwd-vanish/.hg/strip-backup/779a07b1b7a0-853e0073-backup.hg (glob) Test experimental revset $ cd .. 
$ hg log -r '_destrebase()' changeset: 3:1910d5ff34ea tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: second source with subdir mercurial-3.7.3/tests/test-histedit-fold-non-commute.t0000644000175000017500000002150412676531525022433 0ustar mpmmpm00000000000000 $ . "$TESTDIR/histedit-helpers.sh" $ cat >> $HGRCPATH < [extensions] > histedit= > EOF $ initrepo () > { > hg init $1 > cd $1 > for x in a b c d e f ; do > echo $x$x$x$x$x > $x > hg add $x > done > hg ci -m 'Initial commit' > for x in a b c d e f ; do > echo $x > $x > hg ci -m $x > done > echo 'I can haz no commute' > e > hg ci -m 'does not commute with e' > cd .. > } $ initrepo r $ cd r Initial generation of the command files $ EDITED="$TESTTMP/editedhistory" $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 3 >> $EDITED $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 4 >> $EDITED $ hg log --template 'fold {node|short} {rev} {desc}\n' -r 7 >> $EDITED $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 5 >> $EDITED $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 6 >> $EDITED $ cat $EDITED pick 65a9a84f33fd 3 c pick 00f1c5383965 4 d fold 39522b764e3d 7 does not commute with e pick 7b4e2f4b7bcd 5 e pick 500cac37a696 6 f log before edit $ hg log --graph @ changeset: 7:39522b764e3d | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: does not commute with e | o changeset: 6:500cac37a696 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: f | o changeset: 5:7b4e2f4b7bcd | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: e | o changeset: 4:00f1c5383965 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: d | o changeset: 3:65a9a84f33fd | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: c | o changeset: 2:da6535b52e45 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 1:c1f09da44841 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: a | o changeset: 
0:1715188a53c7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Initial commit edit the history $ hg histedit 3 --commands $EDITED 2>&1 | fixbundle 2 files updated, 0 files merged, 0 files removed, 0 files unresolved merging e warning: conflicts while merging e! (edit, then use 'hg resolve --mark') Fix up the change (fold 39522b764e3d) (hg histedit --continue to resume) fix up $ echo 'I can haz no commute' > e $ hg resolve --mark e (no more unresolved files) continue: hg histedit --continue $ cat > cat.py < import sys > print open(sys.argv[1]).read() > print > print > EOF $ HGEDITOR="python cat.py" hg histedit --continue 2>&1 | fixbundle | grep -v '2 files removed' 2 files updated, 0 files merged, 0 files removed, 0 files unresolved d *** does not commute with e HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. HG: -- HG: user: test HG: branch 'default' HG: changed d HG: changed e 2 files updated, 0 files merged, 0 files removed, 0 files unresolved merging e warning: conflicts while merging e! 
(edit, then use 'hg resolve --mark') Fix up the change (pick 7b4e2f4b7bcd) (hg histedit --continue to resume) just continue this time $ hg revert -r 'p1()' e $ hg resolve --mark e (no more unresolved files) continue: hg histedit --continue $ hg histedit --continue 2>&1 | fixbundle 7b4e2f4b7bcd: empty changeset log after edit $ hg log --graph @ changeset: 5:d9cf42e54966 | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: f | o changeset: 4:10486af2e984 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: d | o changeset: 3:65a9a84f33fd | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: c | o changeset: 2:da6535b52e45 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 1:c1f09da44841 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: a | o changeset: 0:1715188a53c7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Initial commit contents of e $ hg cat e I can haz no commute manifest $ hg manifest a b c d e f $ cd .. Repeat test using "roll", not "fold". 
"roll" folds in changes but drops message $ initrepo r2 $ cd r2 Initial generation of the command files $ EDITED="$TESTTMP/editedhistory.2" $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 3 >> $EDITED $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 4 >> $EDITED $ hg log --template 'roll {node|short} {rev} {desc}\n' -r 7 >> $EDITED $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 5 >> $EDITED $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 6 >> $EDITED $ cat $EDITED pick 65a9a84f33fd 3 c pick 00f1c5383965 4 d roll 39522b764e3d 7 does not commute with e pick 7b4e2f4b7bcd 5 e pick 500cac37a696 6 f log before edit $ hg log --graph @ changeset: 7:39522b764e3d | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: does not commute with e | o changeset: 6:500cac37a696 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: f | o changeset: 5:7b4e2f4b7bcd | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: e | o changeset: 4:00f1c5383965 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: d | o changeset: 3:65a9a84f33fd | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: c | o changeset: 2:da6535b52e45 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 1:c1f09da44841 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: a | o changeset: 0:1715188a53c7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Initial commit edit the history $ hg histedit 3 --commands $EDITED 2>&1 | fixbundle 2 files updated, 0 files merged, 0 files removed, 0 files unresolved merging e warning: conflicts while merging e! 
(edit, then use 'hg resolve --mark') Fix up the change (roll 39522b764e3d) (hg histedit --continue to resume) fix up $ echo 'I can haz no commute' > e $ hg resolve --mark e (no more unresolved files) continue: hg histedit --continue $ hg histedit --continue 2>&1 | fixbundle | grep -v '2 files removed' 2 files updated, 0 files merged, 0 files removed, 0 files unresolved 2 files updated, 0 files merged, 0 files removed, 0 files unresolved merging e warning: conflicts while merging e! (edit, then use 'hg resolve --mark') Fix up the change (pick 7b4e2f4b7bcd) (hg histedit --continue to resume) just continue this time $ hg revert -r 'p1()' e $ hg resolve --mark e (no more unresolved files) continue: hg histedit --continue $ hg histedit --continue 2>&1 | fixbundle 7b4e2f4b7bcd: empty changeset log after edit $ hg log --graph @ changeset: 5:e7c4f5d4eb75 | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: f | o changeset: 4:803d1bb561fc | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: d | o changeset: 3:65a9a84f33fd | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: c | o changeset: 2:da6535b52e45 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 1:c1f09da44841 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: a | o changeset: 0:1715188a53c7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Initial commit contents of e $ hg cat e I can haz no commute manifest $ hg manifest a b c d e f description is taken from rollup target commit $ hg log --debug --rev 4 changeset: 4:803d1bb561fceac3129ec778db9da249a3106fc3 phase: draft parent: 3:65a9a84f33fdeb1ad5679b3941ec885d2b24027b parent: -1:0000000000000000000000000000000000000000 manifest: 4:b068a323d969f22af1296ec6a5ea9384cef437ac user: test date: Thu Jan 01 00:00:00 1970 +0000 files: d e extra: branch=default extra: histedit_source=00f1c53839651fa5c76d423606811ea5455a79d0,39522b764e3d26103f08bd1fa2ccd3e3d7dbcf4e description: d done 
with repo r2 $ cd .. mercurial-3.7.3/tests/test-notify.t0000644000175000017500000003733312676531525016754 0ustar mpmmpm00000000000000 $ cat <> $HGRCPATH > [extensions] > notify= > > [hooks] > incoming.notify = python:hgext.notify.hook > > [notify] > sources = pull > diffstat = False > > [usersubs] > foo@bar = * > > [reposubs] > * = baz > EOF $ hg help notify notify extension - hooks for sending email push notifications This extension implements hooks to send email notifications when changesets are sent from or received by the local repository. First, enable the extension as explained in 'hg help extensions', and register the hook you want to run. "incoming" and "changegroup" hooks are run when changesets are received, while "outgoing" hooks are for changesets sent to another repository: [hooks] # one email for each incoming changeset incoming.notify = python:hgext.notify.hook # one email for all incoming changesets changegroup.notify = python:hgext.notify.hook # one email for all outgoing changesets outgoing.notify = python:hgext.notify.hook This registers the hooks. To enable notification, subscribers must be assigned to repositories. The "[usersubs]" section maps multiple repositories to a given recipient. The "[reposubs]" section maps multiple recipients to a single repository: [usersubs] # key is subscriber email, value is a comma-separated list of repo patterns user@host = pattern [reposubs] # key is repo pattern, value is a comma-separated list of subscriber emails pattern = user@host A "pattern" is a "glob" matching the absolute path to a repository, optionally combined with a revset expression. A revset expression, if present, is separated from the glob by a hash. Example: [reposubs] */widgets#branch(release) = qa-team@example.com This sends to "qa-team@example.com" whenever a changeset on the "release" branch triggers a notification in any repository ending in "widgets". 
In order to place them under direct user management, "[usersubs]" and "[reposubs]" sections may be placed in a separate "hgrc" file and incorporated by reference: [notify] config = /path/to/subscriptionsfile Notifications will not be sent until the "notify.test" value is set to "False"; see below. Notifications content can be tweaked with the following configuration entries: notify.test If "True", print messages to stdout instead of sending them. Default: True. notify.sources Space-separated list of change sources. Notifications are activated only when a changeset's source is in this list. Sources may be: "serve" changesets received via http or ssh "pull" changesets received via "hg pull" "unbundle" changesets received via "hg unbundle" "push" changesets sent or received via "hg push" "bundle" changesets sent via "hg unbundle" Default: serve. notify.strip Number of leading slashes to strip from url paths. By default, notifications reference repositories with their absolute path. "notify.strip" lets you turn them into relative paths. For example, "notify.strip=3" will change "/long/path/repository" into "repository". Default: 0. notify.domain Default email domain for sender or recipients with no explicit domain. notify.style Style file to use when formatting emails. notify.template Template to use when formatting emails. notify.incoming Template to use when run as an incoming hook, overriding "notify.template". notify.outgoing Template to use when run as an outgoing hook, overriding "notify.template". notify.changegroup Template to use when running as a changegroup hook, overriding "notify.template". notify.maxdiff Maximum number of diff lines to include in notification email. Set to 0 to disable the diff, or -1 to include all of it. Default: 300. notify.maxsubject Maximum number of characters in email's subject line. Default: 67. notify.diffstat Set to True to include a diffstat before diff content. Default: True. 
notify.merge If True, send notifications for merge changesets. Default: True. notify.mbox If set, append mails to this mbox file instead of sending. Default: None. notify.fromauthor If set, use the committer of the first changeset in a changegroup for the "From" field of the notification mail. If not set, take the user from the pushing repo. Default: False. If set, the following entries will also be used to customize the notifications: email.from Email "From" address to use if none can be found in the generated email content. web.baseurl Root repository URL to combine with repository paths when making references. See also "notify.strip". no commands defined $ hg init a $ echo a > a/a commit $ hg --cwd a commit -Ama -d '0 0' adding a clone $ hg --traceback clone a b updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo a >> a/a commit $ hg --traceback --cwd a commit -Amb -d '1 0' on Mac OS X 10.5 the tmp path is very long so would get stripped in the subject line $ cat <> $HGRCPATH > [notify] > maxsubject = 200 > EOF the python call below wraps continuation lines, which appear on Mac OS X 10.5 because of the very long subject line pull (minimal config) $ hg --traceback --cwd b pull ../a | \ > $PYTHON -c 'import sys,re; print re.sub("\n[\t ]", " ", sys.stdin.read()),' pulling from ../a searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Date: * (glob) Subject: changeset in $TESTTMP/b: b From: test X-Hg-Notification: changeset 0647d048b600 Message-Id: <*> (glob) To: baz, foo@bar changeset 0647d048b600 in $TESTTMP/b (glob) details: $TESTTMP/b?cmd=changeset;node=0647d048b600 description: b diffs (6 lines): diff -r cb9a9f314b8b -r 0647d048b600 a --- a/a Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:01 1970 +0000 @@ -1,1 +1,2 @@ a +a (run 'hg update' 
to get a working copy) $ cat <> $HGRCPATH > [notify] > config = `pwd`/.notify.conf > domain = test.com > strip = 42 > template = Subject: {desc|firstline|strip}\nFrom: {author}\nX-Test: foo\n\nchangeset {node|short} in {webroot}\ndescription:\n\t{desc|tabindent|strip} > > [web] > baseurl = http://test/ > EOF fail for config file is missing $ hg --cwd b rollback repository tip rolled back to revision 0 (undo pull) $ hg --cwd b pull ../a 2>&1 | grep 'error.*\.notify\.conf' > /dev/null && echo pull failed pull failed $ touch ".notify.conf" pull $ hg --cwd b rollback repository tip rolled back to revision 0 (undo pull) $ hg --traceback --cwd b pull ../a | \ > $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),' pulling from ../a searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit X-Test: foo Date: * (glob) Subject: b From: test@test.com X-Hg-Notification: changeset 0647d048b600 Message-Id: <*> (glob) To: baz@test.com, foo@bar changeset 0647d048b600 in b description: b diffs (6 lines): diff -r cb9a9f314b8b -r 0647d048b600 a --- a/a Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:01 1970 +0000 @@ -1,1 +1,2 @@ a +a (run 'hg update' to get a working copy) $ cat << EOF >> $HGRCPATH > [hooks] > incoming.notify = python:hgext.notify.hook > > [notify] > sources = pull > diffstat = True > EOF pull $ hg --cwd b rollback repository tip rolled back to revision 0 (undo pull) $ hg --traceback --cwd b pull ../a | \ > $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),' pulling from ../a searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit X-Test: foo Date: * (glob) Subject: b From: test@test.com X-Hg-Notification: 
changeset 0647d048b600 Message-Id: <*> (glob) To: baz@test.com, foo@bar changeset 0647d048b600 in b description: b diffstat: a | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) diffs (6 lines): diff -r cb9a9f314b8b -r 0647d048b600 a --- a/a Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:01 1970 +0000 @@ -1,1 +1,2 @@ a +a (run 'hg update' to get a working copy) test merge $ cd a $ hg up -C 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo a >> a $ hg ci -Am adda2 -d '2 0' created new head $ hg merge 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m merge -d '3 0' $ cd .. $ hg --traceback --cwd b pull ../a | \ > $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),' pulling from ../a searching for changes adding changesets adding manifests adding file changes added 2 changesets with 0 changes to 0 files Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit X-Test: foo Date: * (glob) Subject: adda2 From: test@test.com X-Hg-Notification: changeset 0a184ce6067f Message-Id: <*> (glob) To: baz@test.com, foo@bar changeset 0a184ce6067f in b description: adda2 diffstat: a | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) diffs (6 lines): diff -r cb9a9f314b8b -r 0a184ce6067f a --- a/a Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:02 1970 +0000 @@ -1,1 +1,2 @@ a +a Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit X-Test: foo Date: * (glob) Subject: merge From: test@test.com X-Hg-Notification: changeset 6a0cf76b2701 Message-Id: <*> (glob) To: baz@test.com, foo@bar changeset 6a0cf76b2701 in b description: merge (run 'hg update' to get a working copy) non-ascii content and truncation of multi-byte subject $ cat <> $HGRCPATH > [notify] > maxsubject = 4 > EOF $ echo a >> a/a $ hg --cwd a --encoding utf-8 commit -A -d '0 0' \ > -m `$PYTHON -c 'print 
"\xc3\xa0\xc3\xa1\xc3\xa2\xc3\xa3\xc3\xa4"'` $ hg --traceback --cwd b --encoding utf-8 pull ../a | \ > $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),' pulling from ../a searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 8bit X-Test: foo Date: * (glob) Subject: \xc3\xa0... (esc) From: test@test.com X-Hg-Notification: changeset 7ea05ad269dc Message-Id: <*> (glob) To: baz@test.com, foo@bar changeset 7ea05ad269dc in b description: \xc3\xa0\xc3\xa1\xc3\xa2\xc3\xa3\xc3\xa4 (esc) diffstat: a | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) diffs (7 lines): diff -r 6a0cf76b2701 -r 7ea05ad269dc a --- a/a Thu Jan 01 00:00:03 1970 +0000 +++ b/a Thu Jan 01 00:00:00 1970 +0000 @@ -1,2 +1,3 @@ a a +a (run 'hg update' to get a working copy) long lines $ cat <> $HGRCPATH > [notify] > maxsubject = 67 > test = False > mbox = mbox > EOF $ $PYTHON -c 'file("a/a", "ab").write("no" * 500 + "\n")' $ hg --cwd a commit -A -m "long line" $ hg --traceback --cwd b pull ../a pulling from ../a searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files notify: sending 2 subscribers 1 changes (run 'hg update' to get a working copy) $ $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", file("b/mbox").read()),' From test@test.com ... ... .. ..:..:.. .... 
(re) Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: quoted-printable X-Test: foo Date: * (glob) Subject: long line From: test@test.com X-Hg-Notification: changeset e0be44cf638b Message-Id: (glob) To: baz@test.com, foo@bar changeset e0be44cf638b in b description: long line diffstat: a | 1 + 1 files changed, 1 insertions(+), 0 deletions(-) diffs (8 lines): diff -r 7ea05ad269dc -r e0be44cf638b a --- a/a Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:00 1970 +0000 @@ -1,3 +1,4 @@ a a a +nonononononononononononononononononononononononononononononononononononono= nononononononononononononononononononononononononononononononononononononon= ononononononononononononononononononononononononononononononononononononono= nononononononononononononononononononononononononononononononononononononon= ononononononononononononononononononononononononononononononononononononono= nononononononononononononononononononononononononononononononononononononon= ononononononononononononononononononononononononononononononononononononono= nononononononononononononononononononononononononononononononononononononon= ononononononononononononononononononononononononononononononononononononono= nononononononononononononononononononononononononononononononononononononon= ononononononononononononononononononononononononononononononononononononono= nononononononononononononononononononononononononononononononononononononon= ononononononononononononononononononononononononononononononononononononono= nonononononononononononono revset selection: send to address that matches branch and repo $ cat << EOF >> $HGRCPATH > [hooks] > incoming.notify = python:hgext.notify.hook > > [notify] > sources = pull > test = True > diffstat = False > maxdiff = 0 > > [reposubs] > */a#branch(test) = will_no_be_send@example.com > */b#branch(test) = notify@example.com > EOF $ hg --cwd a branch test marked working directory as branch test (branches are permanent and global, 
did you want a bookmark?) $ echo a >> a/a $ hg --cwd a ci -m test -d '1 0' $ hg --traceback --cwd b pull ../a | \ > $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),' pulling from ../a searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit X-Test: foo Date: * (glob) Subject: test From: test@test.com X-Hg-Notification: changeset fbbcbc516f2f Message-Id: (glob) To: baz@test.com, foo@bar, notify@example.com changeset fbbcbc516f2f in b description: test (run 'hg update' to get a working copy) revset selection: don't send to address that waits for mails from different branch $ hg --cwd a update default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo a >> a/a $ hg --cwd a ci -m test -d '1 0' $ hg --traceback --cwd b pull ../a | \ > $PYTHON -c 'import sys,re; print re.sub("\n\t", " ", sys.stdin.read()),' pulling from ../a searching for changes adding changesets adding manifests adding file changes added 1 changesets with 0 changes to 0 files (+1 heads) Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit X-Test: foo Date: * (glob) Subject: test From: test@test.com X-Hg-Notification: changeset 38b42fa092de Message-Id: (glob) To: baz@test.com, foo@bar changeset 38b42fa092de in b description: test (run 'hg heads' to see heads) mercurial-3.7.3/tests/test-http.t0000644000175000017500000003041512676531525016415 0ustar mpmmpm00000000000000#require serve $ hg init test $ cd test $ echo foo>foo $ mkdir foo.d foo.d/bAr.hg.d foo.d/baR.d.hg $ echo foo>foo.d/foo $ echo bar>foo.d/bAr.hg.d/BaR $ echo bar>foo.d/baR.d.hg/bAR $ hg commit -A -m 1 adding foo adding foo.d/bAr.hg.d/BaR adding foo.d/baR.d.hg/bAR adding foo.d/foo $ hg serve -p $HGPORT -d --pid-file=../hg1.pid -E ../error.log $ hg --config server.uncompressed=False serve -p 
$HGPORT1 -d --pid-file=../hg2.pid Test server address cannot be reused #if windows $ hg serve -p $HGPORT1 2>&1 abort: cannot start server at ':$HGPORT1': * (glob) [255] #else $ hg serve -p $HGPORT1 2>&1 abort: cannot start server at ':$HGPORT1': Address already in use [255] #endif $ cd .. $ cat hg1.pid hg2.pid >> $DAEMON_PIDS clone via stream $ hg clone --uncompressed http://localhost:$HGPORT/ copy 2>&1 streaming all changes 6 files to transfer, 606 bytes of data transferred * bytes in * seconds (*/sec) (glob) searching for changes no changes found updating to branch default 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg verify -R copy checking changesets checking manifests crosschecking files in changesets and manifests checking files 4 files, 1 changesets, 4 total revisions try to clone via stream, should use pull instead $ hg clone --uncompressed http://localhost:$HGPORT1/ copy2 requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 4 changes to 4 files updating to branch default 4 files updated, 0 files merged, 0 files removed, 0 files unresolved clone via pull $ hg clone http://localhost:$HGPORT1/ copy-pull requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 4 changes to 4 files updating to branch default 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg verify -R copy-pull checking changesets checking manifests crosschecking files in changesets and manifests checking files 4 files, 1 changesets, 4 total revisions $ cd test $ echo bar > bar $ hg commit -A -d '1 0' -m 2 adding bar $ cd .. clone over http with --update $ hg clone http://localhost:$HGPORT1/ updated --update 0 requesting all changes adding changesets adding manifests adding file changes added 2 changesets with 5 changes to 5 files updating to branch default 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg log -r . 
-R updated changeset: 0:8b6053c928fe user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1 $ rm -rf updated incoming via HTTP $ hg clone http://localhost:$HGPORT1/ --rev 0 partial adding changesets adding manifests adding file changes added 1 changesets with 4 changes to 4 files updating to branch default 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd partial $ touch LOCAL $ hg ci -qAm LOCAL $ hg incoming http://localhost:$HGPORT1/ --template '{desc}\n' comparing with http://localhost:$HGPORT1/ searching for changes 2 $ cd .. pull $ cd copy-pull $ echo '[hooks]' >> .hg/hgrc $ echo "changegroup = printenv.py changegroup" >> .hg/hgrc $ hg pull pulling from http://localhost:$HGPORT1/ searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files changegroup hook: HG_NODE=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_NODE_LAST=5fed3813f7f5e1824344fdc9cf8f63bb662c292d HG_SOURCE=pull HG_TXNID=TXN:* HG_URL=http://localhost:$HGPORT1/ (glob) (run 'hg update' to get a working copy) $ cd .. 
clone from invalid URL $ hg clone http://localhost:$HGPORT/bad abort: HTTP Error 404: Not Found [255] test http authentication + use the same server to test server side streaming preference $ cd test $ cat << EOT > userpass.py > import base64 > from mercurial.hgweb import common > def perform_authentication(hgweb, req, op): > auth = req.env.get('HTTP_AUTHORIZATION') > if not auth: > raise common.ErrorResponse(common.HTTP_UNAUTHORIZED, 'who', > [('WWW-Authenticate', 'Basic Realm="mercurial"')]) > if base64.b64decode(auth.split()[1]).split(':', 1) != ['user', 'pass']: > raise common.ErrorResponse(common.HTTP_FORBIDDEN, 'no') > def extsetup(): > common.permhooks.insert(0, perform_authentication) > EOT $ hg --config extensions.x=userpass.py serve -p $HGPORT2 -d --pid-file=pid \ > --config server.preferuncompressed=True \ > --config web.push_ssl=False --config web.allow_push=* -A ../access.log $ cat pid >> $DAEMON_PIDS $ cat << EOF > get_pass.py > import getpass > def newgetpass(arg): > return "pass" > getpass.getpass = newgetpass > EOF $ hg id http://localhost:$HGPORT2/ abort: http authorization required for http://localhost:$HGPORT2/ [255] $ hg id http://localhost:$HGPORT2/ abort: http authorization required for http://localhost:$HGPORT2/ [255] $ hg id --config ui.interactive=true --config extensions.getpass=get_pass.py http://user@localhost:$HGPORT2/ http authorization required for http://localhost:$HGPORT2/ realm: mercurial user: user password: 5fed3813f7f5 $ hg id http://user:pass@localhost:$HGPORT2/ 5fed3813f7f5 $ echo '[auth]' >> .hg/hgrc $ echo 'l.schemes=http' >> .hg/hgrc $ echo 'l.prefix=lo' >> .hg/hgrc $ echo 'l.username=user' >> .hg/hgrc $ echo 'l.password=pass' >> .hg/hgrc $ hg id http://localhost:$HGPORT2/ 5fed3813f7f5 $ hg id http://localhost:$HGPORT2/ 5fed3813f7f5 $ hg id http://user@localhost:$HGPORT2/ 5fed3813f7f5 $ hg clone http://user:pass@localhost:$HGPORT2/ dest 2>&1 streaming all changes 7 files to transfer, 916 bytes of data transferred * bytes 
in * seconds (*/sec) (glob) searching for changes no changes found updating to branch default 5 files updated, 0 files merged, 0 files removed, 0 files unresolved --pull should override server's preferuncompressed $ hg clone --pull http://user:pass@localhost:$HGPORT2/ dest-pull 2>&1 requesting all changes adding changesets adding manifests adding file changes added 2 changesets with 5 changes to 5 files updating to branch default 5 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg id http://user2@localhost:$HGPORT2/ abort: http authorization required for http://localhost:$HGPORT2/ [255] $ hg id http://user:pass2@localhost:$HGPORT2/ abort: HTTP Error 403: no [255] $ hg -R dest tag -r tip top $ hg -R dest push http://user:pass@localhost:$HGPORT2/ pushing to http://user:***@localhost:$HGPORT2/ searching for changes remote: adding changesets remote: adding manifests remote: adding file changes remote: added 1 changesets with 1 changes to 1 files $ hg rollback -q $ cut -c38- ../access.log "GET /?cmd=capabilities HTTP/1.1" 200 - "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces "GET /?cmd=capabilities HTTP/1.1" 200 - "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces "GET /?cmd=capabilities HTTP/1.1" 200 - "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks "GET /?cmd=capabilities HTTP/1.1" 200 - "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks "GET /?cmd=capabilities HTTP/1.1" 200 - "GET /?cmd=lookup HTTP/1.1" 200 - 
x-hgarg-1:key=tip "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks "GET /?cmd=capabilities HTTP/1.1" 200 - "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks "GET /?cmd=capabilities HTTP/1.1" 200 - "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=namespaces "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks "GET /?cmd=capabilities HTTP/1.1" 200 - "GET /?cmd=branchmap HTTP/1.1" 200 - "GET /?cmd=stream_out HTTP/1.1" 401 - "GET /?cmd=stream_out HTTP/1.1" 200 - "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D5fed3813f7f5e1824344fdc9cf8f63bb662c292d "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=0&common=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&listkeys=phase%2Cbookmarks "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases "GET /?cmd=capabilities HTTP/1.1" 200 - "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D "GET /?cmd=getbundle HTTP/1.1" 401 - 
x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&listkeys=phase%2Cbookmarks "GET /?cmd=getbundle HTTP/1.1" 200 - x-hgarg-1:bundlecaps=HG20%2Cbundle2%3DHG20%250Achangegroup%253D01%252C02%250Adigests%253Dmd5%252Csha1%252Csha512%250Aerror%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250Ahgtagsfnodes%250Alistkeys%250Apushkey%250Aremote-changegroup%253Dhttp%252Chttps&cg=1&common=0000000000000000000000000000000000000000&heads=5fed3813f7f5e1824344fdc9cf8f63bb662c292d&listkeys=phase%2Cbookmarks "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases "GET /?cmd=capabilities HTTP/1.1" 200 - "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces "GET /?cmd=capabilities HTTP/1.1" 200 - "GET /?cmd=lookup HTTP/1.1" 200 - x-hgarg-1:key=tip "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=namespaces "GET /?cmd=listkeys HTTP/1.1" 403 - x-hgarg-1:namespace=namespaces "GET /?cmd=capabilities HTTP/1.1" 200 - "GET /?cmd=batch HTTP/1.1" 200 - x-hgarg-1:cmds=heads+%3Bknown+nodes%3D7f4e523d01f2cc3765ac8934da3d14db775ff872 "GET /?cmd=listkeys HTTP/1.1" 401 - x-hgarg-1:namespace=phases "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks "GET /?cmd=branchmap HTTP/1.1" 200 - "GET /?cmd=branchmap HTTP/1.1" 200 - "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=bookmarks "POST /?cmd=unbundle HTTP/1.1" 200 - x-hgarg-1:heads=666f726365 "GET /?cmd=listkeys HTTP/1.1" 200 - x-hgarg-1:namespace=phases $ cd .. 
clone of serve with repo in root and unserved subrepo (issue2970) $ hg --cwd test init sub $ echo empty > test/sub/empty $ hg --cwd test/sub add empty $ hg --cwd test/sub commit -qm 'add empty' $ hg --cwd test/sub tag -r 0 something $ echo sub = sub > test/.hgsub $ hg --cwd test add .hgsub $ hg --cwd test commit -qm 'add subrepo' $ hg clone http://localhost:$HGPORT noslash-clone requesting all changes adding changesets adding manifests adding file changes added 3 changesets with 7 changes to 7 files updating to branch default abort: HTTP Error 404: Not Found [255] $ hg clone http://localhost:$HGPORT/ slash-clone requesting all changes adding changesets adding manifests adding file changes added 3 changesets with 7 changes to 7 files updating to branch default abort: HTTP Error 404: Not Found [255] check error log $ cat error.log mercurial-3.7.3/tests/test-issue2137.t0000644000175000017500000000305312676531525017101 0ustar mpmmpm00000000000000https://bz.mercurial-scm.org/2137 Setup: create a little extension that has 3 side-effects: 1) ensure changelog data is not inlined 2) make revlog to use lazyparser 3) test that repo.lookup() works 1 and 2 are preconditions for the bug; 3 is the bug. 
$ cat > commitwrapper.py < from mercurial import extensions, node, revlog > > def reposetup(ui, repo): > class wraprepo(repo.__class__): > def commit(self, *args, **kwargs): > result = super(wraprepo, self).commit(*args, **kwargs) > tip1 = node.short(repo.changelog.tip()) > tip2 = node.short(repo.lookup(tip1)) > assert tip1 == tip2 > ui.write('new tip: %s\n' % tip1) > return result > repo.__class__ = wraprepo > > def extsetup(ui): > revlog._maxinline = 8 # split out 00changelog.d early > revlog._prereadsize = 8 # use revlog.lazyparser > EOF $ cat >> $HGRCPATH < [extensions] > commitwrapper = `pwd`/commitwrapper.py > EOF $ hg init repo1 $ cd repo1 $ echo a > a $ hg commit -A -m'add a with a long commit message to make the changelog a bit bigger' adding a new tip: 553596fad57b Test that new changesets are visible to repo.lookup(): $ echo a >> a $ hg commit -m'one more commit to demonstrate the bug' new tip: 799ae3599e0e $ hg tip changeset: 1:799ae3599e0e tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: one more commit to demonstrate the bug $ cd .. mercurial-3.7.3/tests/test-revset.t0000644000175000017500000011323212676531525016745 0ustar mpmmpm00000000000000 $ HGENCODING=utf-8 $ export HGENCODING $ cat > testrevset.py << EOF > import mercurial.revset > > baseset = mercurial.revset.baseset > > def r3232(repo, subset, x): > """"simple revset that return [3,2,3,2] > > revisions duplicated on purpose. > """ > if 3 not in subset: > if 2 in subset: > return baseset([2,2]) > return baseset() > return baseset([3,3,2,2]) > > mercurial.revset.symbols['r3232'] = r3232 > EOF $ cat >> $HGRCPATH << EOF > [extensions] > testrevset=$TESTTMP/testrevset.py > EOF $ try() { > hg debugrevspec --debug "$@" > } $ log() { > hg log --template '{rev}\n' -r "$1" > } $ hg init repo $ cd repo $ echo a > a $ hg branch a marked working directory as branch a (branches are permanent and global, did you want a bookmark?) 
$ hg ci -Aqm0 $ echo b > b $ hg branch b marked working directory as branch b $ hg ci -Aqm1 $ rm a $ hg branch a-b-c- marked working directory as branch a-b-c- $ hg ci -Aqm2 -u Bob $ hg log -r "extra('branch', 'a-b-c-')" --template '{rev}\n' 2 $ hg log -r "extra('branch')" --template '{rev}\n' 0 1 2 $ hg log -r "extra('branch', 're:a')" --template '{rev} {branch}\n' 0 a 2 a-b-c- $ hg co 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg branch +a+b+c+ marked working directory as branch +a+b+c+ $ hg ci -Aqm3 $ hg co 2 # interleave 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo bb > b $ hg branch -- -a-b-c- marked working directory as branch -a-b-c- $ hg ci -Aqm4 -d "May 12 2005" $ hg co 3 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg branch !a/b/c/ marked working directory as branch !a/b/c/ $ hg ci -Aqm"5 bug" $ hg merge 4 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg branch _a_b_c_ marked working directory as branch _a_b_c_ $ hg ci -Aqm"6 issue619" $ hg branch .a.b.c. marked working directory as branch .a.b.c. $ hg ci -Aqm7 $ hg branch all marked working directory as branch all $ hg co 4 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg branch é marked working directory as branch \xc3\xa9 (esc) $ hg ci -Aqm9 $ hg tag -r6 1.0 $ hg bookmark -r6 xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx $ hg clone --quiet -U -r 7 . ../remote1 $ hg clone --quiet -U -r 8 . 
../remote2 $ echo "[paths]" >> .hg/hgrc $ echo "default = ../remote1" >> .hg/hgrc trivial $ try 0:1 (range ('symbol', '0') ('symbol', '1')) * set: 0 1 $ try --optimize : (rangeall None) * optimized: (range ('string', '0') ('string', 'tip')) * set: 0 1 2 3 4 5 6 7 8 9 $ try 3::6 (dagrange ('symbol', '3') ('symbol', '6')) * set: 3 5 6 $ try '0|1|2' (or ('symbol', '0') ('symbol', '1') ('symbol', '2')) * set: 0 1 2 names that should work without quoting $ try a ('symbol', 'a') * set: 0 $ try b-a (minus ('symbol', 'b') ('symbol', 'a')) * set: > 1 $ try _a_b_c_ ('symbol', '_a_b_c_') * set: 6 $ try _a_b_c_-a (minus ('symbol', '_a_b_c_') ('symbol', 'a')) * set: > 6 $ try .a.b.c. ('symbol', '.a.b.c.') * set: 7 $ try .a.b.c.-a (minus ('symbol', '.a.b.c.') ('symbol', 'a')) * set: > 7 names that should be caught by fallback mechanism $ try -- '-a-b-c-' ('symbol', '-a-b-c-') * set: 4 $ log -a-b-c- 4 $ try '+a+b+c+' ('symbol', '+a+b+c+') * set: 3 $ try '+a+b+c+:' (rangepost ('symbol', '+a+b+c+')) * set: 3 4 5 6 7 8 9 $ try ':+a+b+c+' (rangepre ('symbol', '+a+b+c+')) * set: 0 1 2 3 $ try -- '-a-b-c-:+a+b+c+' (range ('symbol', '-a-b-c-') ('symbol', '+a+b+c+')) * set: 4 3 $ log '-a-b-c-:+a+b+c+' 4 3 $ try -- -a-b-c--a # complains (minus (minus (minus (negate ('symbol', 'a')) ('symbol', 'b')) ('symbol', 'c')) (negate ('symbol', 'a'))) abort: unknown revision '-a'! 
[255] $ try é ('symbol', '\xc3\xa9') * set: 9 no quoting needed $ log ::a-b-c- 0 1 2 quoting needed $ try '"-a-b-c-"-a' (minus ('string', '-a-b-c-') ('symbol', 'a')) * set: > 4 $ log '1 or 2' 1 2 $ log '1|2' 1 2 $ log '1 and 2' $ log '1&2' $ try '1&2|3' # precedence - and is higher (or (and ('symbol', '1') ('symbol', '2')) ('symbol', '3')) * set: , > 3 $ try '1|2&3' (or ('symbol', '1') (and ('symbol', '2') ('symbol', '3'))) * set: , > 1 $ try '1&2&3' # associativity (and (and ('symbol', '1') ('symbol', '2')) ('symbol', '3')) * set: $ try '1|(2|3)' (or ('symbol', '1') (group (or ('symbol', '2') ('symbol', '3')))) * set: , > 1 2 3 $ log '1.0' # tag 6 $ log 'a' # branch 0 $ log '2785f51ee' 0 $ log 'date(2005)' 4 $ log 'date(this is a test)' hg: parse error at 10: unexpected token: symbol [255] $ log 'date()' hg: parse error: date requires a string [255] $ log 'date' abort: unknown revision 'date'! [255] $ log 'date(' hg: parse error at 5: not a prefix: end [255] $ log 'date("\xy")' hg: parse error: invalid \x escape [255] $ log 'date(tip)' abort: invalid date: 'tip' [255] $ log '0:date' abort: unknown revision 'date'! [255] $ log '::"date"' abort: unknown revision 'date'! 
[255] $ hg book date -r 4 $ log '0:date' 0 1 2 3 4 $ log '::date' 0 1 2 4 $ log '::"date"' 0 1 2 4 $ log 'date(2005) and 1::' 4 $ hg book -d date keyword arguments $ log 'extra(branch, value=a)' 0 $ log 'extra(branch, a, b)' hg: parse error: extra takes at most 2 arguments [255] $ log 'extra(a, label=b)' hg: parse error: extra got multiple values for keyword argument 'label' [255] $ log 'extra(label=branch, default)' hg: parse error: extra got an invalid argument [255] $ log 'extra(branch, foo+bar=baz)' hg: parse error: extra got an invalid argument [255] $ log 'extra(unknown=branch)' hg: parse error: extra got an unexpected keyword argument 'unknown' [255] $ try 'foo=bar|baz' (keyvalue ('symbol', 'foo') (or ('symbol', 'bar') ('symbol', 'baz'))) hg: parse error: can't use a key-value pair in this context [255] Test that symbols only get parsed as functions if there's an opening parenthesis. $ hg book only -r 9 $ log 'only(only)' # Outer "only" is a function, inner "only" is the bookmark 8 9 ancestor can accept 0 or more arguments $ log 'ancestor()' $ log 'ancestor(1)' 1 $ log 'ancestor(4,5)' 1 $ log 'ancestor(4,5) and 4' $ log 'ancestor(0,0,1,3)' 0 $ log 'ancestor(3,1,5,3,5,1)' 1 $ log 'ancestor(0,1,3,5)' 0 $ log 'ancestor(1,2,3,4,5)' 1 test ancestors $ log 'ancestors(5)' 0 1 3 5 $ log 'ancestor(ancestors(5))' 0 $ log '::r3232()' 0 1 2 3 $ log 'author(bob)' 2 $ log 'author("re:bob|test")' 0 1 2 3 4 5 6 7 8 9 $ log 'branch(é)' 8 9 $ log 'branch(a)' 0 $ hg log -r 'branch("re:a")' --template '{rev} {branch}\n' 0 a 2 a-b-c- 3 +a+b+c+ 4 -a-b-c- 5 !a/b/c/ 6 _a_b_c_ 7 .a.b.c. 
$ log 'children(ancestor(4,5))' 2 3 $ log 'closed()' $ log 'contains(a)' 0 1 3 5 $ log 'contains("../repo/a")' 0 1 3 5 $ log 'desc(B)' 5 $ log 'descendants(2 or 3)' 2 3 4 5 6 7 8 9 $ log 'file("b*")' 1 4 $ log 'filelog("b")' 1 4 $ log 'filelog("../repo/b")' 1 4 $ log 'follow()' 0 1 2 4 8 9 $ log 'grep("issue\d+")' 6 $ try 'grep("(")' # invalid regular expression (func ('symbol', 'grep') ('string', '(')) hg: parse error: invalid match pattern: unbalanced parenthesis [255] $ try 'grep("\bissue\d+")' (func ('symbol', 'grep') ('string', '\x08issue\\d+')) * set: > $ try 'grep(r"\bissue\d+")' (func ('symbol', 'grep') ('string', '\\bissue\\d+')) * set: > 6 $ try 'grep(r"\")' hg: parse error at 7: unterminated string [255] $ log 'head()' 0 1 2 3 4 5 6 7 9 $ log 'heads(6::)' 7 $ log 'keyword(issue)' 6 $ log 'keyword("test a")' $ log 'limit(head(), 1)' 0 $ log 'limit(author("re:bob|test"), 3, 5)' 5 6 7 $ log 'limit(author("re:bob|test"), offset=6)' 6 $ log 'limit(author("re:bob|test"), offset=10)' $ log 'limit(all(), 1, -1)' hg: parse error: negative offset [255] $ log 'matching(6)' 6 $ log 'matching(6:7, "phase parents user date branch summary files description substate")' 6 7 Testing min and max max: simple $ log 'max(contains(a))' 5 max: simple on unordered set) $ log 'max((4+0+2+5+7) and contains(a))' 5 max: no result $ log 'max(contains(stringthatdoesnotappearanywhere))' max: no result on unordered set $ log 'max((4+0+2+5+7) and contains(stringthatdoesnotappearanywhere))' min: simple $ log 'min(contains(a))' 0 min: simple on unordered set $ log 'min((4+0+2+5+7) and contains(a))' 0 min: empty $ log 'min(contains(stringthatdoesnotappearanywhere))' min: empty on unordered set $ log 'min((4+0+2+5+7) and contains(stringthatdoesnotappearanywhere))' $ log 'merge()' 6 $ log 'branchpoint()' 1 4 $ log 'modifies(b)' 4 $ log 'modifies("path:b")' 4 $ log 'modifies("*")' 4 6 $ log 'modifies("set:modified()")' 4 $ log 'id(5)' 2 $ log 'only(9)' 8 9 $ log 'only(8)' 8 $ log 'only(9, 5)' 
2 4 8 9 $ log 'only(7 + 9, 5 + 2)' 4 6 7 8 9 Test empty set input $ log 'only(p2())' $ log 'only(p1(), p2())' 0 1 2 4 8 9 Test '%' operator $ log '9%' 8 9 $ log '9%5' 2 4 8 9 $ log '(7 + 9)%(5 + 2)' 4 6 7 8 9 Test opreand of '%' is optimized recursively (issue4670) $ try --optimize '8:9-8%' (onlypost (minus (range ('symbol', '8') ('symbol', '9')) ('symbol', '8'))) * optimized: (func ('symbol', 'only') (and (range ('symbol', '8') ('symbol', '9')) (not ('symbol', '8')))) * set: 8 9 $ try --optimize '(9)%(5)' (only (group ('symbol', '9')) (group ('symbol', '5'))) * optimized: (func ('symbol', 'only') (list ('symbol', '9') ('symbol', '5'))) * set: 2 4 8 9 Test the order of operations $ log '7 + 9%5 + 2' 7 2 4 8 9 Test explicit numeric revision $ log 'rev(-2)' $ log 'rev(-1)' -1 $ log 'rev(0)' 0 $ log 'rev(9)' 9 $ log 'rev(10)' $ log 'rev(tip)' hg: parse error: rev expects a number [255] Test hexadecimal revision $ log 'id(2)' abort: 00changelog.i@2: ambiguous identifier! [255] $ log 'id(23268)' 4 $ log 'id(2785f51eece)' 0 $ log 'id(d5d0dcbdc4d9ff5dbb2d336f32f0bb561c1a532c)' 8 $ log 'id(d5d0dcbdc4a)' $ log 'id(d5d0dcbdc4w)' $ log 'id(d5d0dcbdc4d9ff5dbb2d336f32f0bb561c1a532d)' $ log 'id(d5d0dcbdc4d9ff5dbb2d336f32f0bb561c1a532q)' $ log 'id(1.0)' $ log 'id(xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx)' Test null revision $ log '(null)' -1 $ log '(null:0)' -1 0 $ log '(0:null)' 0 -1 $ log 'null::0' -1 0 $ log 'null:tip - 0:' -1 $ log 'null: and null::' | head -1 -1 $ log 'null: or 0:' | head -2 -1 0 $ log 'ancestors(null)' -1 $ log 'reverse(null:)' | tail -2 0 -1 BROKEN: should be '-1' $ log 'first(null:)' BROKEN: should be '-1' $ log 'min(null:)' $ log 'tip:null and all()' | tail -2 1 0 Test working-directory revision $ hg debugrevspec 'wdir()' 2147483647 $ hg debugrevspec 'tip or wdir()' 9 2147483647 $ hg debugrevspec '0:tip and wdir()' $ log '0:wdir()' | tail -3 8 9 2147483647 $ log 'wdir():0' | head -3 2147483647 9 8 $ log 'wdir():wdir()' 2147483647 $ log '(all() + wdir()) 
& min(. + wdir())' 9 $ log '(all() + wdir()) & max(. + wdir())' 2147483647 $ log '(all() + wdir()) & first(wdir() + .)' 2147483647 $ log '(all() + wdir()) & last(. + wdir())' 2147483647 $ log 'outgoing()' 8 9 $ log 'outgoing("../remote1")' 8 9 $ log 'outgoing("../remote2")' 3 5 6 7 9 $ log 'p1(merge())' 5 $ log 'p2(merge())' 4 $ log 'parents(merge())' 4 5 $ log 'p1(branchpoint())' 0 2 $ log 'p2(branchpoint())' $ log 'parents(branchpoint())' 0 2 $ log 'removes(a)' 2 6 $ log 'roots(all())' 0 $ log 'reverse(2 or 3 or 4 or 5)' 5 4 3 2 $ log 'reverse(all())' 9 8 7 6 5 4 3 2 1 0 $ log 'reverse(all()) & filelog(b)' 4 1 $ log 'rev(5)' 5 $ log 'sort(limit(reverse(all()), 3))' 7 8 9 $ log 'sort(2 or 3 or 4 or 5, date)' 2 3 5 4 $ log 'tagged()' 6 $ log 'tag()' 6 $ log 'tag(1.0)' 6 $ log 'tag(tip)' 9 test sort revset -------------------------------------------- test when adding two unordered revsets $ log 'sort(keyword(issue) or modifies(b))' 4 6 test when sorting a reversed collection in the same way it is $ log 'sort(reverse(all()), -rev)' 9 8 7 6 5 4 3 2 1 0 test when sorting a reversed collection $ log 'sort(reverse(all()), rev)' 0 1 2 3 4 5 6 7 8 9 test sorting two sorted collections in different orders $ log 'sort(outgoing() or reverse(removes(a)), rev)' 2 6 8 9 test sorting two sorted collections in different orders backwards $ log 'sort(outgoing() or reverse(removes(a)), -rev)' 9 8 6 2 test subtracting something from an addset $ log '(outgoing() or removes(a)) - removes(a)' 8 9 test intersecting something with an addset $ log 'parents(outgoing() or removes(a))' 1 4 5 8 test that `or` operation combines elements in the right order: $ log '3:4 or 2:5' 3 4 2 5 $ log '3:4 or 5:2' 3 4 5 2 $ log 'sort(3:4 or 2:5)' 2 3 4 5 $ log 'sort(3:4 or 5:2)' 2 3 4 5 test that more than one `-r`s are combined in the right order and deduplicated: $ hg log -T '{rev}\n' -r 3 -r 3 -r 4 -r 5:2 -r 'ancestors(4)' 3 4 5 2 0 1 test that `or` operation skips duplicated revisions from right-hand 
side $ try 'reverse(1::5) or ancestors(4)' (or (func ('symbol', 'reverse') (dagrange ('symbol', '1') ('symbol', '5'))) (func ('symbol', 'ancestors') ('symbol', '4'))) * set: , > 5 3 1 0 2 4 $ try 'sort(ancestors(4) or reverse(1::5))' (func ('symbol', 'sort') (or (func ('symbol', 'ancestors') ('symbol', '4')) (func ('symbol', 'reverse') (dagrange ('symbol', '1') ('symbol', '5'))))) * set: , > 0 1 2 3 4 5 test optimization of trivial `or` operation $ try --optimize '0|(1)|"2"|-2|tip|null' (or ('symbol', '0') (group ('symbol', '1')) ('string', '2') (negate ('symbol', '2')) ('symbol', 'tip') ('symbol', 'null')) * optimized: (func ('symbol', '_list') ('string', '0\x001\x002\x00-2\x00tip\x00null')) * set: 0 1 2 8 9 -1 $ try --optimize '0|1|2:3' (or ('symbol', '0') ('symbol', '1') (range ('symbol', '2') ('symbol', '3'))) * optimized: (or (func ('symbol', '_list') ('string', '0\x001')) (range ('symbol', '2') ('symbol', '3'))) * set: , > 0 1 2 3 $ try --optimize '0:1|2|3:4|5|6' (or (range ('symbol', '0') ('symbol', '1')) ('symbol', '2') (range ('symbol', '3') ('symbol', '4')) ('symbol', '5') ('symbol', '6')) * optimized: (or (range ('symbol', '0') ('symbol', '1')) ('symbol', '2') (range ('symbol', '3') ('symbol', '4')) (func ('symbol', '_list') ('string', '5\x006'))) * set: , >, , >> 0 1 2 3 4 5 6 test that `_list` should be narrowed by provided `subset` $ log '0:2 and (null|1|2|3)' 1 2 test that `_list` should remove duplicates $ log '0|1|2|1|2|-1|tip' 0 1 2 9 test unknown revision in `_list` $ log '0|unknown' abort: unknown revision 'unknown'! [255] test integer range in `_list` $ log '-1|-10' 9 0 $ log '-10|-11' abort: unknown revision '-11'! [255] $ log '9|10' abort: unknown revision '10'! 
[255] test '0000' != '0' in `_list` $ log '0|0000' 0 -1 test ',' in `_list` $ log '0,1' hg: parse error: can't use a list in this context (see hg help "revsets.x or y") [255] $ try '0,1,2' (list ('symbol', '0') ('symbol', '1') ('symbol', '2')) hg: parse error: can't use a list in this context (see hg help "revsets.x or y") [255] test that chained `or` operations make balanced addsets $ try '0:1|1:2|2:3|3:4|4:5' (or (range ('symbol', '0') ('symbol', '1')) (range ('symbol', '1') ('symbol', '2')) (range ('symbol', '2') ('symbol', '3')) (range ('symbol', '3') ('symbol', '4')) (range ('symbol', '4') ('symbol', '5'))) * set: , >, , , >>> 0 1 2 3 4 5 no crash by empty group "()" while optimizing `or` operations $ try --optimize '0|()' (or ('symbol', '0') (group None)) * optimized: (or ('symbol', '0') None) hg: parse error: missing argument [255] test that chained `or` operations never eat up stack (issue4624) (uses `0:1` instead of `0` to avoid future optimization of trivial revisions) $ hg log -T '{rev}\n' -r "`python -c "print '|'.join(['0:1'] * 500)"`" 0 1 test that repeated `-r` options never eat up stack (issue4565) (uses `-r 0::1` to avoid possible optimization at old-style parser) $ hg log -T '{rev}\n' `python -c "for i in xrange(500): print '-r 0::1 ',"` 0 1 check that conversion to only works $ try --optimize '::3 - ::1' (minus (dagrangepre ('symbol', '3')) (dagrangepre ('symbol', '1'))) * optimized: (func ('symbol', 'only') (list ('symbol', '3') ('symbol', '1'))) * set: 3 $ try --optimize 'ancestors(1) - ancestors(3)' (minus (func ('symbol', 'ancestors') ('symbol', '1')) (func ('symbol', 'ancestors') ('symbol', '3'))) * optimized: (func ('symbol', 'only') (list ('symbol', '1') ('symbol', '3'))) * set: $ try --optimize 'not ::2 and ::6' (and (not (dagrangepre ('symbol', '2'))) (dagrangepre ('symbol', '6'))) * optimized: (func ('symbol', 'only') (list ('symbol', '6') ('symbol', '2'))) * set: 3 4 5 6 $ try --optimize 'ancestors(6) and not ancestors(4)' (and (func 
('symbol', 'ancestors') ('symbol', '6')) (not (func ('symbol', 'ancestors') ('symbol', '4')))) * optimized: (func ('symbol', 'only') (list ('symbol', '6') ('symbol', '4'))) * set: 3 5 6 no crash by empty group "()" while optimizing to "only()" $ try --optimize '::1 and ()' (and (dagrangepre ('symbol', '1')) (group None)) * optimized: (and None (func ('symbol', 'ancestors') ('symbol', '1'))) hg: parse error: missing argument [255] we can use patterns when searching for tags $ log 'tag("1..*")' abort: tag '1..*' does not exist! [255] $ log 'tag("re:1..*")' 6 $ log 'tag("re:[0-9].[0-9]")' 6 $ log 'tag("literal:1.0")' 6 $ log 'tag("re:0..*")' $ log 'tag(unknown)' abort: tag 'unknown' does not exist! [255] $ log 'tag("re:unknown")' $ log 'present(tag("unknown"))' $ log 'present(tag("re:unknown"))' $ log 'branch(unknown)' abort: unknown revision 'unknown'! [255] $ log 'branch("literal:unknown")' abort: branch 'unknown' does not exist! [255] $ log 'branch("re:unknown")' $ log 'present(branch("unknown"))' $ log 'present(branch("re:unknown"))' $ log 'user(bob)' 2 $ log '4::8' 4 8 $ log '4:8' 4 5 6 7 8 $ log 'sort(!merge() & (modifies(b) | user(bob) | keyword(bug) | keyword(issue) & 1::9), "-date")' 4 2 5 $ log 'not 0 and 0:2' 1 2 $ log 'not 1 and 0:2' 0 2 $ log 'not 2 and 0:2' 0 1 $ log '(1 and 2)::' $ log '(1 and 2):' $ log '(1 and 2):3' $ log 'sort(head(), -rev)' 9 7 6 5 4 3 2 1 0 $ log '4::8 - 8' 4 $ log 'matching(1 or 2 or 3) and (2 or 3 or 1)' 2 3 1 $ log 'named("unknown")' abort: namespace 'unknown' does not exist! [255] $ log 'named("re:unknown")' abort: no namespace exists that match 'unknown'! 
[255] $ log 'present(named("unknown"))' $ log 'present(named("re:unknown"))' $ log 'tag()' 6 $ log 'named("tags")' 6 issue2437 $ log '3 and p1(5)' 3 $ log '4 and p2(6)' 4 $ log '1 and parents(:2)' 1 $ log '2 and children(1:)' 2 $ log 'roots(all()) or roots(all())' 0 $ hg debugrevspec 'roots(all()) or roots(all())' 0 $ log 'heads(branch(é)) or heads(branch(é))' 9 $ log 'ancestors(8) and (heads(branch("-a-b-c-")) or heads(branch(é)))' 4 issue2654: report a parse error if the revset was not completely parsed $ log '1 OR 2' hg: parse error at 2: invalid token [255] or operator should preserve ordering: $ log 'reverse(2::4) or tip' 4 2 9 parentrevspec $ log 'merge()^0' 6 $ log 'merge()^' 5 $ log 'merge()^1' 5 $ log 'merge()^2' 4 $ log 'merge()^^' 3 $ log 'merge()^1^' 3 $ log 'merge()^^^' 1 $ log 'merge()~0' 6 $ log 'merge()~1' 5 $ log 'merge()~2' 3 $ log 'merge()~2^1' 1 $ log 'merge()~3' 1 $ log '(-3:tip)^' 4 6 8 $ log 'tip^foo' hg: parse error: ^ expects a number 0, 1, or 2 [255] Bogus function gets suggestions $ log 'add()' hg: parse error: unknown identifier: add (did you mean adds?) [255] $ log 'added()' hg: parse error: unknown identifier: added (did you mean adds?) [255] $ log 'remo()' hg: parse error: unknown identifier: remo (did you mean one of remote, removes?) [255] $ log 'babar()' hg: parse error: unknown identifier: babar [255] Bogus function with a similar internal name doesn't suggest the internal name $ log 'matches()' hg: parse error: unknown identifier: matches (did you mean matching?) 
[255] Undocumented functions aren't suggested as similar either $ log 'wdir2()' hg: parse error: unknown identifier: wdir2 [255] multiple revspecs $ hg log -r 'tip~1:tip' -r 'tip~2:tip~1' --template '{rev}\n' 8 9 4 5 6 7 test usage in revpair (with "+") (real pair) $ hg diff -r 'tip^^' -r 'tip' diff -r 2326846efdab -r 24286f4ae135 .hgtags --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/.hgtags Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +e0cc66ef77e8b6f711815af4e001a6594fde3ba5 1.0 $ hg diff -r 'tip^^::tip' diff -r 2326846efdab -r 24286f4ae135 .hgtags --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/.hgtags Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +e0cc66ef77e8b6f711815af4e001a6594fde3ba5 1.0 (single rev) $ hg diff -r 'tip^' -r 'tip^' $ hg diff -r 'tip^:tip^' (single rev that does not looks like a range) $ hg diff -r 'tip^::tip^ or tip^' diff -r d5d0dcbdc4d9 .hgtags --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/.hgtags * (glob) @@ -0,0 +1,1 @@ +e0cc66ef77e8b6f711815af4e001a6594fde3ba5 1.0 $ hg diff -r 'tip^ or tip^' diff -r d5d0dcbdc4d9 .hgtags --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/.hgtags * (glob) @@ -0,0 +1,1 @@ +e0cc66ef77e8b6f711815af4e001a6594fde3ba5 1.0 (no rev) $ hg diff -r 'author("babar") or author("celeste")' abort: empty revision range [255] aliases: $ echo '[revsetalias]' >> .hg/hgrc $ echo 'm = merge()' >> .hg/hgrc (revset aliases can override builtin revsets) $ echo 'p2($1) = p1($1)' >> .hg/hgrc $ echo 'sincem = descendants(m)' >> .hg/hgrc $ echo 'd($1) = reverse(sort($1, date))' >> .hg/hgrc $ echo 'rs(ARG1, ARG2) = reverse(sort(ARG1, ARG2))' >> .hg/hgrc $ echo 'rs4(ARG1, ARGA, ARGB, ARG2) = reverse(sort(ARG1, ARG2))' >> .hg/hgrc $ try m ('symbol', 'm') (func ('symbol', 'merge') None) * set: > 6 $ HGPLAIN=1 $ export HGPLAIN $ try m ('symbol', 'm') abort: unknown revision 'm'! 
[255] $ HGPLAINEXCEPT=revsetalias $ export HGPLAINEXCEPT $ try m ('symbol', 'm') (func ('symbol', 'merge') None) * set: > 6 $ unset HGPLAIN $ unset HGPLAINEXCEPT $ try 'p2(.)' (func ('symbol', 'p2') ('symbol', '.')) (func ('symbol', 'p1') ('symbol', '.')) * set: 8 $ HGPLAIN=1 $ export HGPLAIN $ try 'p2(.)' (func ('symbol', 'p2') ('symbol', '.')) * set: $ HGPLAINEXCEPT=revsetalias $ export HGPLAINEXCEPT $ try 'p2(.)' (func ('symbol', 'p2') ('symbol', '.')) (func ('symbol', 'p1') ('symbol', '.')) * set: 8 $ unset HGPLAIN $ unset HGPLAINEXCEPT test alias recursion $ try sincem ('symbol', 'sincem') (func ('symbol', 'descendants') (func ('symbol', 'merge') None)) * set: >, > 6 7 test infinite recursion $ echo 'recurse1 = recurse2' >> .hg/hgrc $ echo 'recurse2 = recurse1' >> .hg/hgrc $ try recurse1 ('symbol', 'recurse1') hg: parse error: infinite expansion of revset alias "recurse1" detected [255] $ echo 'level1($1, $2) = $1 or $2' >> .hg/hgrc $ echo 'level2($1, $2) = level1($2, $1)' >> .hg/hgrc $ try "level2(level1(1, 2), 3)" (func ('symbol', 'level2') (list (func ('symbol', 'level1') (list ('symbol', '1') ('symbol', '2'))) ('symbol', '3'))) (or ('symbol', '3') (or ('symbol', '1') ('symbol', '2'))) * set: , > 3 1 2 test nesting and variable passing $ echo 'nested($1) = nested2($1)' >> .hg/hgrc $ echo 'nested2($1) = nested3($1)' >> .hg/hgrc $ echo 'nested3($1) = max($1)' >> .hg/hgrc $ try 'nested(2:5)' (func ('symbol', 'nested') (range ('symbol', '2') ('symbol', '5'))) (func ('symbol', 'max') (range ('symbol', '2') ('symbol', '5'))) * set: 5 test chained `or` operations are flattened at parsing phase $ echo 'chainedorops($1, $2, $3) = $1|$2|$3' >> .hg/hgrc $ try 'chainedorops(0:1, 1:2, 2:3)' (func ('symbol', 'chainedorops') (list (range ('symbol', '0') ('symbol', '1')) (range ('symbol', '1') ('symbol', '2')) (range ('symbol', '2') ('symbol', '3')))) (or (range ('symbol', '0') ('symbol', '1')) (range ('symbol', '1') ('symbol', '2')) (range ('symbol', '2') ('symbol', 
'3'))) * set: , , >> 0 1 2 3 test variable isolation, variable placeholders are rewritten as string then parsed and matched again as string. Check they do not leak too far away. $ echo 'injectparamasstring = max("$1")' >> .hg/hgrc $ echo 'callinjection($1) = descendants(injectparamasstring)' >> .hg/hgrc $ try 'callinjection(2:5)' (func ('symbol', 'callinjection') (range ('symbol', '2') ('symbol', '5'))) (func ('symbol', 'descendants') (func ('symbol', 'max') ('string', '$1'))) abort: unknown revision '$1'! [255] $ echo 'injectparamasstring2 = max(_aliasarg("$1"))' >> .hg/hgrc $ echo 'callinjection2($1) = descendants(injectparamasstring2)' >> .hg/hgrc $ try 'callinjection2(2:5)' (func ('symbol', 'callinjection2') (range ('symbol', '2') ('symbol', '5'))) abort: failed to parse the definition of revset alias "injectparamasstring2": unknown identifier: _aliasarg [255] $ hg debugrevspec --debug --config revsetalias.anotherbadone='branch(' "tip" ('symbol', 'tip') warning: failed to parse the definition of revset alias "anotherbadone": at 7: not a prefix: end warning: failed to parse the definition of revset alias "injectparamasstring2": unknown identifier: _aliasarg * set: 9 >>> data = file('.hg/hgrc', 'rb').read() >>> file('.hg/hgrc', 'wb').write(data.replace('_aliasarg', '')) $ try 'tip' ('symbol', 'tip') * set: 9 $ hg debugrevspec --debug --config revsetalias.'bad name'='tip' "tip" ('symbol', 'tip') warning: failed to parse the declaration of revset alias "bad name": at 4: invalid token * set: 9 $ echo 'strictreplacing($1, $10) = $10 or desc("$1")' >> .hg/hgrc $ try 'strictreplacing("foo", tip)' (func ('symbol', 'strictreplacing') (list ('string', 'foo') ('symbol', 'tip'))) (or ('symbol', 'tip') (func ('symbol', 'desc') ('string', '$1'))) * set: , >> 9 $ try 'd(2:5)' (func ('symbol', 'd') (range ('symbol', '2') ('symbol', '5'))) (func ('symbol', 'reverse') (func ('symbol', 'sort') (list (range ('symbol', '2') ('symbol', '5')) ('symbol', 'date')))) * set: 4 5 3 2 $ try 
'rs(2 or 3, date)' (func ('symbol', 'rs') (list (or ('symbol', '2') ('symbol', '3')) ('symbol', 'date'))) (func ('symbol', 'reverse') (func ('symbol', 'sort') (list (or ('symbol', '2') ('symbol', '3')) ('symbol', 'date')))) * set: 3 2 $ try 'rs()' (func ('symbol', 'rs') None) hg: parse error: invalid number of arguments: 0 [255] $ try 'rs(2)' (func ('symbol', 'rs') ('symbol', '2')) hg: parse error: invalid number of arguments: 1 [255] $ try 'rs(2, data, 7)' (func ('symbol', 'rs') (list ('symbol', '2') ('symbol', 'data') ('symbol', '7'))) hg: parse error: invalid number of arguments: 3 [255] $ try 'rs4(2 or 3, x, x, date)' (func ('symbol', 'rs4') (list (or ('symbol', '2') ('symbol', '3')) ('symbol', 'x') ('symbol', 'x') ('symbol', 'date'))) (func ('symbol', 'reverse') (func ('symbol', 'sort') (list (or ('symbol', '2') ('symbol', '3')) ('symbol', 'date')))) * set: 3 2 issue4553: check that revset aliases override existing hash prefix $ hg log -qr e 6:e0cc66ef77e8 $ hg log -qr e --config revsetalias.e="all()" 0:2785f51eece5 1:d75937da8da0 2:5ed5505e9f1c 3:8528aa5637f2 4:2326846efdab 5:904fa392b941 6:e0cc66ef77e8 7:013af1973af4 8:d5d0dcbdc4d9 9:24286f4ae135 $ hg log -qr e: --config revsetalias.e="0" 0:2785f51eece5 1:d75937da8da0 2:5ed5505e9f1c 3:8528aa5637f2 4:2326846efdab 5:904fa392b941 6:e0cc66ef77e8 7:013af1973af4 8:d5d0dcbdc4d9 9:24286f4ae135 $ hg log -qr :e --config revsetalias.e="9" 0:2785f51eece5 1:d75937da8da0 2:5ed5505e9f1c 3:8528aa5637f2 4:2326846efdab 5:904fa392b941 6:e0cc66ef77e8 7:013af1973af4 8:d5d0dcbdc4d9 9:24286f4ae135 $ hg log -qr e: 6:e0cc66ef77e8 7:013af1973af4 8:d5d0dcbdc4d9 9:24286f4ae135 $ hg log -qr :e 0:2785f51eece5 1:d75937da8da0 2:5ed5505e9f1c 3:8528aa5637f2 4:2326846efdab 5:904fa392b941 6:e0cc66ef77e8 issue2549 - correct optimizations $ log 'limit(1 or 2 or 3, 2) and not 2' 1 $ log 'max(1 or 2) and not 2' $ log 'min(1 or 2) and not 1' $ log 'last(1 or 2, 1) and not 2' issue4289 - ordering of built-ins $ hg log -M -q -r 3:2 3:8528aa5637f2 
2:5ed5505e9f1c test revsets started with 40-chars hash (issue3669) $ ISSUE3669_TIP=`hg tip --template '{node}'` $ hg log -r "${ISSUE3669_TIP}" --template '{rev}\n' 9 $ hg log -r "${ISSUE3669_TIP}^" --template '{rev}\n' 8 test or-ed indirect predicates (issue3775) $ log '6 or 6^1' | sort 5 6 $ log '6^1 or 6' | sort 5 6 $ log '4 or 4~1' | sort 2 4 $ log '4~1 or 4' | sort 2 4 $ log '(0 or 2):(4 or 6) or 0 or 6' | sort 0 1 2 3 4 5 6 $ log '0 or 6 or (0 or 2):(4 or 6)' | sort 0 1 2 3 4 5 6 tests for 'remote()' predicate: #. (csets in remote) (id) (remote) 1. less than local current branch "default" 2. same with local specified "default" 3. more than local specified specified $ hg clone --quiet -U . ../remote3 $ cd ../remote3 $ hg update -q 7 $ echo r > r $ hg ci -Aqm 10 $ log 'remote()' 7 $ log 'remote("a-b-c-")' 2 $ cd ../repo $ log 'remote(".a.b.c.", "../remote3")' tests for concatenation of strings/symbols by "##" $ try "278 ## '5f5' ## 1ee ## 'ce5'" (_concat (_concat (_concat ('symbol', '278') ('string', '5f5')) ('symbol', '1ee')) ('string', 'ce5')) ('string', '2785f51eece5') * set: 0 $ echo 'cat4($1, $2, $3, $4) = $1 ## $2 ## $3 ## $4' >> .hg/hgrc $ try "cat4(278, '5f5', 1ee, 'ce5')" (func ('symbol', 'cat4') (list ('symbol', '278') ('string', '5f5') ('symbol', '1ee') ('string', 'ce5'))) (_concat (_concat (_concat ('symbol', '278') ('string', '5f5')) ('symbol', '1ee')) ('string', 'ce5')) ('string', '2785f51eece5') * set: 0 (check concatenation in alias nesting) $ echo 'cat2($1, $2) = $1 ## $2' >> .hg/hgrc $ echo 'cat2x2($1, $2, $3, $4) = cat2($1 ## $2, $3 ## $4)' >> .hg/hgrc $ log "cat2x2(278, '5f5', 1ee, 'ce5')" 0 (check operator priority) $ echo 'cat2n2($1, $2, $3, $4) = $1 ## $2 or $3 ## $4~2' >> .hg/hgrc $ log "cat2n2(2785f5, 1eece5, 24286f, 4ae135)" 0 4 $ cd .. 
prepare repository that has "default" branches of multiple roots $ hg init namedbranch $ cd namedbranch $ echo default0 >> a $ hg ci -Aqm0 $ echo default1 >> a $ hg ci -m1 $ hg branch -q stable $ echo stable2 >> a $ hg ci -m2 $ echo stable3 >> a $ hg ci -m3 $ hg update -q null $ echo default4 >> a $ hg ci -Aqm4 $ echo default5 >> a $ hg ci -m5 "null" revision belongs to "default" branch (issue4683) $ log 'branch(null)' 0 1 4 5 "null" revision belongs to "default" branch, but it shouldn't appear in set unless explicitly specified (issue4682) $ log 'children(branch(default))' 1 2 5 $ cd .. test author/desc/keyword in problematic encoding # unicode: cp932: # u30A2 0x83 0x41(= 'A') # u30C2 0x83 0x61(= 'a') $ hg init problematicencoding $ cd problematicencoding $ python > setup.sh < print u''' > echo a > text > hg add text > hg --encoding utf-8 commit -u '\u30A2' -m none > echo b > text > hg --encoding utf-8 commit -u '\u30C2' -m none > echo c > text > hg --encoding utf-8 commit -u none -m '\u30A2' > echo d > text > hg --encoding utf-8 commit -u none -m '\u30C2' > '''.encode('utf-8') > EOF $ sh < setup.sh test in problematic encoding $ python > test.sh < print u''' > hg --encoding cp932 log --template '{rev}\\n' -r 'author(\u30A2)' > echo ==== > hg --encoding cp932 log --template '{rev}\\n' -r 'author(\u30C2)' > echo ==== > hg --encoding cp932 log --template '{rev}\\n' -r 'desc(\u30A2)' > echo ==== > hg --encoding cp932 log --template '{rev}\\n' -r 'desc(\u30C2)' > echo ==== > hg --encoding cp932 log --template '{rev}\\n' -r 'keyword(\u30A2)' > echo ==== > hg --encoding cp932 log --template '{rev}\\n' -r 'keyword(\u30C2)' > '''.encode('cp932') > EOF $ sh < test.sh 0 ==== 1 ==== 2 ==== 3 ==== 0 2 ==== 1 3 test error message of bad revset $ hg log -r 'foo\\' hg: parse error at 3: syntax error in revset 'foo\\' [255] $ cd .. Test registrar.delayregistrar via revset.extpredicate 'extpredicate' decorator shouldn't register any functions until 'setup()' on it. 
$ cd repo $ cat < $TESTTMP/custompredicate.py > from mercurial import revset > > revsetpredicate = revset.extpredicate() > > @revsetpredicate('custom1()') > def custom1(repo, subset, x): > return revset.baseset([1]) > @revsetpredicate('custom2()') > def custom2(repo, subset, x): > return revset.baseset([2]) > > def uisetup(ui): > if ui.configbool('custompredicate', 'enabled'): > revsetpredicate.setup() > EOF $ cat < .hg/hgrc > [extensions] > custompredicate = $TESTTMP/custompredicate.py > EOF $ hg debugrevspec "custom1()" hg: parse error: unknown identifier: custom1 [255] $ hg debugrevspec "custom2()" hg: parse error: unknown identifier: custom2 [255] $ hg debugrevspec "custom1() or custom2()" --config custompredicate.enabled=true 1 2 $ cd .. mercurial-3.7.3/tests/test-revlog-ancestry.py0000644000175000017500000000333612676531525020751 0ustar mpmmpm00000000000000import os from mercurial import hg, ui, merge u = ui.ui() repo = hg.repository(u, 'test1', create=1) os.chdir('test1') def commit(text, time): repo.commit(text=text, date="%d 0" % time) def addcommit(name, time): f = open(name, 'w') f.write('%s\n' % name) f.close() repo[None].add([name]) commit(name, time) def update(rev): merge.update(repo, rev, False, True) def merge_(rev): merge.update(repo, rev, True, False) if __name__ == '__main__': addcommit("A", 0) addcommit("B", 1) update(0) addcommit("C", 2) merge_(1) commit("D", 3) update(2) addcommit("E", 4) addcommit("F", 5) update(3) addcommit("G", 6) merge_(5) commit("H", 7) update(5) addcommit("I", 8) # Ancestors print 'Ancestors of 5' for r in repo.changelog.ancestors([5]): print r, print '\nAncestors of 6 and 5' for r in repo.changelog.ancestors([6, 5]): print r, print '\nAncestors of 5 and 4' for r in repo.changelog.ancestors([5, 4]): print r, print '\nAncestors of 7, stop at 6' for r in repo.changelog.ancestors([7], 6): print r, print '\nAncestors of 7, including revs' for r in repo.changelog.ancestors([7], inclusive=True): print r, print '\nAncestors of 
7, 5 and 3, including revs' for r in repo.changelog.ancestors([7, 5, 3], inclusive=True): print r, # Descendants print '\n\nDescendants of 5' for r in repo.changelog.descendants([5]): print r, print '\nDescendants of 5 and 3' for r in repo.changelog.descendants([5, 3]): print r, print '\nDescendants of 5 and 4' for r in repo.changelog.descendants([5, 4]): print r, mercurial-3.7.3/tests/test-addremove.t0000644000175000017500000000360312676531525017403 0ustar mpmmpm00000000000000 $ hg init rep $ cd rep $ mkdir dir $ touch foo dir/bar $ hg -v addremove adding dir/bar adding foo $ hg -v commit -m "add 1" committing files: dir/bar foo committing manifest committing changelog committed changeset 0:6f7f953567a2 $ cd dir/ $ touch ../foo_2 bar_2 $ hg -v addremove adding dir/bar_2 adding foo_2 $ hg -v commit -m "add 2" committing files: dir/bar_2 foo_2 committing manifest committing changelog committed changeset 1:e65414bf35c5 $ cd .. $ hg forget foo $ hg -v addremove adding foo $ hg forget foo #if windows $ hg -v addremove nonexistent nonexistent: The system cannot find the file specified [1] #else $ hg -v addremove nonexistent nonexistent: No such file or directory [1] #endif $ cd .. $ hg init subdir $ cd subdir $ mkdir dir $ cd dir $ touch a.py $ hg addremove 'glob:*.py' adding a.py $ hg forget a.py $ hg addremove -I 'glob:*.py' adding a.py $ hg forget a.py $ hg addremove adding dir/a.py $ cd .. 
$ hg init sim $ cd sim $ echo a > a $ echo a >> a $ echo a >> a $ echo c > c $ hg commit -Ama adding a adding c $ mv a b $ rm c $ echo d > d $ hg addremove -n -s 50 # issue 1696 removing a adding b removing c adding d recording removal of a as rename to b (100% similar) $ hg addremove -s 50 removing a adding b removing c adding d recording removal of a as rename to b (100% similar) $ hg commit -mb $ cp b c $ hg forget b $ hg addremove -s 50 adding b adding c $ rm c #if windows $ hg ci -A -m "c" nonexistent nonexistent: The system cannot find the file specified abort: failed to mark all new/missing files as added/removed [255] #else $ hg ci -A -m "c" nonexistent nonexistent: No such file or directory abort: failed to mark all new/missing files as added/removed [255] #endif $ hg st ! c $ cd .. mercurial-3.7.3/tests/test-simple-update.t0000644000175000017500000000232512676531525020206 0ustar mpmmpm00000000000000 $ hg init test $ cd test $ echo foo>foo $ hg addremove adding foo $ hg commit -m "1" $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 1 changesets, 1 total revisions $ hg clone . 
../branch updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd ../branch $ hg co 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo bar>>foo $ hg commit -m "2" $ cd ../test $ hg pull ../branch pulling from ../branch searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (run 'hg update' to get a working copy) $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 2 changesets, 2 total revisions $ hg co 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat foo foo bar $ hg manifest --debug 6f4310b00b9a147241b071a60c28a650827fb03d 644 foo update to rev 0 with a date $ hg upd -d foo 0 abort: you can't specify a revision and a date [255] $ cd .. mercurial-3.7.3/tests/test-contrib-perf.t0000644000175000017500000000773212676531525020036 0ustar mpmmpm00000000000000#require test-repo Set vars: $ CONTRIBDIR="$TESTDIR/../contrib" Prepare repo: $ hg init $ echo this is file a > a $ hg add a $ hg commit -m first $ echo adding to file a >> a $ hg commit -m second $ echo adding more to file a >> a $ hg commit -m third $ hg up -r 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo merge-this >> a $ hg commit -m merge-able created new head $ hg up -r 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved perfstatus $ cat >> $HGRCPATH << EOF > [extensions] > perfstatusext=$CONTRIBDIR/perf.py > [perf] > presleep=0 > stub=on > parentscount=1 > EOF $ hg help perfstatusext perfstatusext extension - helper extension to measure performance list of commands: perfaddremove (no help text available) perfancestors (no help text available) perfancestorset (no help text available) perfannotate (no help text available) perfbranchmap benchmark the update of a branchmap perfcca (no help text available) perfchangeset (no help text 
available) perfctxfiles (no help text available) perfdiffwd Profile diff of working directory changes perfdirfoldmap (no help text available) perfdirs (no help text available) perfdirstate (no help text available) perfdirstatedirs (no help text available) perfdirstatefoldmap (no help text available) perfdirstatewrite (no help text available) perffncacheencode (no help text available) perffncacheload (no help text available) perffncachewrite (no help text available) perfheads (no help text available) perfindex (no help text available) perfloadmarkers benchmark the time to parse the on-disk markers for a repo perflog (no help text available) perflookup (no help text available) perflrucachedict (no help text available) perfmanifest (no help text available) perfmergecalculate (no help text available) perfmoonwalk benchmark walking the changelog backwards perfnodelookup (no help text available) perfparents (no help text available) perfpathcopies (no help text available) perfrawfiles (no help text available) perfrevlog Benchmark reading a series of revisions from a revlog. perfrevlogrevision Benchmark obtaining a revlog revision. 
perfrevrange (no help text available) perfrevset benchmark the execution time of a revset perfstartup (no help text available) perfstatus (no help text available) perftags (no help text available) perftemplating (no help text available) perfvolatilesets benchmark the computation of various volatile set perfwalk (no help text available) (use "hg help -v perfstatusext" to show built-in aliases and global options) $ hg perfaddremove $ hg perfancestors $ hg perfancestorset 2 $ hg perfannotate a $ hg perfbranchmap $ hg perfcca $ hg perfchangeset 2 $ hg perfctxfiles 2 $ hg perfdiffwd $ hg perfdirfoldmap $ hg perfdirs $ hg perfdirstate $ hg perfdirstatedirs $ hg perfdirstatefoldmap $ hg perfdirstatewrite $ hg perffncacheencode $ hg perffncacheload $ hg perffncachewrite $ hg perfheads $ hg perfindex $ hg perfloadmarkers $ hg perflog $ hg perflookup 2 $ hg perflrucache $ hg perfmanifest 2 $ hg perfmergecalculate -r 3 $ hg perfmoonwalk $ hg perfnodelookup 2 $ hg perfpathcopies 1 2 $ hg perfrawfiles 2 $ hg perfrevlog .hg/store/data/a.i $ hg perfrevlogrevision -m 0 $ hg perfrevrange $ hg perfrevset 'all()' $ hg perfstartup $ hg perfstatus $ hg perftags $ hg perftemplating $ hg perfvolatilesets $ hg perfwalk $ hg perfparents mercurial-3.7.3/tests/test-transplant.t0000644000175000017500000005672312676531525017636 0ustar mpmmpm00000000000000#require killdaemons $ cat <> $HGRCPATH > [extensions] > transplant= > EOF $ hg init t $ cd t $ hg transplant abort: no source URL, branch revision, or revision list provided [255] $ hg transplant --continue --all abort: --continue is incompatible with --branch, --all and --merge [255] $ hg transplant --all tip abort: --all requires a branch revision [255] $ hg transplant --all --branch default tip abort: --all is incompatible with a revision list [255] $ echo r1 > r1 $ hg ci -Amr1 -d'0 0' adding r1 $ hg co -q null $ hg transplant tip abort: no revision checked out [255] $ hg up -q $ echo r2 > r2 $ hg ci -Amr2 -d'1 0' adding r2 $ hg up 0 0 
files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo b1 > b1 $ hg ci -Amb1 -d '0 0' adding b1 created new head $ hg merge 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg transplant 1 abort: outstanding uncommitted merges [255] $ hg up -qC tip $ echo b0 > b1 $ hg transplant 1 abort: outstanding local changes [255] $ hg up -qC tip $ echo b2 > b2 $ hg ci -Amb2 -d '1 0' adding b2 $ echo b3 > b3 $ hg ci -Amb3 -d '2 0' adding b3 $ hg log --template '{rev} {parents} {desc}\n' 4 b3 3 b2 2 0:17ab29e464c6 b1 1 r2 0 r1 $ hg clone . ../rebase updating to branch default 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg init ../emptydest $ cd ../emptydest $ hg transplant --source=../t > /dev/null $ cd ../rebase $ hg up -C 1 1 files updated, 0 files merged, 3 files removed, 0 files unresolved rebase b onto r1 (this also tests that editor is not invoked if '--edit' is not specified) $ HGEDITOR=cat hg transplant -a -b tip applying 37a1297eb21b 37a1297eb21b transplanted to e234d668f844 applying 722f4667af76 722f4667af76 transplanted to 539f377d78df applying a53251cdf717 a53251cdf717 transplanted to ffd6818a3975 $ hg log --template '{rev} {parents} {desc}\n' 7 b3 6 b2 5 1:d11e3596cc1a b1 4 b3 3 b2 2 0:17ab29e464c6 b1 1 r2 0 r1 test transplanted revset $ hg log -r 'transplanted()' --template '{rev} {parents} {desc}\n' 5 1:d11e3596cc1a b1 6 b2 7 b3 $ hg log -r 'transplanted(head())' --template '{rev} {parents} {desc}\n' 7 b3 $ hg help revsets | grep transplanted "transplanted([set])" Transplanted changesets in set, or all transplanted changesets. 
test transplanted keyword $ hg log --template '{rev} {transplanted}\n' 7 a53251cdf717679d1907b289f991534be05c997a 6 722f4667af767100cb15b6a79324bf8abbfe1ef4 5 37a1297eb21b3ef5c5d2ffac22121a0988ed9f21 4 3 2 1 0 test destination() revset predicate with a transplant of a transplant; new clone so subsequent rollback isn't affected (this also tests that editor is invoked if '--edit' is specified) $ hg clone -q . ../destination $ cd ../destination $ hg up -Cq 0 $ hg branch -q b4 $ hg ci -qm "b4" $ hg status --rev "7^1" --rev 7 A b3 $ cat > $TESTTMP/checkeditform.sh < env | grep HGEDITFORM > true > EOF $ cat > $TESTTMP/checkeditform-n-cat.sh < env | grep HGEDITFORM > cat \$* > EOF $ HGEDITOR="sh $TESTTMP/checkeditform-n-cat.sh" hg transplant --edit 7 applying ffd6818a3975 HGEDITFORM=transplant.normal b3 HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. HG: -- HG: user: test HG: branch 'b4' HG: added b3 ffd6818a3975 transplanted to 502236fa76bb $ hg log -r 'destination()' changeset: 5:e234d668f844 parent: 1:d11e3596cc1a user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: b1 changeset: 6:539f377d78df user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: b2 changeset: 7:ffd6818a3975 user: test date: Thu Jan 01 00:00:02 1970 +0000 summary: b3 changeset: 9:502236fa76bb branch: b4 tag: tip user: test date: Thu Jan 01 00:00:02 1970 +0000 summary: b3 $ hg log -r 'destination(a53251cdf717)' changeset: 7:ffd6818a3975 user: test date: Thu Jan 01 00:00:02 1970 +0000 summary: b3 changeset: 9:502236fa76bb branch: b4 tag: tip user: test date: Thu Jan 01 00:00:02 1970 +0000 summary: b3 test subset parameter in reverse order $ hg log -r 'reverse(all()) and destination(a53251cdf717)' changeset: 9:502236fa76bb branch: b4 tag: tip user: test date: Thu Jan 01 00:00:02 1970 +0000 summary: b3 changeset: 7:ffd6818a3975 user: test date: Thu Jan 01 00:00:02 1970 +0000 summary: b3 back to the original dir $ cd ../rebase rollback the 
transplant $ hg rollback repository tip rolled back to revision 4 (undo transplant) working directory now based on revision 1 $ hg tip -q 4:a53251cdf717 $ hg parents -q 1:d11e3596cc1a $ hg status ? b1 ? b2 ? b3 $ hg clone ../t ../prune updating to branch default 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd ../prune $ hg up -C 1 1 files updated, 0 files merged, 3 files removed, 0 files unresolved rebase b onto r1, skipping b2 $ hg transplant -a -b tip -p 3 applying 37a1297eb21b 37a1297eb21b transplanted to e234d668f844 applying a53251cdf717 a53251cdf717 transplanted to 7275fda4d04f $ hg log --template '{rev} {parents} {desc}\n' 6 b3 5 1:d11e3596cc1a b1 4 b3 3 b2 2 0:17ab29e464c6 b1 1 r2 0 r1 test same-parent transplant with --log $ hg clone -r 1 ../t ../sameparent adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd ../sameparent $ hg transplant --log -s ../prune 5 searching for changes applying e234d668f844 e234d668f844 transplanted to e07aea8ecf9c $ hg log --template '{rev} {parents} {desc}\n' 2 b1 (transplanted from e234d668f844e1b1a765f01db83a32c0c7bfa170) 1 r2 0 r1 remote transplant, and also test that transplant doesn't break with format-breaking diffopts $ hg clone -r 1 ../t ../remote adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd ../remote $ hg --config diff.noprefix=True transplant --log -s ../t 2 4 searching for changes applying 37a1297eb21b 37a1297eb21b transplanted to c19cf0ccb069 applying a53251cdf717 a53251cdf717 transplanted to f7fe5bf98525 $ hg log --template '{rev} {parents} {desc}\n' 3 b3 (transplanted from a53251cdf717679d1907b289f991534be05c997a) 2 b1 (transplanted from 37a1297eb21b3ef5c5d2ffac22121a0988ed9f21) 1 r2 0 r1 
skip previous transplants $ hg transplant -s ../t -a -b 4 searching for changes applying 722f4667af76 722f4667af76 transplanted to 47156cd86c0b $ hg log --template '{rev} {parents} {desc}\n' 4 b2 3 b3 (transplanted from a53251cdf717679d1907b289f991534be05c997a) 2 b1 (transplanted from 37a1297eb21b3ef5c5d2ffac22121a0988ed9f21) 1 r2 0 r1 skip local changes transplanted to the source $ echo b4 > b4 $ hg ci -Amb4 -d '3 0' adding b4 $ hg clone ../t ../pullback updating to branch default 4 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd ../pullback $ hg transplant -s ../remote -a -b tip searching for changes applying 4333daefcb15 4333daefcb15 transplanted to 5f42c04e07cc remote transplant with pull $ hg -R ../t serve -p $HGPORT -d --pid-file=../t.pid $ cat ../t.pid >> $DAEMON_PIDS $ hg clone -r 0 ../t ../rp adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd ../rp $ hg transplant -s http://localhost:$HGPORT/ 37a1297eb21b a53251cdf717 searching for changes searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files applying a53251cdf717 a53251cdf717 transplanted to 8d9279348abb $ hg log --template '{rev} {parents} {desc}\n' 2 b3 1 b1 0 r1 remote transplant without pull (It was using "2" and "4" (as the previous transplant used to) which referenced revision different from one run to another) $ hg pull -q http://localhost:$HGPORT/ $ hg transplant -s http://localhost:$HGPORT/ 8d9279348abb 722f4667af76 skipping already applied revision 2:8d9279348abb applying 722f4667af76 722f4667af76 transplanted to 76e321915884 transplant --continue $ hg init ../tc $ cd ../tc $ cat < foo > foo > bar > baz > EOF $ echo toremove > toremove $ echo baz > baz $ hg ci -Amfoo adding baz adding foo adding toremove $ cat < foo > foo2 > bar2 > baz2 > EOF $ rm toremove $ 
echo added > added $ hg ci -Amfoo2 adding added removing toremove $ echo bar > bar $ cat > baz < before baz > baz > after baz > EOF $ hg ci -Ambar adding bar $ echo bar2 >> bar $ hg ci -mbar2 $ hg up 0 3 files updated, 0 files merged, 2 files removed, 0 files unresolved $ echo foobar > foo $ hg ci -mfoobar created new head $ hg transplant 1:3 applying 46ae92138f3c patching file foo Hunk #1 FAILED at 0 1 out of 1 hunks FAILED -- saving rejects to file foo.rej patch failed to apply abort: fix up the working directory and run hg transplant --continue [255] transplant -c shouldn't use an old changeset $ hg up -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm added $ hg transplant --continue abort: no transplant to continue [255] $ hg transplant 1 applying 46ae92138f3c patching file foo Hunk #1 FAILED at 0 1 out of 1 hunks FAILED -- saving rejects to file foo.rej patch failed to apply abort: fix up the working directory and run hg transplant --continue [255] $ cp .hg/transplant/journal .hg/transplant/journal.orig $ cat .hg/transplant/journal # User test # Date 0 0 # Node ID 46ae92138f3ce0249f6789650403286ead052b6d # Parent e8643552fde58f57515e19c4b373a57c96e62af3 foo2 $ grep -v 'Date' .hg/transplant/journal.orig > .hg/transplant/journal $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg transplant --continue -e abort: filter corrupted changeset (no user or date) [255] $ cp .hg/transplant/journal.orig .hg/transplant/journal $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg transplant --continue -e HGEDITFORM=transplant.normal 46ae92138f3c transplanted as 9159dada197d $ hg transplant 1:3 skipping already applied revision 1:46ae92138f3c applying 9d6d6b5a8275 9d6d6b5a8275 transplanted to 2d17a10c922f applying 1dab759070cf 1dab759070cf transplanted to e06a69927eb0 $ hg locate added bar baz foo test multiple revisions and --continue $ hg up -qC 0 $ echo bazbaz > baz $ hg ci -Am anotherbaz baz created new head $ hg transplant 1:3 applying 46ae92138f3c 
46ae92138f3c transplanted to 1024233ea0ba applying 9d6d6b5a8275 patching file baz Hunk #1 FAILED at 0 1 out of 1 hunks FAILED -- saving rejects to file baz.rej patch failed to apply abort: fix up the working directory and run hg transplant --continue [255] $ hg transplant 1:3 abort: transplant in progress (use 'hg transplant --continue' or 'hg update' to abort) [255] $ echo fixed > baz $ hg transplant --continue 9d6d6b5a8275 transplanted as d80c49962290 applying 1dab759070cf 1dab759070cf transplanted to aa0ffe6bd5ae $ cd .. Issue1111: Test transplant --merge $ hg init t1111 $ cd t1111 $ echo a > a $ hg ci -Am adda adding a $ echo b >> a $ hg ci -m appendb $ echo c >> a $ hg ci -m appendc $ hg up -C 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo d >> a $ hg ci -m appendd created new head transplant $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg transplant -m 1 -e applying 42dc4432fd35 HGEDITFORM=transplant.merge 1:42dc4432fd35 merged at a9f4acbac129 $ hg update -q -C 2 $ cat > a < x > y > z > EOF $ hg commit -m replace $ hg update -q -C 4 $ hg transplant -m 5 applying 600a3cdcb41d patching file a Hunk #1 FAILED at 0 1 out of 1 hunks FAILED -- saving rejects to file a.rej patch failed to apply abort: fix up the working directory and run hg transplant --continue [255] $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg transplant --continue -e HGEDITFORM=transplant.merge 600a3cdcb41d transplanted as a3f88be652e0 $ cd .. 
test transplant into empty repository $ hg init empty $ cd empty $ hg transplant -s ../t -b tip -a adding changesets adding manifests adding file changes added 4 changesets with 4 changes to 4 files test "--merge" causing pull from source repository on local host $ hg --config extensions.mq= -q strip 2 $ hg transplant -s ../t --merge tip searching for changes searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files applying a53251cdf717 4:a53251cdf717 merged at 4831f4dc831a test interactive transplant $ hg --config extensions.strip= -q strip 0 $ hg -R ../t log -G --template "{rev}:{node|short}" @ 4:a53251cdf717 | o 3:722f4667af76 | o 2:37a1297eb21b | | o 1:d11e3596cc1a |/ o 0:17ab29e464c6 $ hg transplant -q --config ui.interactive=true -s ../t < ? > x > q > EOF 0:17ab29e464c6 apply changeset? [ynmpcq?]: ? y: yes, transplant this changeset n: no, skip this changeset m: merge at this changeset p: show patch c: commit selected changesets q: quit and cancel transplant ?: ? (show this help) apply changeset? [ynmpcq?]: x unrecognized response apply changeset? [ynmpcq?]: q $ hg transplant -q --config ui.interactive=true -s ../t < p > y > n > n > m > c > EOF 0:17ab29e464c6 apply changeset? [ynmpcq?]: p --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/r1 Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +r1 apply changeset? [ynmpcq?]: y 1:d11e3596cc1a apply changeset? [ynmpcq?]: n 2:37a1297eb21b apply changeset? [ynmpcq?]: n 3:722f4667af76 apply changeset? [ynmpcq?]: m 4:a53251cdf717 apply changeset? [ynmpcq?]: c $ hg log -G --template "{node|short}" @ 88be5dde5260 |\ | o 722f4667af76 | | | o 37a1297eb21b |/ o 17ab29e464c6 $ hg transplant -q --config ui.interactive=true -s ../t < x > ? > y > q > EOF 1:d11e3596cc1a apply changeset? [ynmpcq?]: x unrecognized response apply changeset? [ynmpcq?]: ? 
y: yes, transplant this changeset n: no, skip this changeset m: merge at this changeset p: show patch c: commit selected changesets q: quit and cancel transplant ?: ? (show this help) apply changeset? [ynmpcq?]: y 4:a53251cdf717 apply changeset? [ynmpcq?]: q $ hg heads --template "{node|short}\n" 88be5dde5260 $ cd .. #if unix-permissions system-sh test filter $ hg init filter $ cd filter $ cat <<'EOF' >test-filter > #!/bin/sh > sed 's/r1/r2/' $1 > $1.new > mv $1.new $1 > EOF $ chmod +x test-filter $ hg transplant -s ../t -b tip -a --filter ./test-filter filtering * (glob) applying 17ab29e464c6 17ab29e464c6 transplanted to e9ffc54ea104 filtering * (glob) applying 37a1297eb21b 37a1297eb21b transplanted to 348b36d0b6a5 filtering * (glob) applying 722f4667af76 722f4667af76 transplanted to 0aa6979afb95 filtering * (glob) applying a53251cdf717 a53251cdf717 transplanted to 14f8512272b5 $ hg log --template '{rev} {parents} {desc}\n' 3 b3 2 b2 1 b1 0 r2 $ cd .. test filter with failed patch $ cd filter $ hg up 0 0 files updated, 0 files merged, 3 files removed, 0 files unresolved $ echo foo > b1 $ hg ci -Am foo adding b1 adding test-filter created new head $ hg transplant 1 --filter ./test-filter filtering * (glob) applying 348b36d0b6a5 file b1 already exists 1 out of 1 hunks FAILED -- saving rejects to file b1.rej patch failed to apply abort: fix up the working directory and run hg transplant --continue [255] $ cd .. test environment passed to filter $ hg init filter-environment $ cd filter-environment $ cat <<'EOF' >test-filter-environment > #!/bin/sh > echo "Transplant by $HGUSER" >> $1 > echo "Transplant from rev $HGREVISION" >> $1 > EOF $ chmod +x test-filter-environment $ hg transplant -s ../t --filter ./test-filter-environment 0 filtering * (glob) applying 17ab29e464c6 17ab29e464c6 transplanted to 5190e68026a0 $ hg log --template '{rev} {parents} {desc}\n' 0 r1 Transplant by test Transplant from rev 17ab29e464c6ca53e329470efe2a9918ac617a6f $ cd .. 
test transplant with filter handles invalid changelog $ hg init filter-invalid-log $ cd filter-invalid-log $ cat <<'EOF' >test-filter-invalid-log > #!/bin/sh > echo "" > $1 > EOF $ chmod +x test-filter-invalid-log $ hg transplant -s ../t --filter ./test-filter-invalid-log 0 filtering * (glob) abort: filter corrupted changeset (no user or date) [255] $ cd .. #endif test with a win32ext like setup (differing EOLs) $ hg init twin1 $ cd twin1 $ echo a > a $ echo b > b $ echo b >> b $ hg ci -Am t adding a adding b $ echo a > b $ echo b >> b $ hg ci -m changeb $ cd .. $ hg init twin2 $ cd twin2 $ echo '[patch]' >> .hg/hgrc $ echo 'eol = crlf' >> .hg/hgrc $ $PYTHON -c "file('b', 'wb').write('b\r\nb\r\n')" $ hg ci -Am addb adding b $ hg transplant -s ../twin1 tip searching for changes warning: repository is unrelated applying 2e849d776c17 2e849d776c17 transplanted to 8e65bebc063e $ cat b a\r (esc) b\r (esc) $ cd .. test transplant with merge changeset is skipped $ hg init merge1a $ cd merge1a $ echo a > a $ hg ci -Am a adding a $ hg branch b marked working directory as branch b (branches are permanent and global, did you want a bookmark?) $ hg ci -m branchb $ echo b > b $ hg ci -Am b adding b $ hg update default 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge b 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m mergeb $ cd .. $ hg init merge1b $ cd merge1b $ hg transplant -s ../merge1a tip $ cd .. test transplant with merge changeset accepts --parent $ hg init merge2a $ cd merge2a $ echo a > a $ hg ci -Am a adding a $ hg branch b marked working directory as branch b (branches are permanent and global, did you want a bookmark?) 
$ hg ci -m branchb $ echo b > b $ hg ci -Am b adding b $ hg update default 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge b 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m mergeb $ cd .. $ hg init merge2b $ cd merge2b $ hg transplant -s ../merge2a --parent tip tip abort: be9f9b39483f is not a parent of be9f9b39483f [255] $ hg transplant -s ../merge2a --parent 0 tip applying be9f9b39483f be9f9b39483f transplanted to 9959e51f94d1 $ cd .. test transplanting a patch turning into a no-op $ hg init binarysource $ cd binarysource $ echo a > a $ hg ci -Am adda a >>> file('b', 'wb').write('\0b1') $ hg ci -Am addb b >>> file('b', 'wb').write('\0b2') $ hg ci -m changeb b $ cd .. $ hg clone -r0 binarysource binarydest adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd binarydest $ cp ../binarysource/b b $ hg ci -Am addb2 b $ hg transplant -s ../binarysource 2 searching for changes applying 7a7d57e15850 skipping emptied changeset 7a7d57e15850 Test empty result in --continue $ hg transplant -s ../binarysource 1 searching for changes applying 645035761929 file b already exists 1 out of 1 hunks FAILED -- saving rejects to file b.rej patch failed to apply abort: fix up the working directory and run hg transplant --continue [255] $ hg status ? b.rej $ hg transplant --continue 645035761929 skipped due to empty diff $ cd .. 
Explicitly kill daemons to let the test exit on Windows $ killdaemons.py Test that patch-ed files are treated as "modified", when transplant is aborted by failure of patching, even if none of mode, size and timestamp of them isn't changed on the filesystem (see also issue4583) $ cd t $ cat > $TESTTMP/abort.py < # emulate that patch.patch() is aborted at patching on "abort" file > from mercurial import extensions, patch as patchmod > def patch(orig, ui, repo, patchname, > strip=1, prefix='', files=None, > eolmode='strict', similarity=0): > if files is None: > files = set() > r = orig(ui, repo, patchname, > strip=strip, prefix=prefix, files=files, > eolmode=eolmode, similarity=similarity) > if 'abort' in files: > raise patchmod.PatchError('intentional error while patching') > return r > def extsetup(ui): > extensions.wrapfunction(patchmod, 'patch', patch) > EOF $ echo X1 > r1 $ hg diff --nodates r1 diff -r a53251cdf717 r1 --- a/r1 +++ b/r1 @@ -1,1 +1,1 @@ -r1 +X1 $ hg commit -m "X1 as r1" $ echo 'marking to abort patching' > abort $ hg add abort $ echo Y1 > r1 $ hg diff --nodates r1 diff -r 22c515968f13 r1 --- a/r1 +++ b/r1 @@ -1,1 +1,1 @@ -X1 +Y1 $ hg commit -m "Y1 as r1" $ hg update -q -C d11e3596cc1a $ cat r1 r1 $ cat >> .hg/hgrc < [fakedirstatewritetime] > # emulate invoking dirstate.write() via repo.status() or markcommitted() > # at 2000-01-01 00:00 > fakenow = 200001010000 > > # emulate invoking patch.internalpatch() at 2000-01-01 00:00 > [fakepatchtime] > fakenow = 200001010000 > > [extensions] > fakedirstatewritetime = $TESTDIR/fakedirstatewritetime.py > fakepatchtime = $TESTDIR/fakepatchtime.py > abort = $TESTTMP/abort.py > EOF $ hg transplant "22c515968f13::" applying 22c515968f13 22c515968f13 transplanted to * (glob) applying e38700ba9dd3 intentional error while patching abort: fix up the working directory and run hg transplant --continue [255] $ cat >> .hg/hgrc < [hooks] > fakedirstatewritetime = ! > fakepatchtime = ! > [extensions] > abort = ! 
> EOF $ cat r1 Y1 $ hg debugstate | grep ' r1$' n 644 3 unset r1 $ hg status -A r1 M r1 Test that rollback by unexpected failure after transplanting the first revision restores dirstate correctly. $ hg rollback -q $ rm -f abort $ hg update -q -C d11e3596cc1a $ hg parents -T "{node|short}\n" d11e3596cc1a $ hg status -A C r1 C r2 $ cat >> .hg/hgrc < [hooks] > # emulate failure at transplanting the 2nd revision > pretxncommit.abort = test ! -f abort > EOF $ hg transplant "22c515968f13::" applying 22c515968f13 22c515968f13 transplanted to * (glob) applying e38700ba9dd3 transaction abort! rollback completed abort: pretxncommit.abort hook exited with status 1 [255] $ cat >> .hg/hgrc < [hooks] > pretxncommit.abort = ! > EOF $ hg parents -T "{node|short}\n" d11e3596cc1a $ hg status -A M r1 ? abort C r2 $ cd .. mercurial-3.7.3/tests/test-impexp-branch.t0000644000175000017500000000366112676531525020176 0ustar mpmmpm00000000000000 $ echo '[extensions]' >> $HGRCPATH $ echo 'strip =' >> $HGRCPATH $ cat >findbranch.py < import re, sys > > head_re = re.compile('^#(?:(?:\\s+([A-Za-z][A-Za-z0-9_]*)(?:\\s.*)?)|(?:\\s*))$') > > for line in sys.stdin: > hmatch = head_re.match(line) > if not hmatch: > sys.exit(1) > if hmatch.group(1) == 'Branch': > sys.exit(0) > sys.exit(1) > EOF $ hg init a $ cd a $ echo "Rev 1" >rev $ hg add rev $ hg commit -m "No branch." $ hg branch abranch marked working directory as branch abranch (branches are permanent and global, did you want a bookmark?) $ echo "Rev 2" >rev $ hg commit -m "With branch." $ hg export 0 > ../r0.patch $ hg export 1 > ../r1.patch $ cd .. $ if python findbranch.py < r0.patch; then > echo "Export of default branch revision has Branch header" 1>&2 > exit 1 > fi $ if python findbranch.py < r1.patch; then > : # Do nothing > else > echo "Export of branch revision is missing Branch header" 1>&2 > exit 1 > fi Make sure import still works with branch information in patches. 
$ hg init b $ cd b $ hg import ../r0.patch applying ../r0.patch $ hg import ../r1.patch applying ../r1.patch $ cd .. $ hg init c $ cd c $ hg import --exact --no-commit ../r0.patch applying ../r0.patch warning: can't check exact import with --no-commit $ hg st A rev $ hg revert -a forgetting rev $ rm rev $ hg import --exact ../r0.patch applying ../r0.patch $ hg import --exact ../r1.patch applying ../r1.patch Test --exact and patch header separators (issue3356) $ hg strip --no-backup . 1 files updated, 0 files merged, 0 files removed, 0 files unresolved >>> import re >>> p = file('../r1.patch', 'rb').read() >>> p = re.sub(r'Parent\s+', 'Parent ', p) >>> file('../r1-ws.patch', 'wb').write(p) $ hg import --exact ../r1-ws.patch applying ../r1-ws.patch $ cd .. mercurial-3.7.3/tests/test-merge-force.t0000644000175000017500000007156612676531525017645 0ustar mpmmpm00000000000000Set up a base, local, and remote changeset, as well as the working copy state. Files names are of the form base_remote_local_working-copy. For example, content1_content2_content1_content2-untracked represents a file that was modified in the remote changeset, left untouched in the local changeset, and then modified in the working copy to match the remote content, then finally forgotten. 
$ hg init repo $ cd repo Create base changeset $ python $TESTDIR/generate-working-copy-states.py state 3 1 $ hg addremove -q --similarity 0 $ hg commit -qm 'base' Create remote changeset $ python $TESTDIR/generate-working-copy-states.py state 3 2 $ hg addremove -q --similarity 0 $ hg commit -qm 'remote' Create local changeset $ hg update -q 0 $ python $TESTDIR/generate-working-copy-states.py state 3 3 $ hg addremove -q --similarity 0 $ hg commit -qm 'local' Set up working directory $ python $TESTDIR/generate-working-copy-states.py state 3 wc $ hg addremove -q --similarity 0 $ hg forget *_*_*_*-untracked $ rm *_*_*_missing-* $ hg status -A M content1_content1_content1_content4-tracked M content1_content1_content3_content1-tracked M content1_content1_content3_content4-tracked M content1_content2_content1_content2-tracked M content1_content2_content1_content4-tracked M content1_content2_content2_content1-tracked M content1_content2_content2_content4-tracked M content1_content2_content3_content1-tracked M content1_content2_content3_content2-tracked M content1_content2_content3_content4-tracked M content1_missing_content1_content4-tracked M content1_missing_content3_content1-tracked M content1_missing_content3_content4-tracked M missing_content2_content2_content4-tracked M missing_content2_content3_content2-tracked M missing_content2_content3_content4-tracked M missing_missing_content3_content4-tracked A content1_content1_missing_content1-tracked A content1_content1_missing_content4-tracked A content1_content2_missing_content1-tracked A content1_content2_missing_content2-tracked A content1_content2_missing_content4-tracked A content1_missing_missing_content1-tracked A content1_missing_missing_content4-tracked A missing_content2_missing_content2-tracked A missing_content2_missing_content4-tracked A missing_missing_missing_content4-tracked R content1_content1_content1_content1-untracked R content1_content1_content1_content4-untracked R 
content1_content1_content1_missing-untracked R content1_content1_content3_content1-untracked R content1_content1_content3_content3-untracked R content1_content1_content3_content4-untracked R content1_content1_content3_missing-untracked R content1_content2_content1_content1-untracked R content1_content2_content1_content2-untracked R content1_content2_content1_content4-untracked R content1_content2_content1_missing-untracked R content1_content2_content2_content1-untracked R content1_content2_content2_content2-untracked R content1_content2_content2_content4-untracked R content1_content2_content2_missing-untracked R content1_content2_content3_content1-untracked R content1_content2_content3_content2-untracked R content1_content2_content3_content3-untracked R content1_content2_content3_content4-untracked R content1_content2_content3_missing-untracked R content1_missing_content1_content1-untracked R content1_missing_content1_content4-untracked R content1_missing_content1_missing-untracked R content1_missing_content3_content1-untracked R content1_missing_content3_content3-untracked R content1_missing_content3_content4-untracked R content1_missing_content3_missing-untracked R missing_content2_content2_content2-untracked R missing_content2_content2_content4-untracked R missing_content2_content2_missing-untracked R missing_content2_content3_content2-untracked R missing_content2_content3_content3-untracked R missing_content2_content3_content4-untracked R missing_content2_content3_missing-untracked R missing_missing_content3_content3-untracked R missing_missing_content3_content4-untracked R missing_missing_content3_missing-untracked ! content1_content1_content1_missing-tracked ! content1_content1_content3_missing-tracked ! content1_content1_missing_missing-tracked ! content1_content2_content1_missing-tracked ! content1_content2_content2_missing-tracked ! content1_content2_content3_missing-tracked ! content1_content2_missing_missing-tracked ! 
content1_missing_content1_missing-tracked ! content1_missing_content3_missing-tracked ! content1_missing_missing_missing-tracked ! missing_content2_content2_missing-tracked ! missing_content2_content3_missing-tracked ! missing_content2_missing_missing-tracked ! missing_missing_content3_missing-tracked ! missing_missing_missing_missing-tracked ? content1_content1_missing_content1-untracked ? content1_content1_missing_content4-untracked ? content1_content2_missing_content1-untracked ? content1_content2_missing_content2-untracked ? content1_content2_missing_content4-untracked ? content1_missing_missing_content1-untracked ? content1_missing_missing_content4-untracked ? missing_content2_missing_content2-untracked ? missing_content2_missing_content4-untracked ? missing_missing_missing_content4-untracked C content1_content1_content1_content1-tracked C content1_content1_content3_content3-tracked C content1_content2_content1_content1-tracked C content1_content2_content2_content2-tracked C content1_content2_content3_content3-tracked C content1_missing_content1_content1-tracked C content1_missing_content3_content3-tracked C missing_content2_content2_content2-tracked C missing_content2_content3_content3-tracked C missing_missing_content3_content3-tracked Merge with remote # Notes: # - local and remote changed content1_content2_*_content2-untracked # in the same way, so it could potentially be left alone $ hg merge -f --tool internal:merge3 'desc("remote")' local changed content1_missing_content1_content4-tracked which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? u local changed content1_missing_content3_content3-tracked which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? u local changed content1_missing_content3_content4-tracked which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? 
u local changed content1_missing_missing_content4-tracked which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? u remote changed content1_content2_content1_content1-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_content1_content2-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_content1_content4-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_content1_missing-tracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_content1_missing-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_content2_content1-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_content2_content2-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_content2_content4-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_content2_missing-tracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_content2_missing-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_content3_content1-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_content3_content2-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? 
u remote changed content1_content2_content3_content3-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_content3_content4-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_content3_missing-tracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_content3_missing-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_missing_content1-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_missing_content2-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_missing_content4-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_missing_missing-tracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_missing_missing-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? 
u merging content1_content2_content1_content4-tracked merging content1_content2_content2_content1-tracked merging content1_content2_content2_content4-tracked merging content1_content2_content3_content1-tracked merging content1_content2_content3_content3-tracked merging content1_content2_content3_content4-tracked merging content1_content2_missing_content1-tracked merging content1_content2_missing_content4-tracked merging missing_content2_content2_content4-tracked merging missing_content2_content3_content3-tracked merging missing_content2_content3_content4-tracked merging missing_content2_missing_content4-tracked merging missing_content2_missing_content4-untracked warning: conflicts while merging content1_content2_content1_content4-tracked! (edit, then use 'hg resolve --mark') warning: conflicts while merging content1_content2_content2_content4-tracked! (edit, then use 'hg resolve --mark') warning: conflicts while merging content1_content2_content3_content3-tracked! (edit, then use 'hg resolve --mark') warning: conflicts while merging content1_content2_content3_content4-tracked! (edit, then use 'hg resolve --mark') warning: conflicts while merging content1_content2_missing_content4-tracked! (edit, then use 'hg resolve --mark') warning: conflicts while merging missing_content2_content2_content4-tracked! (edit, then use 'hg resolve --mark') warning: conflicts while merging missing_content2_content3_content3-tracked! (edit, then use 'hg resolve --mark') warning: conflicts while merging missing_content2_content3_content4-tracked! (edit, then use 'hg resolve --mark') warning: conflicts while merging missing_content2_missing_content4-tracked! (edit, then use 'hg resolve --mark') warning: conflicts while merging missing_content2_missing_content4-untracked! (edit, then use 'hg resolve --mark') 18 files updated, 3 files merged, 8 files removed, 35 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' 
to abandon [1] Check which files need to be resolved (should correspond to the output above). This should be the files for which the base (1st filename segment), the remote (2nd segment) and the working copy (4th segment) are all different. Interestingly, one untracked file got merged and added, which corresponds to the odd 'if force and branchmerge and different' case in manifestmerge(). $ hg resolve -l U content1_content2_content1_content1-untracked U content1_content2_content1_content2-untracked U content1_content2_content1_content4-tracked U content1_content2_content1_content4-untracked U content1_content2_content1_missing-tracked U content1_content2_content1_missing-untracked R content1_content2_content2_content1-tracked U content1_content2_content2_content1-untracked U content1_content2_content2_content2-untracked U content1_content2_content2_content4-tracked U content1_content2_content2_content4-untracked U content1_content2_content2_missing-tracked U content1_content2_content2_missing-untracked R content1_content2_content3_content1-tracked U content1_content2_content3_content1-untracked U content1_content2_content3_content2-untracked U content1_content2_content3_content3-tracked U content1_content2_content3_content3-untracked U content1_content2_content3_content4-tracked U content1_content2_content3_content4-untracked U content1_content2_content3_missing-tracked U content1_content2_content3_missing-untracked R content1_content2_missing_content1-tracked U content1_content2_missing_content1-untracked U content1_content2_missing_content2-untracked U content1_content2_missing_content4-tracked U content1_content2_missing_content4-untracked U content1_content2_missing_missing-tracked U content1_content2_missing_missing-untracked U content1_missing_content1_content4-tracked U content1_missing_content3_content3-tracked U content1_missing_content3_content4-tracked U content1_missing_missing_content4-tracked U missing_content2_content2_content4-tracked U 
missing_content2_content3_content3-tracked U missing_content2_content3_content4-tracked U missing_content2_missing_content4-tracked U missing_content2_missing_content4-untracked Check status and file content Some files get added (e.g. content1_content2_content1_content1-untracked) It is not intuitive that content1_content2_content1_content4-tracked gets merged while content1_content2_content1_content4-untracked gets overwritten. Any *_content2_*-untracked triggers the modified/deleted prompt and then gets overwritten. A lot of untracked files become tracked, for example content1_content2_content2_content2-untracked. *_missing_missing_missing-tracked is reported as removed ('R'), which doesn't make sense since the file did not exist in the parent, but on the other hand, merged-in additions are reported as modifications, which is almost as strange. missing_missing_content3_missing-tracked becomes removed ('R'), even though the remote side did not touch the file $ checkstatus() { > for f in `python $TESTDIR/generate-working-copy-states.py filelist 3` > do > echo > hg status -A $f > if test -f $f > then > cat $f > else > echo '' > fi > done > } $ checkstatus 2>&1 | tee $TESTTMP/status1 C content1_content1_content1_content1-tracked content1 R content1_content1_content1_content1-untracked content1 M content1_content1_content1_content4-tracked content4 R content1_content1_content1_content4-untracked content4 ! content1_content1_content1_missing-tracked R content1_content1_content1_missing-untracked M content1_content1_content3_content1-tracked content1 R content1_content1_content3_content1-untracked content1 C content1_content1_content3_content3-tracked content3 R content1_content1_content3_content3-untracked content3 M content1_content1_content3_content4-tracked content4 R content1_content1_content3_content4-untracked content4 ! 
content1_content1_content3_missing-tracked R content1_content1_content3_missing-untracked A content1_content1_missing_content1-tracked content1 ? content1_content1_missing_content1-untracked content1 A content1_content1_missing_content4-tracked content4 ? content1_content1_missing_content4-untracked content4 ! content1_content1_missing_missing-tracked content1_content1_missing_missing-untracked: * (glob) M content1_content2_content1_content1-tracked content2 M content1_content2_content1_content1-untracked content2 M content1_content2_content1_content2-tracked content2 M content1_content2_content1_content2-untracked content2 M content1_content2_content1_content4-tracked <<<<<<< local: 0447570f1af6 - test: local content4 ||||||| base content1 ======= content2 >>>>>>> other: 85100b8c675b - test: remote M content1_content2_content1_content4-untracked content2 M content1_content2_content1_missing-tracked content2 M content1_content2_content1_missing-untracked content2 M content1_content2_content2_content1-tracked content2 M content1_content2_content2_content1-untracked content2 C content1_content2_content2_content2-tracked content2 M content1_content2_content2_content2-untracked content2 M content1_content2_content2_content4-tracked <<<<<<< local: 0447570f1af6 - test: local content4 ||||||| base content1 ======= content2 >>>>>>> other: 85100b8c675b - test: remote M content1_content2_content2_content4-untracked content2 M content1_content2_content2_missing-tracked content2 M content1_content2_content2_missing-untracked content2 M content1_content2_content3_content1-tracked content2 M content1_content2_content3_content1-untracked content2 M content1_content2_content3_content2-tracked content2 M content1_content2_content3_content2-untracked content2 M content1_content2_content3_content3-tracked <<<<<<< local: 0447570f1af6 - test: local content3 ||||||| base content1 ======= content2 >>>>>>> other: 85100b8c675b - test: remote M content1_content2_content3_content3-untracked 
content2 M content1_content2_content3_content4-tracked <<<<<<< local: 0447570f1af6 - test: local content4 ||||||| base content1 ======= content2 >>>>>>> other: 85100b8c675b - test: remote M content1_content2_content3_content4-untracked content2 M content1_content2_content3_missing-tracked content2 M content1_content2_content3_missing-untracked content2 M content1_content2_missing_content1-tracked content2 M content1_content2_missing_content1-untracked content2 M content1_content2_missing_content2-tracked content2 M content1_content2_missing_content2-untracked content2 M content1_content2_missing_content4-tracked <<<<<<< local: 0447570f1af6 - test: local content4 ||||||| base content1 ======= content2 >>>>>>> other: 85100b8c675b - test: remote M content1_content2_missing_content4-untracked content2 M content1_content2_missing_missing-tracked content2 M content1_content2_missing_missing-untracked content2 R content1_missing_content1_content1-tracked R content1_missing_content1_content1-untracked content1 M content1_missing_content1_content4-tracked content4 R content1_missing_content1_content4-untracked content4 R content1_missing_content1_missing-tracked R content1_missing_content1_missing-untracked R content1_missing_content3_content1-tracked R content1_missing_content3_content1-untracked content1 C content1_missing_content3_content3-tracked content3 R content1_missing_content3_content3-untracked content3 M content1_missing_content3_content4-tracked content4 R content1_missing_content3_content4-untracked content4 R content1_missing_content3_missing-tracked R content1_missing_content3_missing-untracked R content1_missing_missing_content1-tracked ? content1_missing_missing_content1-untracked content1 A content1_missing_missing_content4-tracked content4 ? 
content1_missing_missing_content4-untracked content4 R content1_missing_missing_missing-tracked content1_missing_missing_missing-untracked: * (glob) C missing_content2_content2_content2-tracked content2 M missing_content2_content2_content2-untracked content2 M missing_content2_content2_content4-tracked <<<<<<< local: 0447570f1af6 - test: local content4 ||||||| base ======= content2 >>>>>>> other: 85100b8c675b - test: remote M missing_content2_content2_content4-untracked content2 M missing_content2_content2_missing-tracked content2 M missing_content2_content2_missing-untracked content2 M missing_content2_content3_content2-tracked content2 M missing_content2_content3_content2-untracked content2 M missing_content2_content3_content3-tracked <<<<<<< local: 0447570f1af6 - test: local content3 ||||||| base ======= content2 >>>>>>> other: 85100b8c675b - test: remote M missing_content2_content3_content3-untracked content2 M missing_content2_content3_content4-tracked <<<<<<< local: 0447570f1af6 - test: local content4 ||||||| base ======= content2 >>>>>>> other: 85100b8c675b - test: remote M missing_content2_content3_content4-untracked content2 M missing_content2_content3_missing-tracked content2 M missing_content2_content3_missing-untracked content2 M missing_content2_missing_content2-tracked content2 M missing_content2_missing_content2-untracked content2 M missing_content2_missing_content4-tracked <<<<<<< local: 0447570f1af6 - test: local content4 ||||||| base ======= content2 >>>>>>> other: 85100b8c675b - test: remote M missing_content2_missing_content4-untracked <<<<<<< local: 0447570f1af6 - test: local content4 ||||||| base ======= content2 >>>>>>> other: 85100b8c675b - test: remote M missing_content2_missing_missing-tracked content2 M missing_content2_missing_missing-untracked content2 C missing_missing_content3_content3-tracked content3 R missing_missing_content3_content3-untracked content3 M missing_missing_content3_content4-tracked content4 R 
missing_missing_content3_content4-untracked content4 R missing_missing_content3_missing-tracked R missing_missing_content3_missing-untracked A missing_missing_missing_content4-tracked content4 ? missing_missing_missing_content4-untracked content4 R missing_missing_missing_missing-tracked missing_missing_missing_missing-untracked: * (glob) $ for f in `python $TESTDIR/generate-working-copy-states.py filelist 3` > do > if test -f ${f}.orig > then > echo ${f}.orig: > cat ${f}.orig > fi > done content1_content2_content1_content4-tracked.orig: content4 content1_content2_content2_content4-tracked.orig: content4 content1_content2_content3_content3-tracked.orig: content3 content1_content2_content3_content4-tracked.orig: content4 content1_content2_missing_content4-tracked.orig: content4 missing_content2_content2_content4-tracked.orig: content4 missing_content2_content3_content3-tracked.orig: content3 missing_content2_content3_content4-tracked.orig: content4 missing_content2_missing_content4-tracked.orig: content4 missing_content2_missing_content4-untracked.orig: content4 Re-resolve and check status $ hg resolve --unmark --all $ hg resolve --all --tool :local (no more unresolved files) $ hg resolve --unmark --all $ hg resolve --all --tool internal:merge3 remote changed content1_content2_content1_content1-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_content1_content2-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u merging content1_content2_content1_content4-tracked remote changed content1_content2_content1_content4-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_content1_missing-tracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? 
u remote changed content1_content2_content1_missing-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u merging content1_content2_content2_content1-tracked remote changed content1_content2_content2_content1-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_content2_content2-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u merging content1_content2_content2_content4-tracked remote changed content1_content2_content2_content4-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_content2_missing-tracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_content2_missing-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u merging content1_content2_content3_content1-tracked remote changed content1_content2_content3_content1-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_content3_content2-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u merging content1_content2_content3_content3-tracked remote changed content1_content2_content3_content3-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u merging content1_content2_content3_content4-tracked remote changed content1_content2_content3_content4-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_content3_missing-tracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? 
u remote changed content1_content2_content3_missing-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u merging content1_content2_missing_content1-tracked remote changed content1_content2_missing_content1-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_missing_content2-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u merging content1_content2_missing_content4-tracked remote changed content1_content2_missing_content4-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_missing_missing-tracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u remote changed content1_content2_missing_missing-untracked which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? u local changed content1_missing_content1_content4-tracked which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? u local changed content1_missing_content3_content3-tracked which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? u local changed content1_missing_content3_content4-tracked which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? u local changed content1_missing_missing_content4-tracked which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? u merging missing_content2_content2_content4-tracked merging missing_content2_content3_content3-tracked merging missing_content2_content3_content4-tracked merging missing_content2_missing_content4-tracked merging missing_content2_missing_content4-untracked warning: conflicts while merging content1_content2_content1_content4-tracked! (edit, then use 'hg resolve --mark') warning: conflicts while merging content1_content2_content2_content4-tracked! 
(edit, then use 'hg resolve --mark') warning: conflicts while merging content1_content2_content3_content3-tracked! (edit, then use 'hg resolve --mark') warning: conflicts while merging content1_content2_content3_content4-tracked! (edit, then use 'hg resolve --mark') warning: conflicts while merging content1_content2_missing_content4-tracked! (edit, then use 'hg resolve --mark') warning: conflicts while merging missing_content2_content2_content4-tracked! (edit, then use 'hg resolve --mark') warning: conflicts while merging missing_content2_content3_content3-tracked! (edit, then use 'hg resolve --mark') warning: conflicts while merging missing_content2_content3_content4-tracked! (edit, then use 'hg resolve --mark') warning: conflicts while merging missing_content2_missing_content4-tracked! (edit, then use 'hg resolve --mark') warning: conflicts while merging missing_content2_missing_content4-untracked! (edit, then use 'hg resolve --mark') [1] $ checkstatus > $TESTTMP/status2 2>&1 $ cmp $TESTTMP/status1 $TESTTMP/status2 || diff -U8 $TESTTMP/status1 $TESTTMP/status2 mercurial-3.7.3/tests/test-highlight.t0000644000175000017500000006562512676531525017420 0ustar mpmmpm00000000000000#require pygments serve $ cat <> $HGRCPATH > [extensions] > highlight = > [web] > pygments_style = friendly > highlightfiles = **.py and size('<100KB') > EOF $ hg init test $ cd test create random Python file to exercise Pygments $ cat < primes.py > #!/usr/bin/env python > > """Fun with generators. Corresponding Haskell implementation: > > primes = 2 : sieve [3, 5..] > where sieve (p:ns) = p : sieve [n | n <- ns, mod n p /= 0] > """ > > from itertools import dropwhile, ifilter, islice, count, chain > > def primes(): > """Generate all primes.""" > def sieve(ns): > p = ns.next() > # It is important to yield *here* in order to stop the > # infinite recursion. 
> yield p > ns = ifilter(lambda n: n % p != 0, ns) > for n in sieve(ns): > yield n > > odds = ifilter(lambda i: i % 2 == 1, count()) > return chain([2], sieve(dropwhile(lambda n: n < 3, odds))) > > if __name__ == "__main__": > import sys > try: > n = int(sys.argv[1]) > except (ValueError, IndexError): > n = 10 > p = primes() > print "The first %d primes: %s" % (n, list(islice(p, n))) > EOF $ echo >> primes.py # to test html markup with an empty line just before EOF $ hg ci -Ama adding primes.py hg serve $ hg serve -p $HGPORT -d -n test --pid-file=hg.pid -A access.log -E errors.log $ cat hg.pid >> $DAEMON_PIDS hgweb filerevision, html $ (get-with-headers.py localhost:$HGPORT 'file/tip/primes.py') \ > | sed "s/class=\"k\"/class=\"kn\"/g" | sed "s/class=\"mf\"/class=\"mi\"/g" 200 Script output follows test: 06824edf55d0 primes.py

                        view primes.py @ 0:06824edf55d0 tip

                        a
                        author test
                        date Thu, 01 Jan 1970 00:00:00 +0000
                        parents
                        children
                        line wrap: on
                        line source
                          #!/usr/bin/env python
                          
                          """Fun with generators. Corresponding Haskell implementation:
                          
                          primes = 2 : sieve [3, 5..]
                              where sieve (p:ns) = p : sieve [n | n <- ns, mod n p /= 0]
                          """
                          
                          from itertools import dropwhile, ifilter, islice, count, chain
                          
                          def primes():
                              """Generate all primes."""
                              def sieve(ns):
                                  p = ns.next()
                                  # It is important to yield *here* in order to stop the
                                  # infinite recursion.
                                  yield p
                                  ns = ifilter(lambda n: n % p != 0, ns)
                                  for n in sieve(ns):
                                      yield n
                          
                              odds = ifilter(lambda i: i % 2 == 1, count())
                              return chain([2], sieve(dropwhile(lambda n: n < 3, odds)))
                          
                          if __name__ == "__main__":
                              import sys
                              try:
                                  n = int(sys.argv[1])
                              except (ValueError, IndexError):
                                  n = 10
                              p = primes()
                              print "The first %d primes: %s" % (n, list(islice(p, n)))
                          
                        hgweb fileannotate, html $ (get-with-headers.py localhost:$HGPORT 'annotate/tip/primes.py') \ > | sed "s/class=\"k\"/class=\"kn\"/g" | sed "s/class=\"mi\"/class=\"mf\"/g" 200 Script output follows test: primes.py annotate

                        annotate primes.py @ 0:06824edf55d0 tip

                        a
                        author test
                        date Thu, 01 Jan 1970 00:00:00 +0000
                        parents
                        children
                        rev   line source
                        test@0 1 #!/usr/bin/env python
                        test@0 2
                        test@0 3 """Fun with generators. Corresponding Haskell implementation:
                        test@0 4
                        test@0 5 primes = 2 : sieve [3, 5..]
                        test@0 6 where sieve (p:ns) = p : sieve [n | n <- ns, mod n p /= 0]
                        test@0 7 """
                        test@0 8
                        test@0 9 from itertools import dropwhile, ifilter, islice, count, chain
                        test@0 10
                        test@0 11 def primes():
                        test@0 12 """Generate all primes."""
                        test@0 13 def sieve(ns):
                        test@0 14 p = ns.next()
                        test@0 15 # It is important to yield *here* in order to stop the
                        test@0 16 # infinite recursion.
                        test@0 17 yield p
                        test@0 18 ns = ifilter(lambda n: n % p != 0, ns)
                        test@0 19 for n in sieve(ns):
                        test@0 20 yield n
                        test@0 21
                        test@0 22 odds = ifilter(lambda i: i % 2 == 1, count())
                        test@0 23 return chain([2], sieve(dropwhile(lambda n: n < 3, odds)))
                        test@0 24
                        test@0 25 if __name__ == "__main__":
                        test@0 26 import sys
                        test@0 27 try:
                        test@0 28 n = int(sys.argv[1])
                        test@0 29 except (ValueError, IndexError):
                        test@0 30 n = 10
                        test@0 31 p = primes()
                        test@0 32 print "The first %d primes: %s" % (n, list(islice(p, n)))
                        test@0 33
                        hgweb fileannotate, raw $ (get-with-headers.py localhost:$HGPORT 'annotate/tip/primes.py?style=raw') \ > | sed "s/test@//" > a $ echo "200 Script output follows" > b $ echo "" >> b $ echo "" >> b $ hg annotate "primes.py" >> b $ echo "" >> b $ echo "" >> b $ echo "" >> b $ echo "" >> b $ cmp b a || diff -u b a hgweb filerevision, raw $ (get-with-headers.py localhost:$HGPORT 'file/tip/primes.py?style=raw') \ > > a $ echo "200 Script output follows" > b $ echo "" >> b $ hg cat primes.py >> b $ cmp b a || diff -u b a hgweb highlightcss friendly $ get-with-headers.py localhost:$HGPORT 'highlightcss' > out $ head -n 4 out 200 Script output follows /* pygments_style = friendly */ $ rm out errors encountered $ cat errors.log $ killdaemons.py Change the pygments style $ cat > .hg/hgrc < [web] > pygments_style = fruity > EOF hg serve again $ hg serve -p $HGPORT -d -n test --pid-file=hg.pid -A access.log -E errors.log $ cat hg.pid >> $DAEMON_PIDS hgweb highlightcss fruity $ get-with-headers.py localhost:$HGPORT 'highlightcss' > out $ head -n 4 out 200 Script output follows /* pygments_style = fruity */ $ rm out errors encountered $ cat errors.log $ killdaemons.py only highlight C source files $ cat > .hg/hgrc < [web] > highlightfiles = **.c > EOF hg serve again $ hg serve -p $HGPORT -d -n test --pid-file=hg.pid -A access.log -E errors.log $ cat hg.pid >> $DAEMON_PIDS test that fileset in highlightfiles works and primes.py is not highlighted $ get-with-headers.py localhost:$HGPORT 'file/tip/primes.py' | grep 'id="l11"' def primes(): errors encountered $ cat errors.log $ cd .. 
$ hg init eucjp $ cd eucjp $ $PYTHON -c 'print("\265\376")' >> eucjp.txt # Japanese kanji "Kyo" $ hg ci -Ama adding eucjp.txt $ hgserveget () { > killdaemons.py > echo % HGENCODING="$1" hg serve > HGENCODING="$1" hg serve -p $HGPORT -d -n test --pid-file=hg.pid -E errors.log > cat hg.pid >> $DAEMON_PIDS > > echo % hgweb filerevision, html > get-with-headers.py localhost:$HGPORT "file/tip/$2" \ > | grep '
                        ' > echo % errors encountered > cat errors.log > } $ hgserveget euc-jp eucjp.txt % HGENCODING=euc-jp hg serve % hgweb filerevision, html % errors encountered $ hgserveget utf-8 eucjp.txt % HGENCODING=utf-8 hg serve % hgweb filerevision, html % errors encountered $ hgserveget us-ascii eucjp.txt % HGENCODING=us-ascii hg serve % hgweb filerevision, html % errors encountered We attempt to highlight unknown files by default $ killdaemons.py $ cat > .hg/hgrc << EOF > [web] > highlightfiles = ** > EOF $ cat > unknownfile << EOF > #!/usr/bin/python > def foo(): > pass > EOF $ hg add unknownfile $ hg commit -m unknown unknownfile $ hg serve -p $HGPORT -d -n test --pid-file=hg.pid $ cat hg.pid >> $DAEMON_PIDS $ get-with-headers.py localhost:$HGPORT 'file/tip/unknownfile' | grep l2 def foo(): We can prevent Pygments from falling back to a non filename-based detection mode $ cat > .hg/hgrc << EOF > [web] > highlightfiles = ** > highlightonlymatchfilename = true > EOF $ killdaemons.py $ hg serve -p $HGPORT -d -n test --pid-file=hg.pid $ cat hg.pid >> $DAEMON_PIDS $ get-with-headers.py localhost:$HGPORT 'file/tip/unknownfile' | grep l2 def foo(): $ cd .. mercurial-3.7.3/tests/test-merge-default.t0000644000175000017500000000512112676531525020153 0ustar mpmmpm00000000000000 $ hg init $ echo a > a $ hg commit -A -ma adding a $ echo b >> a $ hg commit -mb $ echo c >> a $ hg commit -mc $ hg up 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo d >> a $ hg commit -md created new head $ hg up 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo e >> a $ hg commit -me created new head $ hg up 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved Should fail because not at a head: $ hg merge abort: branch 'default' has 3 heads - please merge with an explicit rev (run 'hg heads .' 
to see heads) [255] $ hg up 1 files updated, 0 files merged, 0 files removed, 0 files unresolved Should fail because > 2 heads: $ HGMERGE=internal:other; export HGMERGE $ hg merge abort: branch 'default' has 3 heads - please merge with an explicit rev (run 'hg heads .' to see heads) [255] Should succeed: $ hg merge 2 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg commit -mm1 Should succeed - 2 heads: $ hg merge -P changeset: 3:ea9ff125ff88 parent: 1:1846eede8b68 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: d $ hg merge 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg commit -mm2 Should fail because at tip: $ hg merge abort: nothing to merge [255] $ hg up 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved Should fail because there is only one head: $ hg merge abort: nothing to merge (use 'hg update' instead) [255] $ hg up 3 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo f >> a $ hg branch foobranch marked working directory as branch foobranch (branches are permanent and global, did you want a bookmark?) $ hg commit -mf Should fail because merge with other branch: $ hg merge abort: branch 'foobranch' has one head - please merge with an explicit rev (run 'hg heads' to see all heads) [255] Test for issue2043: ensure that 'merge -P' shows ancestors of 6 that are not ancestors of 7, regardless of where their common ancestors are. 
Merge preview not affected by common ancestor: $ hg up -q 7 $ hg merge -q -P 6 2:2d95304fed5d 4:f25cbe84d8b3 5:a431fabd6039 6:e88e33f3bf62 Test experimental destination revset $ hg log -r '_destmerge()' abort: branch 'foobranch' has one head - please merge with an explicit rev (run 'hg heads' to see all heads) [255] mercurial-3.7.3/tests/test-check-execute.t0000644000175000017500000000106112676531525020146 0ustar mpmmpm00000000000000#require test-repo execbit $ cd "`dirname "$TESTDIR"`" look for python scripts without the execute bit $ hg files 'set:**.py and not exec() and grep(r"^#!.*?python")' [1] look for python scripts with execute bit but not shebang $ hg files 'set:**.py and exec() and not grep(r"^#!.*?python")' [1] look for shell scripts with execute bit but not shebang $ hg files 'set:**.sh and exec() and not grep(r"^#!.*(ba)?sh")' [1] look for non scripts with no shebang $ hg files 'set:exec() and not **.sh and not **.py and not grep(r"^#!")' [1] mercurial-3.7.3/tests/test-mq-qnew.t0000644000175000017500000002113112676531525017016 0ustar mpmmpm00000000000000 $ catpatch() { > cat $1 | sed -e "s/^\(# Parent \).*/\1/" > } $ echo "[extensions]" >> $HGRCPATH $ echo "mq=" >> $HGRCPATH $ runtest() { > hg init mq > cd mq > > echo a > a > hg ci -Ama > > echo '% qnew should refuse bad patch names' > hg qnew series > hg qnew status > hg qnew guards > hg qnew . > hg qnew .. 
> hg qnew .hgignore > hg qnew .mqfoo > hg qnew 'foo#bar' > hg qnew 'foo:bar' > hg qnew "`echo foo; echo bar`" > > hg qinit -c > > echo '% qnew with name containing slash' > hg qnew foo/ > hg qnew foo/bar.patch > hg qnew foo > hg qseries > hg qpop > hg qdelete foo/bar.patch > > echo '% qnew with uncommitted changes' > echo a > somefile > hg add somefile > hg qnew uncommitted.patch > hg st > hg qseries > > echo '% qnew implies add' > hg -R .hg/patches st > > echo '% qnew missing' > hg qnew missing.patch missing > > echo '% qnew -m' > hg qnew -m 'foo bar' mtest.patch > catpatch .hg/patches/mtest.patch > > echo '% qnew twice' > hg qnew first.patch > hg qnew first.patch > > touch ../first.patch > hg qimport ../first.patch > > echo '% qnew -f from a subdirectory' > hg qpop -a > mkdir d > cd d > echo b > b > hg ci -Am t > echo b >> b > hg st > hg qnew -g -f p > catpatch ../.hg/patches/p > > echo '% qnew -u with no username configured' > HGUSER= hg qnew -u blue red > catpatch ../.hg/patches/red > > echo '% qnew -e -u with no username configured' > HGUSER= hg qnew -e -u chartreuse fucsia > catpatch ../.hg/patches/fucsia > > echo '% fail when trying to import a merge' > hg init merge > cd merge > touch a > hg ci -Am null > echo a >> a > hg ci -m a > hg up -r 0 > echo b >> a > hg ci -m b > hg merge -f 1 > hg resolve --mark a > hg qnew -f merge > > cd ../../.. > rm -r mq > } plain headers $ echo "[mq]" >> $HGRCPATH $ echo "plain=true" >> $HGRCPATH $ mkdir sandbox $ (cd sandbox ; runtest) adding a % qnew should refuse bad patch names abort: "series" cannot be used as the name of a patch abort: "status" cannot be used as the name of a patch abort: "guards" cannot be used as the name of a patch abort: "." cannot be used as the name of a patch abort: ".." 
cannot be used as the name of a patch abort: patch name cannot begin with ".hg" abort: patch name cannot begin with ".mq" abort: '#' cannot be used in the name of a patch abort: ':' cannot be used in the name of a patch abort: '\n' cannot be used in the name of a patch % qnew with name containing slash abort: path ends in directory separator: foo/ (glob) abort: "foo" already exists as a directory foo/bar.patch popping foo/bar.patch patch queue now empty % qnew with uncommitted changes uncommitted.patch % qnew implies add A .hgignore A series A uncommitted.patch % qnew missing abort: missing: * (glob) % qnew -m foo bar % qnew twice abort: patch "first.patch" already exists abort: patch "first.patch" already exists % qnew -f from a subdirectory popping first.patch popping mtest.patch popping uncommitted.patch patch queue now empty adding d/b M d/b diff --git a/d/b b/d/b --- a/d/b +++ b/d/b @@ -1,1 +1,2 @@ b +b % qnew -u with no username configured From: blue % qnew -e -u with no username configured From: chartreuse % fail when trying to import a merge adding a 1 files updated, 0 files merged, 0 files removed, 0 files unresolved created new head merging a warning: conflicts while merging a! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon (no more unresolved files) abort: cannot manage merge changesets $ rm -r sandbox hg headers $ echo "plain=false" >> $HGRCPATH $ mkdir sandbox $ (cd sandbox ; runtest) adding a % qnew should refuse bad patch names abort: "series" cannot be used as the name of a patch abort: "status" cannot be used as the name of a patch abort: "guards" cannot be used as the name of a patch abort: "." cannot be used as the name of a patch abort: ".." 
cannot be used as the name of a patch abort: patch name cannot begin with ".hg" abort: patch name cannot begin with ".mq" abort: '#' cannot be used in the name of a patch abort: ':' cannot be used in the name of a patch abort: '\n' cannot be used in the name of a patch % qnew with name containing slash abort: path ends in directory separator: foo/ (glob) abort: "foo" already exists as a directory foo/bar.patch popping foo/bar.patch patch queue now empty % qnew with uncommitted changes uncommitted.patch % qnew implies add A .hgignore A series A uncommitted.patch % qnew missing abort: missing: * (glob) % qnew -m # HG changeset patch # Parent foo bar % qnew twice abort: patch "first.patch" already exists abort: patch "first.patch" already exists % qnew -f from a subdirectory popping first.patch popping mtest.patch popping uncommitted.patch patch queue now empty adding d/b M d/b # HG changeset patch # Parent diff --git a/d/b b/d/b --- a/d/b +++ b/d/b @@ -1,1 +1,2 @@ b +b % qnew -u with no username configured # HG changeset patch # User blue # Parent % qnew -e -u with no username configured # HG changeset patch # User chartreuse # Parent % fail when trying to import a merge adding a 1 files updated, 0 files merged, 0 files removed, 0 files unresolved created new head merging a warning: conflicts while merging a! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' 
to abandon (no more unresolved files) abort: cannot manage merge changesets $ rm -r sandbox Test saving last-message.txt $ hg init repo $ cd repo $ cat > $TESTTMP/commitfailure.py < from mercurial import error > def reposetup(ui, repo): > class commitfailure(repo.__class__): > def commit(self, *args, **kwargs): > raise error.Abort('emulating unexpected abort') > repo.__class__ = commitfailure > EOF $ cat >> .hg/hgrc < [extensions] > # this failure occurs before editor invocation > commitfailure = $TESTTMP/commitfailure.py > EOF $ cat > $TESTTMP/editor.sh << EOF > echo "==== before editing" > cat \$1 > echo "====" > echo "test saving last-message.txt" >> \$1 > EOF (test that editor is not invoked before transaction starting) $ rm -f .hg/last-message.txt $ HGEDITOR="sh $TESTTMP/editor.sh" hg qnew -e patch abort: emulating unexpected abort [255] $ test -f .hg/last-message.txt [1] (test that editor is invoked and commit message is saved into "last-message.txt") $ cat >> .hg/hgrc < [extensions] > commitfailure = ! > [hooks] > # this failure occurs after editor invocation > pretxncommit.unexpectedabort = false > EOF $ rm -f .hg/last-message.txt $ hg status $ HGEDITOR="sh $TESTTMP/editor.sh" hg qnew -e patch ==== before editing HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to use default message. HG: -- HG: user: test HG: branch 'default' HG: no files changed ==== note: commit message saved in .hg/last-message.txt transaction abort! 
rollback completed abort: pretxncommit.unexpectedabort hook exited with status 1 [255] $ cat .hg/last-message.txt test saving last-message.txt $ cat >> .hg/hgrc < [hooks] > pretxncommit.unexpectedabort = > EOF #if unix-permissions Test handling default message with the patch filename with tail whitespaces $ cat > $TESTTMP/editor.sh << EOF > echo "==== before editing" > cat \$1 > echo "====" > echo "[mq]: patch " > \$1 > EOF $ rm -f .hg/last-message.txt $ hg status $ HGEDITOR="sh $TESTTMP/editor.sh" hg qnew -e "patch " ==== before editing HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to use default message. HG: -- HG: user: test HG: branch 'default' HG: no files changed ==== $ cat ".hg/patches/patch " # HG changeset patch # Parent 0000000000000000000000000000000000000000 $ cd .. #endif mercurial-3.7.3/tests/test-largefiles-update.t0000644000175000017500000005313012676531525021032 0ustar mpmmpm00000000000000This file focuses mainly on updating largefiles in the working directory (and ".hg/largefiles/dirstate") $ cat >> $HGRCPATH < [ui] > merge = internal:fail > [extensions] > largefiles = > EOF $ hg init repo $ cd repo $ echo large1 > large1 $ echo large2 > large2 $ hg add --large large1 large2 $ echo normal1 > normal1 $ hg add normal1 $ hg commit -m '#0' $ echo 'large1 in #1' > large1 $ echo 'normal1 in #1' > normal1 $ hg commit -m '#1' $ hg extdiff -r '.^' --config extensions.extdiff= diff -Npru repo.0d9d9b8dc9a3/.hglf/large1 repo/.hglf/large1 --- repo.0d9d9b8dc9a3/.hglf/large1 * (glob) +++ repo/.hglf/large1 * (glob) @@ -1 +1 @@ -4669e532d5b2c093a78eca010077e708a071bb64 +58e24f733a964da346e2407a2bee99d9001184f5 diff -Npru repo.0d9d9b8dc9a3/normal1 repo/normal1 --- repo.0d9d9b8dc9a3/normal1 * (glob) +++ repo/normal1 * (glob) @@ -1 +1 @@ -normal1 +normal1 in #1 [1] $ hg update -q -C 0 $ echo 'large2 in #2' > large2 $ hg commit -m '#2' created new head Test that update also updates the lfdirstate of 'unsure' largefiles after 
hashing them: The previous operations will usually have left us with largefiles with a mtime within the same second as the dirstate was written. The lfdirstate entries will thus have been written with an invalidated/unset mtime to make sure further changes within the same second is detected. We will however occasionally be "lucky" and get a tick between writing largefiles and writing dirstate so we get valid lfdirstate timestamps. The following verification is thus disabled but can be verified manually. #if false $ hg debugdirstate --large --nodate n 644 7 unset large1 n 644 13 unset large2 #endif Wait to make sure we get a tick so the mtime of the largefiles become valid. $ sleep 1 A linear merge will update standins before performing the actual merge. It will do a lfdirstate status walk and find 'unset'/'unsure' files, hash them, and update the corresponding standins. Verify that it actually marks the clean files as clean in lfdirstate so we don't have to hash them again next time we update. 
$ hg up 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg debugdirstate --large --nodate n 644 7 set large1 n 644 13 set large2 Test that lfdirstate keeps track of last modification of largefiles and prevents unnecessary hashing of content - also after linear/noop update $ sleep 1 $ hg st $ hg debugdirstate --large --nodate n 644 7 set large1 n 644 13 set large2 $ hg up 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg debugdirstate --large --nodate n 644 7 set large1 n 644 13 set large2 Test that "hg merge" updates largefiles from "other" correctly (getting largefiles from "other" normally) $ hg status -A large1 C large1 $ cat large1 large1 $ cat .hglf/large1 4669e532d5b2c093a78eca010077e708a071bb64 $ hg merge --config debug.dirstate.delaywrite=2 getting changed largefiles 1 largefiles updated, 0 removed 2 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg status -A large1 M large1 $ cat large1 large1 in #1 $ cat .hglf/large1 58e24f733a964da346e2407a2bee99d9001184f5 $ hg diff -c 1 --nodates .hglf/large1 | grep '^[+-][0-9a-z]' -4669e532d5b2c093a78eca010077e708a071bb64 +58e24f733a964da346e2407a2bee99d9001184f5 (getting largefiles from "other" via conflict prompt) $ hg update -q -C 2 $ echo 'large1 in #3' > large1 $ echo 'normal1 in #3' > normal1 $ hg commit -m '#3' $ cat .hglf/large1 e5bb990443d6a92aaf7223813720f7566c9dd05b $ hg merge --config debug.dirstate.delaywrite=2 --config ui.interactive=True < o > EOF largefile large1 has a merge conflict ancestor was 4669e532d5b2c093a78eca010077e708a071bb64 keep (l)ocal e5bb990443d6a92aaf7223813720f7566c9dd05b or take (o)ther 58e24f733a964da346e2407a2bee99d9001184f5? o merging normal1 warning: conflicts while merging normal1! 
(edit, then use 'hg resolve --mark') getting changed largefiles 1 largefiles updated, 0 removed 0 files updated, 1 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ hg status -A large1 M large1 $ cat large1 large1 in #1 $ cat .hglf/large1 58e24f733a964da346e2407a2bee99d9001184f5 (merge non-existing largefiles from "other" via conflict prompt - make sure the following commit doesn't abort in a confusing way when trying to mark the non-existing file as normal in lfdirstate) $ mv .hg/largefiles/58e24f733a964da346e2407a2bee99d9001184f5 . $ hg update -q -C 3 $ hg merge --config largefiles.usercache=not --config debug.dirstate.delaywrite=2 --tool :local --config ui.interactive=True < o > EOF largefile large1 has a merge conflict ancestor was 4669e532d5b2c093a78eca010077e708a071bb64 keep (l)ocal e5bb990443d6a92aaf7223813720f7566c9dd05b or take (o)ther 58e24f733a964da346e2407a2bee99d9001184f5? o getting changed largefiles large1: largefile 58e24f733a964da346e2407a2bee99d9001184f5 not available from file:/*/$TESTTMP/repo (glob) 0 largefiles updated, 0 removed 0 files updated, 2 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg commit -m '1-2-3 testing' --config largefiles.usercache=not large1: largefile 58e24f733a964da346e2407a2bee99d9001184f5 not available from local store $ hg up -C . --config largefiles.usercache=not getting changed largefiles large1: largefile 58e24f733a964da346e2407a2bee99d9001184f5 not available from file:/*/$TESTTMP/repo (glob) 0 largefiles updated, 0 removed 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg st large1 ! 
large1 $ hg rollback -q $ mv 58e24f733a964da346e2407a2bee99d9001184f5 .hg/largefiles/ Test that "hg revert -r REV" updates largefiles from "REV" correctly $ hg update -q -C 3 $ hg status -A large1 C large1 $ cat large1 large1 in #3 $ cat .hglf/large1 e5bb990443d6a92aaf7223813720f7566c9dd05b $ hg diff -c 1 --nodates .hglf/large1 | grep '^[+-][0-9a-z]' -4669e532d5b2c093a78eca010077e708a071bb64 +58e24f733a964da346e2407a2bee99d9001184f5 $ hg revert --no-backup -r 1 --config debug.dirstate.delaywrite=2 large1 $ hg status -A large1 M large1 $ cat large1 large1 in #1 $ cat .hglf/large1 58e24f733a964da346e2407a2bee99d9001184f5 Test that "hg rollback" restores status of largefiles correctly $ hg update -C -q $ hg remove large1 $ test -f .hglf/large1 [1] $ hg forget large2 $ test -f .hglf/large2 [1] $ echo largeX > largeX $ hg add --large largeX $ cat .hglf/largeX $ hg commit -m 'will be rollback-ed soon' $ echo largeY > largeY $ hg add --large largeY #if windows $ hg status -A large1 large1: * (glob) #else $ hg status -A large1 large1: No such file or directory #endif $ hg status -A large2 ? large2 $ hg status -A largeX C largeX $ hg status -A largeY A largeY $ hg rollback repository tip rolled back to revision 3 (undo commit) working directory now based on revision 3 $ hg status -A large1 R large1 $ test -f .hglf/large1 [1] $ hg status -A large2 R large2 $ test -f .hglf/large2 [1] $ hg status -A largeX A largeX $ cat .hglf/largeX $ hg status -A largeY ? 
largeY $ test -f .hglf/largeY [1] Test that "hg rollback" restores standins correctly $ hg commit -m 'will be rollback-ed soon' $ hg update -q -C 2 $ cat large1 large1 $ cat .hglf/large1 4669e532d5b2c093a78eca010077e708a071bb64 $ cat large2 large2 in #2 $ cat .hglf/large2 3cfce6277e7668985707b6887ce56f9f62f6ccd9 $ hg rollback -q -f $ cat large1 large1 $ cat .hglf/large1 4669e532d5b2c093a78eca010077e708a071bb64 $ cat large2 large2 in #2 $ cat .hglf/large2 3cfce6277e7668985707b6887ce56f9f62f6ccd9 (rollback the parent of the working directory, when the parent of it is not branch-tip) $ hg update -q -C 1 $ cat .hglf/large1 58e24f733a964da346e2407a2bee99d9001184f5 $ cat .hglf/large2 1deebade43c8c498a3c8daddac0244dc55d1331d $ echo normalX > normalX $ hg add normalX $ hg commit -m 'will be rollback-ed soon' $ hg rollback -q $ cat .hglf/large1 58e24f733a964da346e2407a2bee99d9001184f5 $ cat .hglf/large2 1deebade43c8c498a3c8daddac0244dc55d1331d Test that "hg status" shows status of largefiles correctly just after automated commit like rebase/transplant $ cat >> .hg/hgrc < [extensions] > rebase = > strip = > transplant = > EOF $ hg update -q -C 1 $ hg remove large1 $ echo largeX > largeX $ hg add --large largeX $ hg commit -m '#4' $ hg rebase -s 1 -d 2 --keep rebasing 1:72518492caa6 "#1" rebasing 4:07d6153b5c04 "#4" (tip) #if windows $ hg status -A large1 large1: * (glob) #else $ hg status -A large1 large1: No such file or directory #endif $ hg status -A largeX C largeX $ hg strip -q 5 $ hg update -q -C 2 $ hg transplant -q 1 4 #if windows $ hg status -A large1 large1: * (glob) #else $ hg status -A large1 large1: No such file or directory #endif $ hg status -A largeX C largeX $ hg strip -q 5 $ hg update -q -C 2 $ hg transplant -q --merge 1 --merge 4 #if windows $ hg status -A large1 large1: * (glob) #else $ hg status -A large1 large1: No such file or directory #endif $ hg status -A largeX C largeX $ hg strip -q 5 Test that linear merge can detect modification (and conflict) 
correctly (linear merge without conflict) $ echo 'large2 for linear merge (no conflict)' > large2 $ hg update 3 --config debug.dirstate.delaywrite=2 getting changed largefiles 1 largefiles updated, 0 removed 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg status -A large2 M large2 $ cat large2 large2 for linear merge (no conflict) $ cat .hglf/large2 9c4bf8f1b33536d6e5f89447e10620cfe52ea710 (linear merge with conflict, choosing "other") $ hg update -q -C 2 $ echo 'large1 for linear merge (conflict)' > large1 $ hg update 3 --config ui.interactive=True < o > EOF largefile large1 has a merge conflict ancestor was 4669e532d5b2c093a78eca010077e708a071bb64 keep (l)ocal ba94c2efe5b7c5e0af8d189295ce00553b0612b7 or take (o)ther e5bb990443d6a92aaf7223813720f7566c9dd05b? o getting changed largefiles 1 largefiles updated, 0 removed 1 files updated, 1 files merged, 0 files removed, 0 files unresolved $ hg status -A large1 C large1 $ cat large1 large1 in #3 $ cat .hglf/large1 e5bb990443d6a92aaf7223813720f7566c9dd05b (linear merge with conflict, choosing "local") $ hg update -q -C 2 $ echo 'large1 for linear merge (conflict)' > large1 $ hg update 3 --config debug.dirstate.delaywrite=2 largefile large1 has a merge conflict ancestor was 4669e532d5b2c093a78eca010077e708a071bb64 keep (l)ocal ba94c2efe5b7c5e0af8d189295ce00553b0612b7 or take (o)ther e5bb990443d6a92aaf7223813720f7566c9dd05b? 
l 1 files updated, 1 files merged, 0 files removed, 0 files unresolved $ hg status -A large1 M large1 $ cat large1 large1 for linear merge (conflict) $ cat .hglf/large1 ba94c2efe5b7c5e0af8d189295ce00553b0612b7 Test a linear merge to a revision containing same-name normal file $ hg update -q -C 3 $ hg remove large2 $ echo 'large2 as normal file' > large2 $ hg add large2 $ echo 'large3 as normal file' > large3 $ hg add large3 $ hg commit -m '#5' $ hg manifest .hglf/large1 large2 large3 normal1 (modified largefile is already switched to normal) $ hg update -q -C 2 $ echo 'modified large2 for linear merge' > large2 $ hg update -q 5 remote turned local largefile large2 into a normal file keep (l)argefile or use (n)ormal file? l $ hg debugdirstate --nodates | grep large2 a 0 -1 unset .hglf/large2 r 0 0 set large2 $ hg status -A large2 A large2 $ cat large2 modified large2 for linear merge (added largefile is already committed as normal) $ hg update -q -C 2 $ echo 'large3 as large file for linear merge' > large3 $ hg add --large large3 $ hg update -q 5 remote turned local largefile large3 into a normal file keep (l)argefile or use (n)ormal file? 
l $ hg debugdirstate --nodates | grep large3 a 0 -1 unset .hglf/large3 r 0 0 set large3 $ hg status -A large3 A large3 $ cat large3 large3 as large file for linear merge $ rm -f large3 .hglf/large3 Test that the internal linear merging works correctly (both heads are stripped to keep pairing of revision number and commit log) $ hg update -q -C 2 $ hg strip 3 4 saved backup bundle to $TESTTMP/repo/.hg/strip-backup/9530e27857f7-2e7b195d-backup.hg (glob) $ mv .hg/strip-backup/9530e27857f7-2e7b195d-backup.hg $TESTTMP (internal linear merging at "hg pull --update") $ echo 'large1 for linear merge (conflict)' > large1 $ echo 'large2 for linear merge (conflict with normal file)' > large2 $ hg pull --update --config debug.dirstate.delaywrite=2 $TESTTMP/9530e27857f7-2e7b195d-backup.hg pulling from $TESTTMP/9530e27857f7-2e7b195d-backup.hg (glob) searching for changes adding changesets adding manifests adding file changes added 3 changesets with 5 changes to 5 files remote turned local largefile large2 into a normal file keep (l)argefile or use (n)ormal file? l largefile large1 has a merge conflict ancestor was 4669e532d5b2c093a78eca010077e708a071bb64 keep (l)ocal ba94c2efe5b7c5e0af8d189295ce00553b0612b7 or take (o)ther e5bb990443d6a92aaf7223813720f7566c9dd05b? 
l 2 files updated, 1 files merged, 0 files removed, 0 files unresolved $ hg status -A large1 M large1 $ cat large1 large1 for linear merge (conflict) $ cat .hglf/large1 ba94c2efe5b7c5e0af8d189295ce00553b0612b7 $ hg status -A large2 A large2 $ cat large2 large2 for linear merge (conflict with normal file) $ cat .hglf/large2 d7591fe9be0f6227d90bddf3e4f52ff41fc1f544 (internal linear merging at "hg unbundle --update") $ hg update -q -C 2 $ hg rollback -q $ echo 'large1 for linear merge (conflict)' > large1 $ echo 'large2 for linear merge (conflict with normal file)' > large2 $ hg unbundle --update --config debug.dirstate.delaywrite=2 $TESTTMP/9530e27857f7-2e7b195d-backup.hg adding changesets adding manifests adding file changes added 3 changesets with 5 changes to 5 files remote turned local largefile large2 into a normal file keep (l)argefile or use (n)ormal file? l largefile large1 has a merge conflict ancestor was 4669e532d5b2c093a78eca010077e708a071bb64 keep (l)ocal ba94c2efe5b7c5e0af8d189295ce00553b0612b7 or take (o)ther e5bb990443d6a92aaf7223813720f7566c9dd05b? l 2 files updated, 1 files merged, 0 files removed, 0 files unresolved $ hg status -A large1 M large1 $ cat large1 large1 for linear merge (conflict) $ cat .hglf/large1 ba94c2efe5b7c5e0af8d189295ce00553b0612b7 $ hg status -A large2 A large2 $ cat large2 large2 for linear merge (conflict with normal file) $ cat .hglf/large2 d7591fe9be0f6227d90bddf3e4f52ff41fc1f544 (internal linear merging in subrepo at "hg update") $ cd .. 
$ hg init subparent $ cd subparent $ hg clone -q -u 2 ../repo sub $ cat > .hgsub < sub = sub > EOF $ hg add .hgsub $ hg commit -m '#0@parent' $ cat .hgsubstate f74e50bd9e5594b7cf1e6c5cbab86ddd25f3ca2f sub $ hg -R sub update -q $ hg commit -m '#1@parent' $ cat .hgsubstate d65e59e952a9638e2ce863b41a420ca723dd3e8d sub $ hg update -q 0 $ echo 'large1 for linear merge (conflict)' > sub/large1 $ echo 'large2 for linear merge (conflict with normal file)' > sub/large2 $ hg update --config ui.interactive=True --config debug.dirstate.delaywrite=2 < m > r > l > l > EOF subrepository sub diverged (local revision: f74e50bd9e55, remote revision: d65e59e952a9) (M)erge, keep (l)ocal or keep (r)emote? m subrepository sources for sub differ (in checked out version) use (l)ocal source (f74e50bd9e55) or (r)emote source (d65e59e952a9)? r remote turned local largefile large2 into a normal file keep (l)argefile or use (n)ormal file? l largefile large1 has a merge conflict ancestor was 4669e532d5b2c093a78eca010077e708a071bb64 keep (l)ocal ba94c2efe5b7c5e0af8d189295ce00553b0612b7 or take (o)ther e5bb990443d6a92aaf7223813720f7566c9dd05b? l 2 files updated, 1 files merged, 0 files removed, 0 files unresolved 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R sub status -A sub/large1 M sub/large1 $ cat sub/large1 large1 for linear merge (conflict) $ cat sub/.hglf/large1 ba94c2efe5b7c5e0af8d189295ce00553b0612b7 $ hg -R sub status -A sub/large2 A sub/large2 $ cat sub/large2 large2 for linear merge (conflict with normal file) $ cat sub/.hglf/large2 d7591fe9be0f6227d90bddf3e4f52ff41fc1f544 $ cd .. $ cd repo Test that rebase updates largefiles in the working directory even if it is aborted by conflict. 
$ hg update -q -C 3 $ cat .hglf/large1 e5bb990443d6a92aaf7223813720f7566c9dd05b $ cat large1 large1 in #3 $ hg rebase -s 1 -d 3 --keep --config ui.interactive=True < o > EOF rebasing 1:72518492caa6 "#1" largefile large1 has a merge conflict ancestor was 4669e532d5b2c093a78eca010077e708a071bb64 keep (l)ocal e5bb990443d6a92aaf7223813720f7566c9dd05b or take (o)ther 58e24f733a964da346e2407a2bee99d9001184f5? o merging normal1 warning: conflicts while merging normal1! (edit, then use 'hg resolve --mark') unresolved conflicts (see hg resolve, then hg rebase --continue) [1] $ cat .hglf/large1 58e24f733a964da346e2407a2bee99d9001184f5 $ cat large1 large1 in #1 Test that rebase updates standins for manually modified largefiles at the 1st commit of resuming. $ echo "manually modified before 'hg rebase --continue'" > large1 $ hg resolve -m normal1 (no more unresolved files) continue: hg rebase --continue $ hg rebase --continue --config ui.interactive=True < c > EOF rebasing 1:72518492caa6 "#1" rebasing 4:07d6153b5c04 "#4" local changed .hglf/large1 which remote deleted use (c)hanged version, (d)elete, or leave (u)nresolved? c $ hg diff -c "tip~1" --nodates .hglf/large1 | grep '^[+-][0-9a-z]' -e5bb990443d6a92aaf7223813720f7566c9dd05b +8a4f783556e7dea21139ca0466eafce954c75c13 $ rm -f large1 $ hg update -q -C tip $ cat large1 manually modified before 'hg rebase --continue' Test that transplant updates largefiles, of which standins are safely changed, even if it is aborted by conflict of other. 
$ hg update -q -C 5 $ cat .hglf/large1 e5bb990443d6a92aaf7223813720f7566c9dd05b $ cat large1 large1 in #3 $ hg diff -c 4 .hglf/largeX | grep '^[+-][0-9a-z]' +fa44618ea25181aff4f48b70428294790cec9f61 $ hg transplant 4 applying 07d6153b5c04 patching file .hglf/large1 Hunk #1 FAILED at 0 1 out of 1 hunks FAILED -- saving rejects to file .hglf/large1.rej patch failed to apply abort: fix up the working directory and run hg transplant --continue [255] $ hg status -A large1 C large1 $ cat .hglf/large1 e5bb990443d6a92aaf7223813720f7566c9dd05b $ cat large1 large1 in #3 $ hg status -A largeX A largeX $ cat .hglf/largeX fa44618ea25181aff4f48b70428294790cec9f61 $ cat largeX largeX Test that transplant updates standins for manually modified largefiles at the 1st commit of resuming. $ echo "manually modified before 'hg transplant --continue'" > large1 $ hg transplant --continue 07d6153b5c04 transplanted as f1bf30eb88cc $ hg diff -c tip .hglf/large1 | grep '^[+-][0-9a-z]' -e5bb990443d6a92aaf7223813720f7566c9dd05b +6a4f36d4075fbe0f30ec1d26ca44e63c05903671 $ rm -f large1 $ hg update -q -C tip $ cat large1 manually modified before 'hg transplant --continue' Test that "hg status" doesn't show removal of largefiles not managed in the target context. $ hg update -q -C 4 $ hg remove largeX $ hg status -A largeX R largeX $ hg status -A --rev '.^1' largeX #if execbit Test that "hg status" against revisions other than parent notices exec bit changes of largefiles. 
$ hg update -q -C 4 (the case that large2 doesn't have exec bit in the target context but in the working context) $ chmod +x large2 $ hg status -A --rev 0 large2 M large2 $ hg commit -m 'chmod +x large2' (the case that large2 has exec bit in the target context but not in the working context) $ echo dummy > dummy $ hg add dummy $ hg commit -m 'revision for separation' $ chmod -x large2 $ hg status -A --rev '.^1' large2 M large2 #else Test that "hg status" against revisions other than parent ignores exec bit correctly on the platform being unaware of it. $ hg update -q -C 4 $ cat > exec-bit.patch < # HG changeset patch > # User test > # Date 0 0 > # Thu Jan 01 00:00:00 1970 +0000 > # Node ID be1b433a65b12b27b5519d92213e14f7e1769b90 > # Parent 07d6153b5c04313efb75deec9ba577de7faeb727 > chmod +x large2 > > diff --git a/.hglf/large2 b/.hglf/large2 > old mode 100644 > new mode 100755 > EOF $ hg import --exact --bypass exec-bit.patch applying exec-bit.patch $ hg status -A --rev tip large2 C large2 #endif $ cd .. 
Test that "hg convert" avoids copying largefiles from the working directory into store, because "hg convert" doesn't update largefiles in the working directory (removing files under ".cache/largefiles" forces "hg convert" to copy corresponding largefiles) $ cat >> $HGRCPATH < [extensions] > convert = > EOF $ rm $TESTTMP/.cache/largefiles/6a4f36d4075fbe0f30ec1d26ca44e63c05903671 $ hg convert -q repo repo.converted mercurial-3.7.3/tests/test-fileset-generated.t0000644000175000017500000001441612676531525021030 0ustar mpmmpm00000000000000 $ hg init Set up history and working copy $ python $TESTDIR/generate-working-copy-states.py state 2 1 $ hg addremove -q --similarity 0 $ hg commit -m first $ python $TESTDIR/generate-working-copy-states.py state 2 2 $ hg addremove -q --similarity 0 $ hg commit -m second $ python $TESTDIR/generate-working-copy-states.py state 2 wc $ hg addremove -q --similarity 0 $ hg forget *_*_*-untracked $ rm *_*_missing-* Test status $ hg st -A 'set:modified()' M content1_content1_content3-tracked M content1_content2_content1-tracked M content1_content2_content3-tracked M missing_content2_content3-tracked $ hg st -A 'set:added()' A content1_missing_content1-tracked A content1_missing_content3-tracked A missing_missing_content3-tracked $ hg st -A 'set:removed()' R content1_content1_content1-untracked R content1_content1_content3-untracked R content1_content1_missing-untracked R content1_content2_content1-untracked R content1_content2_content2-untracked R content1_content2_content3-untracked R content1_content2_missing-untracked R missing_content2_content2-untracked R missing_content2_content3-untracked R missing_content2_missing-untracked $ hg st -A 'set:deleted()' ! content1_content1_missing-tracked ! content1_content2_missing-tracked ! content1_missing_missing-tracked ! missing_content2_missing-tracked ! missing_missing_missing-tracked $ hg st -A 'set:missing()' ! content1_content1_missing-tracked ! content1_content2_missing-tracked ! 
content1_missing_missing-tracked ! missing_content2_missing-tracked ! missing_missing_missing-tracked $ hg st -A 'set:unknown()' ? content1_missing_content1-untracked ? content1_missing_content3-untracked ? missing_missing_content3-untracked $ hg st -A 'set:clean()' C content1_content1_content1-tracked C content1_content2_content2-tracked C missing_content2_content2-tracked Test log $ hg log -T '{rev}\n' --stat 'set:modified()' 1 content1_content2_content1-tracked | 2 +- content1_content2_content3-tracked | 2 +- missing_content2_content3-tracked | 1 + 3 files changed, 3 insertions(+), 2 deletions(-) 0 content1_content1_content3-tracked | 1 + content1_content2_content1-tracked | 1 + content1_content2_content3-tracked | 1 + 3 files changed, 3 insertions(+), 0 deletions(-) Largefiles doesn't crash $ hg log -T '{rev}\n' --stat 'set:modified()' --config extensions.largefiles= 1 content1_content2_content1-tracked | 2 +- content1_content2_content3-tracked | 2 +- missing_content2_content3-tracked | 1 + 3 files changed, 3 insertions(+), 2 deletions(-) 0 content1_content1_content3-tracked | 1 + content1_content2_content1-tracked | 1 + content1_content2_content3-tracked | 1 + 3 files changed, 3 insertions(+), 0 deletions(-) $ hg log -T '{rev}\n' --stat 'set:added()' 1 content1_missing_content1-tracked | 1 - content1_missing_content3-tracked | 1 - 2 files changed, 0 insertions(+), 2 deletions(-) 0 content1_missing_content1-tracked | 1 + content1_missing_content3-tracked | 1 + 2 files changed, 2 insertions(+), 0 deletions(-) $ hg log -T '{rev}\n' --stat 'set:removed()' 1 content1_content2_content1-untracked | 2 +- content1_content2_content2-untracked | 2 +- content1_content2_content3-untracked | 2 +- content1_content2_missing-untracked | 2 +- missing_content2_content2-untracked | 1 + missing_content2_content3-untracked | 1 + missing_content2_missing-untracked | 1 + 7 files changed, 7 insertions(+), 4 deletions(-) 0 content1_content1_content1-untracked | 1 + 
content1_content1_content3-untracked | 1 + content1_content1_missing-untracked | 1 + content1_content2_content1-untracked | 1 + content1_content2_content2-untracked | 1 + content1_content2_content3-untracked | 1 + content1_content2_missing-untracked | 1 + 7 files changed, 7 insertions(+), 0 deletions(-) $ hg log -T '{rev}\n' --stat 'set:deleted()' 1 content1_content2_missing-tracked | 2 +- content1_missing_missing-tracked | 1 - missing_content2_missing-tracked | 1 + 3 files changed, 2 insertions(+), 2 deletions(-) 0 content1_content1_missing-tracked | 1 + content1_content2_missing-tracked | 1 + content1_missing_missing-tracked | 1 + 3 files changed, 3 insertions(+), 0 deletions(-) $ hg log -T '{rev}\n' --stat 'set:unknown()' 1 content1_missing_content1-untracked | 1 - content1_missing_content3-untracked | 1 - 2 files changed, 0 insertions(+), 2 deletions(-) 0 content1_missing_content1-untracked | 1 + content1_missing_content3-untracked | 1 + 2 files changed, 2 insertions(+), 0 deletions(-) $ hg log -T '{rev}\n' --stat 'set:clean()' 1 content1_content2_content2-tracked | 2 +- missing_content2_content2-tracked | 1 + 2 files changed, 2 insertions(+), 1 deletions(-) 0 content1_content1_content1-tracked | 1 + content1_content2_content2-tracked | 1 + 2 files changed, 2 insertions(+), 0 deletions(-) Test revert $ hg revert 'set:modified()' reverting content1_content1_content3-tracked reverting content1_content2_content1-tracked reverting content1_content2_content3-tracked reverting missing_content2_content3-tracked $ hg revert 'set:added()' forgetting content1_missing_content1-tracked forgetting content1_missing_content3-tracked forgetting missing_missing_content3-tracked $ hg revert 'set:removed()' undeleting content1_content1_content1-untracked undeleting content1_content1_content3-untracked undeleting content1_content1_missing-untracked undeleting content1_content2_content1-untracked undeleting content1_content2_content2-untracked undeleting 
content1_content2_content3-untracked undeleting content1_content2_missing-untracked undeleting missing_content2_content2-untracked undeleting missing_content2_content3-untracked undeleting missing_content2_missing-untracked $ hg revert 'set:deleted()' reverting content1_content1_missing-tracked reverting content1_content2_missing-tracked forgetting content1_missing_missing-tracked reverting missing_content2_missing-tracked forgetting missing_missing_missing-tracked $ hg revert 'set:unknown()' $ hg revert 'set:clean()' mercurial-3.7.3/tests/test-keyword.t0000644000175000017500000006734412676531525017135 0ustar mpmmpm00000000000000 $ cat <> $HGRCPATH > [extensions] > keyword = > mq = > notify = > record = > transplant = > [ui] > interactive = true > EOF hide outer repo $ hg init Run kwdemo before [keyword] files are set up as it would succeed without uisetup otherwise $ hg --quiet kwdemo [extensions] keyword = [keyword] demo.txt = [keywordset] svn = False [keywordmaps] Author = {author|user} Date = {date|utcdate} Header = {root}/{file},v {node|short} {date|utcdate} {author|user} Id = {file|basename},v {node|short} {date|utcdate} {author|user} RCSFile = {file|basename},v RCSfile = {file|basename},v Revision = {node|short} Source = {root}/{file},v $Author: test $ $Date: ????/??/?? ??:??:?? $ (glob) $Header: */demo.txt,v ???????????? ????/??/?? ??:??:?? test $ (glob) $Id: demo.txt,v ???????????? ????/??/?? ??:??:?? test $ (glob) $RCSFile: demo.txt,v $ $RCSfile: demo.txt,v $ $Revision: ???????????? 
$ (glob) $Source: */demo.txt,v $ (glob) $ hg --quiet kwdemo "Branch = {branches}" [extensions] keyword = [keyword] demo.txt = [keywordset] svn = False [keywordmaps] Branch = {branches} $Branch: demobranch $ $ cat <> $HGRCPATH > [keyword] > ** = > b = ignore > i = ignore > [hooks] > EOF $ cp $HGRCPATH $HGRCPATH.nohooks > cat <> $HGRCPATH > commit= > commit.test=cp a hooktest > EOF $ hg init Test-bndl $ cd Test-bndl kwshrink should exit silently in empty/invalid repo $ hg kwshrink Symlinks cannot be created on Windows. A bundle to test this was made with: hg init t cd t echo a > a ln -s a sym hg add sym hg ci -m addsym -u mercurial hg bundle --base null ../test-keyword.hg $ hg pull -u "$TESTDIR"/bundles/test-keyword.hg pulling from *test-keyword.hg (glob) requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo 'expand $Id$' > a $ echo 'do not process $Id:' >> a $ echo 'xxx $' >> a $ echo 'ignore $Id$' > b Output files as they were created $ cat a b expand $Id$ do not process $Id: xxx $ ignore $Id$ no kwfiles $ hg kwfiles untracked candidates $ hg -v kwfiles --unknown k a Add files and check status $ hg addremove adding a adding b $ hg status A a A b Default keyword expansion including commit hook Interrupted commit should not change state or run commit hook $ hg --debug commit abort: empty commit message [255] $ hg status A a A b Commit with several checks $ hg --debug commit -mabsym -u 'User Name ' committing files: a b committing manifest committing changelog overwriting a expanding keywords committed changeset 1:ef63ca68695bc9495032c6fda1350c71e6d256e9 running hook commit.test: cp a hooktest $ hg status ? 
hooktest $ hg debugrebuildstate $ hg --quiet identify ef63ca68695b cat files in working directory with keywords expanded $ cat a b expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $ do not process $Id: xxx $ ignore $Id$ hg cat files and symlink, no expansion $ hg cat sym a b && echo expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $ do not process $Id: xxx $ ignore $Id$ a $ diff a hooktest $ cp $HGRCPATH.nohooks $HGRCPATH $ rm hooktest hg status of kw-ignored binary file starting with '\1\n' >>> open("i", "wb").write("\1\nfoo") $ hg -q commit -Am metasep i $ hg status >>> open("i", "wb").write("\1\nbar") $ hg status M i $ hg -q commit -m "modify metasep" i $ hg status --rev 2:3 M i $ touch empty $ hg -q commit -A -m "another file" $ hg status -A --rev 3:4 i C i $ hg -q strip --no-backup 2 Test hook execution bundle $ hg bundle --base null ../kw.hg 2 changesets found $ cd .. $ hg init Test $ cd Test Notify on pull to check whether keywords stay as is in email ie. if patch.diff wrapper acts as it should $ cat <> $HGRCPATH > [hooks] > incoming.notify = python:hgext.notify.hook > [notify] > sources = pull > diffstat = False > maxsubject = 15 > [reposubs] > * = Test > EOF Pull from bundle and trigger notify $ hg pull -u ../kw.hg pulling from ../kw.hg requesting all changes adding changesets adding manifests adding file changes added 2 changesets with 3 changes to 3 files Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Date: * (glob) Subject: changeset in... 
From: mercurial X-Hg-Notification: changeset a2392c293916 Message-Id: (glob) To: Test changeset a2392c293916 in $TESTTMP/Test (glob) details: $TESTTMP/Test?cmd=changeset;node=a2392c293916 description: addsym diffs (6 lines): diff -r 000000000000 -r a2392c293916 sym --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/sym Sat Feb 09 20:25:47 2008 +0100 @@ -0,0 +1,1 @@ +a \ No newline at end of file Content-Type: text/plain; charset="us-ascii" MIME-Version: 1.0 Content-Transfer-Encoding: 7bit Date:* (glob) Subject: changeset in... From: User Name X-Hg-Notification: changeset ef63ca68695b Message-Id: (glob) To: Test changeset ef63ca68695b in $TESTTMP/Test (glob) details: $TESTTMP/Test?cmd=changeset;node=ef63ca68695b description: absym diffs (12 lines): diff -r a2392c293916 -r ef63ca68695b a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,3 @@ +expand $Id$ +do not process $Id: +xxx $ diff -r a2392c293916 -r ef63ca68695b b --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/b Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +ignore $Id$ 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cp $HGRCPATH.nohooks $HGRCPATH Touch files and check with status $ touch a b $ hg status Update and expand $ rm sym a b $ hg update -C 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat a b expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $ do not process $Id: xxx $ ignore $Id$ Check whether expansion is filewise and file mode is preserved $ echo '$Id$' > c $ echo 'tests for different changenodes' >> c #if unix-permissions $ chmod 600 c $ ls -l c | cut -b 1-10 -rw------- #endif commit file c $ hg commit -A -mcndiff -d '1 0' -u 'User Name ' adding c #if unix-permissions $ ls -l c | cut -b 1-10 -rw------- #endif force expansion $ hg -v kwexpand overwriting a expanding keywords overwriting c expanding keywords compare changenodes in a and c $ cat a c expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 
user $ do not process $Id: xxx $ $Id: c,v 40a904bbbe4c 1970/01/01 00:00:01 user $ tests for different changenodes record $ echo '$Id$' > r $ hg add r record chunk >>> lines = open('a', 'rb').readlines() >>> lines.insert(1, 'foo\n') >>> lines.append('bar\n') >>> open('a', 'wb').writelines(lines) $ hg record -d '10 1' -m rectest a< y > y > n > EOF diff --git a/a b/a 2 hunks, 2 lines changed examine changes to 'a'? [Ynesfdaq?] y @@ -1,3 +1,4 @@ expand $Id$ +foo do not process $Id: xxx $ record change 1/2 to 'a'? [Ynesfdaq?] y @@ -2,2 +3,3 @@ do not process $Id: xxx $ +bar record change 2/2 to 'a'? [Ynesfdaq?] n $ hg identify 5f5eb23505c3+ tip $ hg status M a A r Cat modified file a $ cat a expand $Id: a,v 5f5eb23505c3 1970/01/01 00:00:10 test $ foo do not process $Id: xxx $ bar Diff remaining chunk $ hg diff a diff -r 5f5eb23505c3 a --- a/a Thu Jan 01 00:00:09 1970 -0000 +++ b/a * (glob) @@ -2,3 +2,4 @@ foo do not process $Id: xxx $ +bar $ hg rollback repository tip rolled back to revision 2 (undo commit) working directory now based on revision 2 Record all chunks in file a $ echo foo > msg - do not use "hg record -m" here! $ hg record -l msg -d '11 1' a< y > y > y > EOF diff --git a/a b/a 2 hunks, 2 lines changed examine changes to 'a'? [Ynesfdaq?] y @@ -1,3 +1,4 @@ expand $Id$ +foo do not process $Id: xxx $ record change 1/2 to 'a'? [Ynesfdaq?] y @@ -2,2 +3,3 @@ do not process $Id: xxx $ +bar record change 2/2 to 'a'? [Ynesfdaq?] 
y File a should be clean $ hg status -A a C a rollback and revert expansion $ cat a expand $Id: a,v 78e0a02d76aa 1970/01/01 00:00:11 test $ foo do not process $Id: xxx $ bar $ hg --verbose rollback repository tip rolled back to revision 2 (undo commit) working directory now based on revision 2 overwriting a expanding keywords $ hg status a M a $ cat a expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $ foo do not process $Id: xxx $ bar $ echo '$Id$' > y $ echo '$Id$' > z $ hg add y $ hg commit -Am "rollback only" z $ cat z $Id: z,v 45a5d3adce53 1970/01/01 00:00:00 test $ $ hg --verbose rollback repository tip rolled back to revision 2 (undo commit) working directory now based on revision 2 overwriting z shrinking keywords Only z should be overwritten $ hg status a y z M a A y A z $ cat z $Id$ $ hg forget y z $ rm y z record added file alone $ hg -v record -l msg -d '12 2' r< y > y > EOF diff --git a/r b/r new file mode 100644 examine changes to 'r'? [Ynesfdaq?] y @@ -0,0 +1,1 @@ +$Id$ record this change to 'r'? [Ynesfdaq?] y resolving manifests patching file r committing files: r committing manifest committing changelog committed changeset 3:82a2f715724d overwriting r expanding keywords $ hg status r $ hg --verbose rollback repository tip rolled back to revision 2 (undo commit) working directory now based on revision 2 overwriting r shrinking keywords $ hg forget r $ rm msg r $ hg update -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved record added keyword ignored file $ echo '$Id$' > i $ hg add i $ hg --verbose record -d '13 1' -m recignored< y > y > EOF diff --git a/i b/i new file mode 100644 examine changes to 'i'? [Ynesfdaq?] y @@ -0,0 +1,1 @@ +$Id$ record this change to 'i'? [Ynesfdaq?] 
y resolving manifests patching file i committing files: i committing manifest committing changelog committed changeset 3:9f40ceb5a072 $ cat i $Id$ $ hg -q rollback $ hg forget i $ rm i amend $ echo amend >> a $ echo amend >> b $ hg -q commit -d '14 1' -m 'prepare amend' $ hg --debug commit --amend -d '15 1' -m 'amend without changes' | grep keywords overwriting a expanding keywords $ hg -q id 67d8c481a6be $ head -1 a expand $Id: a,v 67d8c481a6be 1970/01/01 00:00:15 test $ $ hg -q strip --no-backup tip Test patch queue repo $ hg init --mq $ hg qimport -r tip -n mqtest.diff $ hg commit --mq -m mqtest Keywords should not be expanded in patch $ cat .hg/patches/mqtest.diff # HG changeset patch # User User Name # Date 1 0 # Thu Jan 01 00:00:01 1970 +0000 # Node ID 40a904bbbe4cd4ab0a1f28411e35db26341a40ad # Parent ef63ca68695bc9495032c6fda1350c71e6d256e9 cndiff diff -r ef63ca68695b -r 40a904bbbe4c c --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/c Thu Jan 01 00:00:01 1970 +0000 @@ -0,0 +1,2 @@ +$Id$ +tests for different changenodes $ hg qpop popping mqtest.diff patch queue now empty qgoto, implying qpush, should expand $ hg qgoto mqtest.diff applying mqtest.diff now at: mqtest.diff $ cat c $Id: c,v 40a904bbbe4c 1970/01/01 00:00:01 user $ tests for different changenodes $ hg cat c $Id: c,v 40a904bbbe4c 1970/01/01 00:00:01 user $ tests for different changenodes Keywords should not be expanded in filelog $ hg --config 'extensions.keyword=!' 
cat c $Id$ tests for different changenodes qpop and move on $ hg qpop popping mqtest.diff patch queue now empty Copy and show added kwfiles $ hg cp a c $ hg kwfiles a c Commit and show expansion in original and copy $ hg --debug commit -ma2c -d '1 0' -u 'User Name ' committing files: c c: copy a:0045e12f6c5791aac80ca6cbfd97709a88307292 committing manifest committing changelog overwriting c expanding keywords committed changeset 2:25736cf2f5cbe41f6be4e6784ef6ecf9f3bbcc7d $ cat a c expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $ do not process $Id: xxx $ expand $Id: c,v 25736cf2f5cb 1970/01/01 00:00:01 user $ do not process $Id: xxx $ Touch copied c and check its status $ touch c $ hg status Copy kwfile to keyword ignored file unexpanding keywords $ hg --verbose copy a i copying a to i overwriting i shrinking keywords $ head -n 1 i expand $Id$ $ hg forget i $ rm i Copy ignored file to ignored file: no overwriting $ hg --verbose copy b i copying b to i $ hg forget i $ rm i cp symlink file; hg cp -A symlink file (part1) - copied symlink points to kwfile: overwrite #if symlink $ cp sym i $ ls -l i -rw-r--r--* (glob) $ head -1 i expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $ $ hg copy --after --verbose sym i copying sym to i overwriting i shrinking keywords $ head -1 i expand $Id$ $ hg forget i $ rm i #endif Test different options of hg kwfiles $ hg kwfiles a c $ hg -v kwfiles --ignore I b I sym $ hg kwfiles --all K a K c I b I sym Diff specific revision $ hg diff --rev 1 diff -r ef63ca68695b c --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/c * (glob) @@ -0,0 +1,3 @@ +expand $Id$ +do not process $Id: +xxx $ Status after rollback: $ hg rollback repository tip rolled back to revision 1 (undo commit) working directory now based on revision 1 $ hg status A c $ hg update --clean 0 files updated, 0 files merged, 0 files removed, 0 files unresolved #if symlink cp symlink file; hg cp -A symlink file (part2) - copied symlink points to kw ignored file: do not 
overwrite $ cat a > i $ ln -s i symignored $ hg commit -Am 'fake expansion in ignored and symlink' i symignored $ cp symignored x $ hg copy --after --verbose symignored x copying symignored to x $ head -n 1 x expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $ $ hg forget x $ rm x $ hg rollback repository tip rolled back to revision 1 (undo commit) working directory now based on revision 1 $ hg update --clean 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm i symignored #endif Custom keywordmaps as argument to kwdemo $ hg --quiet kwdemo "Xinfo = {author}: {desc}" [extensions] keyword = [keyword] ** = b = ignore demo.txt = i = ignore [keywordset] svn = False [keywordmaps] Xinfo = {author}: {desc} $Xinfo: test: hg keyword configuration and expansion example $ Configure custom keywordmaps $ cat <>$HGRCPATH > [keywordmaps] > Id = {file} {node|short} {date|rfc822date} {author|user} > Xinfo = {author}: {desc} > EOF Cat and hg cat files before custom expansion $ cat a b expand $Id: a,v ef63ca68695b 1970/01/01 00:00:00 user $ do not process $Id: xxx $ ignore $Id$ $ hg cat sym a b && echo expand $Id: a ef63ca68695b Thu, 01 Jan 1970 00:00:00 +0000 user $ do not process $Id: xxx $ ignore $Id$ a Write custom keyword and prepare multi-line commit message $ echo '$Xinfo$' >> a $ cat <> log > firstline > secondline > EOF Interrupted commit should not change state $ hg commit abort: empty commit message [255] $ hg status M a ? c ? log Commit with multi-line message and custom expansion $ hg --debug commit -l log -d '2 0' -u 'User Name ' committing files: a committing manifest committing changelog overwriting a expanding keywords committed changeset 2:bb948857c743469b22bbf51f7ec8112279ca5d83 $ rm log Stat, verify and show custom expansion (firstline) $ hg status ? 
c $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 3 files, 3 changesets, 4 total revisions $ cat a b expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $ do not process $Id: xxx $ $Xinfo: User Name : firstline $ ignore $Id$ $ hg cat sym a b && echo expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $ do not process $Id: xxx $ $Xinfo: User Name : firstline $ ignore $Id$ a annotate $ hg annotate a 1: expand $Id$ 1: do not process $Id: 1: xxx $ 2: $Xinfo$ remove with status checks $ hg debugrebuildstate $ hg remove a $ hg --debug commit -m rma committing files: committing manifest committing changelog committed changeset 3:d14c712653769de926994cf7fbb06c8fbd68f012 $ hg status ? c Rollback, revert, and check expansion $ hg rollback repository tip rolled back to revision 2 (undo commit) working directory now based on revision 2 $ hg status R a ? c $ hg revert --no-backup --rev tip a $ cat a expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $ do not process $Id: xxx $ $Xinfo: User Name : firstline $ Clone to test global and local configurations $ cd .. 
Expansion in destination with global configuration $ hg --quiet clone Test globalconf $ cat globalconf/a expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $ do not process $Id: xxx $ $Xinfo: User Name : firstline $ No expansion in destination with local configuration in origin only $ hg --quiet --config 'keyword.**=ignore' clone Test localconf $ cat localconf/a expand $Id$ do not process $Id: xxx $ $Xinfo$ Clone to test incoming $ hg clone -r1 Test Test-a adding changesets adding manifests adding file changes added 2 changesets with 3 changes to 3 files updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd Test-a $ cat <> .hg/hgrc > [paths] > default = ../Test > EOF $ hg incoming comparing with $TESTTMP/Test (glob) searching for changes changeset: 2:bb948857c743 tag: tip user: User Name date: Thu Jan 01 00:00:02 1970 +0000 summary: firstline Imported patch should not be rejected >>> import re >>> text = re.sub(r'(Id.*)', r'\1 rejecttest', open('a').read()) >>> open('a', 'wb').write(text) $ hg --debug commit -m'rejects?' -d '3 0' -u 'User Name ' committing files: a committing manifest committing changelog overwriting a expanding keywords committed changeset 2:85e279d709ffc28c9fdd1b868570985fc3d87082 $ hg export -o ../rejecttest.diff tip $ cd ../Test $ hg import ../rejecttest.diff applying ../rejecttest.diff $ cat a b expand $Id: a 4e0994474d25 Thu, 01 Jan 1970 00:00:03 +0000 user $ rejecttest do not process $Id: rejecttest xxx $ $Xinfo: User Name : rejects? 
$ ignore $Id$ $ hg rollback repository tip rolled back to revision 2 (undo import) working directory now based on revision 2 $ hg update --clean 1 files updated, 0 files merged, 0 files removed, 0 files unresolved kwexpand/kwshrink on selected files $ mkdir x $ hg copy a x/a $ hg --verbose kwshrink a overwriting a shrinking keywords - sleep required for dirstate.normal() check $ sleep 1 $ hg status a $ hg --verbose kwexpand a overwriting a expanding keywords $ hg status a kwexpand x/a should abort $ hg --verbose kwexpand x/a abort: outstanding uncommitted changes [255] $ cd x $ hg --debug commit -m xa -d '3 0' -u 'User Name ' committing files: x/a x/a: copy a:779c764182ce5d43e2b1eb66ce06d7b47bfe342e committing manifest committing changelog overwriting x/a expanding keywords committed changeset 3:b4560182a3f9a358179fd2d835c15e9da379c1e4 $ cat a expand $Id: x/a b4560182a3f9 Thu, 01 Jan 1970 00:00:03 +0000 user $ do not process $Id: xxx $ $Xinfo: User Name : xa $ kwshrink a inside directory x $ hg --verbose kwshrink a overwriting x/a shrinking keywords $ cat a expand $Id$ do not process $Id: xxx $ $Xinfo$ $ cd .. 
kwexpand nonexistent $ hg kwexpand nonexistent nonexistent:* (glob) #if serve hg serve - expand with hgweb file - no expansion with hgweb annotate/changeset/filediff - check errors $ hg serve -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log $ cat hg.pid >> $DAEMON_PIDS $ get-with-headers.py localhost:$HGPORT 'file/tip/a/?style=raw' 200 Script output follows expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $ do not process $Id: xxx $ $Xinfo: User Name : firstline $ $ get-with-headers.py localhost:$HGPORT 'annotate/tip/a/?style=raw' 200 Script output follows user@1: expand $Id$ user@1: do not process $Id: user@1: xxx $ user@2: $Xinfo$ $ get-with-headers.py localhost:$HGPORT 'rev/tip/?style=raw' 200 Script output follows # HG changeset patch # User User Name # Date 3 0 # Node ID b4560182a3f9a358179fd2d835c15e9da379c1e4 # Parent bb948857c743469b22bbf51f7ec8112279ca5d83 xa diff -r bb948857c743 -r b4560182a3f9 x/a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/x/a Thu Jan 01 00:00:03 1970 +0000 @@ -0,0 +1,4 @@ +expand $Id$ +do not process $Id: +xxx $ +$Xinfo$ $ get-with-headers.py localhost:$HGPORT 'diff/bb948857c743/a?style=raw' 200 Script output follows diff -r ef63ca68695b -r bb948857c743 a --- a/a Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:02 1970 +0000 @@ -1,3 +1,4 @@ expand $Id$ do not process $Id: xxx $ +$Xinfo$ $ cat errors.log #endif Prepare merge and resolve tests $ echo '$Id$' > m $ hg add m $ hg commit -m 4kw $ echo foo >> m $ hg commit -m 5foo simplemerge $ hg update 4 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo foo >> m $ hg commit -m 6foo created new head $ hg merge 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg commit -m simplemerge $ cat m $Id: m 27d48ee14f67 Thu, 01 Jan 1970 00:00:00 +0000 test $ foo conflict: keyword should stay outside conflict zone $ hg update 4 1 files updated, 0 files merged, 0 files removed, 
0 files unresolved $ echo bar >> m $ hg commit -m 8bar created new head $ hg merge merging m warning: conflicts while merging m! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ cat m $Id$ <<<<<<< local: 88a80c8d172e - test: 8bar bar ======= foo >>>>>>> other: 85d2d2d732a5 - test: simplemerge resolve to local, m must contain hash of last change (local parent) $ hg resolve -t internal:local -a (no more unresolved files) $ hg commit -m localresolve $ cat m $Id: m 88a80c8d172e Thu, 01 Jan 1970 00:00:00 +0000 test $ bar Test restricted mode with transplant -b $ hg update 6 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg branch foo marked working directory as branch foo (branches are permanent and global, did you want a bookmark?) $ mv a a.bak $ echo foobranch > a $ cat a.bak >> a $ rm a.bak $ hg commit -m 9foobranch $ hg update default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -y transplant -b foo tip applying 4aa30d025d50 4aa30d025d50 transplanted to e00abbf63521 Expansion in changeset but not in file $ hg tip -p changeset: 11:e00abbf63521 tag: tip parent: 9:800511b3a22d user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 9foobranch diff -r 800511b3a22d -r e00abbf63521 a --- a/a Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:00 1970 +0000 @@ -1,3 +1,4 @@ +foobranch expand $Id$ do not process $Id: xxx $ $ head -n 2 a foobranch expand $Id: a e00abbf63521 Thu, 01 Jan 1970 00:00:00 +0000 test $ Turn off expansion $ hg -q rollback $ hg -q update -C kwshrink with unknown file u $ cp a u $ hg --verbose kwshrink overwriting a shrinking keywords overwriting m shrinking keywords overwriting x/a shrinking keywords Keywords shrunk in working directory, but not yet disabled - cat shows unexpanded keywords - hg cat shows expanded keywords $ cat a b expand $Id$ do not 
process $Id: xxx $ $Xinfo$ ignore $Id$ $ hg cat sym a b && echo expand $Id: a bb948857c743 Thu, 01 Jan 1970 00:00:02 +0000 user $ do not process $Id: xxx $ $Xinfo: User Name : firstline $ ignore $Id$ a Now disable keyword expansion $ cp $HGRCPATH $HGRCPATH.backup $ rm "$HGRCPATH" $ cat a b expand $Id$ do not process $Id: xxx $ $Xinfo$ ignore $Id$ $ hg cat sym a b && echo expand $Id$ do not process $Id: xxx $ $Xinfo$ ignore $Id$ a enable keyword expansion again $ cat $HGRCPATH.backup >> $HGRCPATH Test restricted mode with unshelve $ cat <> $HGRCPATH > [extensions] > shelve = > EOF $ echo xxxx >> a $ hg diff diff -r 800511b3a22d a --- a/a Thu Jan 01 00:00:00 1970 +0000 +++ b/a * (glob) @@ -2,3 +2,4 @@ do not process $Id: xxx $ $Xinfo$ +xxxx $ hg shelve -q --name tmp $ hg shelve --list --patch tmp (*)* changes to: localresolve (glob) diff --git a/a b/a --- a/a +++ b/a @@ -2,3 +2,4 @@ do not process $Id: xxx $ $Xinfo$ +xxxx $ hg update -q -C 10 $ hg unshelve -q tmp $ hg diff diff -r 4aa30d025d50 a --- a/a Thu Jan 01 00:00:00 1970 +0000 +++ b/a * (glob) @@ -3,3 +3,4 @@ do not process $Id: xxx $ $Xinfo$ +xxxx Test restricted mode with rebase $ cat <> $HGRCPATH > [extensions] > rebase = > EOF $ hg update -q -C 9 $ echo xxxx >> a $ hg commit -m '#11' $ hg diff -c 11 diff -r 800511b3a22d -r b07670694489 a --- a/a Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:00 1970 +0000 @@ -2,3 +2,4 @@ do not process $Id: xxx $ $Xinfo$ +xxxx $ hg diff -c 10 diff -r 27d48ee14f67 -r 4aa30d025d50 a --- a/a Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:00 1970 +0000 @@ -1,3 +1,4 @@ +foobranch expand $Id$ do not process $Id: xxx $ $ hg rebase -q -s 10 -d 11 --keep $ hg diff -r 9 -r 12 a diff -r 800511b3a22d -r 1939b927726c a --- a/a Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:00 1970 +0000 @@ -1,4 +1,6 @@ +foobranch expand $Id$ do not process $Id: xxx $ $Xinfo$ +xxxx Test restricted mode with graft $ hg graft -q 10 $ hg diff -r 9 -r 13 a diff -r 800511b3a22d -r 
01a68de1003a a --- a/a Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:00 1970 +0000 @@ -1,4 +1,6 @@ +foobranch expand $Id$ do not process $Id: xxx $ $Xinfo$ +xxxx Test restricted mode with backout $ hg backout -q 11 --no-commit $ hg diff a diff -r 01a68de1003a a --- a/a Thu Jan 01 00:00:00 1970 +0000 +++ b/a * (glob) @@ -3,4 +3,3 @@ do not process $Id: xxx $ $Xinfo$ -xxxx Test restricted mode with histedit $ cat <> $HGRCPATH > [extensions] > histedit = > EOF $ hg commit -m 'backout #11' $ hg histedit -q --command - 13 < pick 49f5f2d940c3 14 backout #11 > pick 01a68de1003a 13 9foobranch > EOF Test restricted mode with fetch (with merge) $ cat <> $HGRCPATH > [extensions] > fetch = > EOF $ hg clone -q -r 9 . ../fetch-merge $ cd ../fetch-merge $ hg -R ../Test export 10 | hg import -q - $ hg fetch -q -r 11 $ hg diff -r 9 a diff -r 800511b3a22d a --- a/a Thu Jan 01 00:00:00 1970 +0000 +++ b/a * (glob) @@ -1,4 +1,6 @@ +foobranch expand $Id$ do not process $Id: xxx $ $Xinfo$ +xxxx $ cd .. mercurial-3.7.3/tests/test-hg-parseurl.py.out0000644000175000017500000000065312676531525020663 0ustar mpmmpm00000000000000http://example.com/no/anchor, branches: (None, []) http://example.com/an/anchor, branches: ('foo', []) http://example.com/no/anchor/branches, branches: (None, ['foo']) http://example.com/an/anchor/branches, branches: ('bar', ['foo']) http://example.com/an/anchor/branches-None, branches: ('foo', []) http://example.com/, branches: (None, []) http://example.com/, branches: (None, []) http://example.com/, branches: ('foo', []) mercurial-3.7.3/tests/test-treediscovery.t0000644000175000017500000003476212676531525020336 0ustar mpmmpm00000000000000#require killdaemons Tests discovery against servers without getbundle support: $ CAP="getbundle bundle2" $ . 
"$TESTDIR/notcapable" $ cat >> $HGRCPATH < [ui] > logtemplate="{rev} {node|short}: {desc} {branches}\n" > EOF Setup HTTP server control: $ remote=http://localhost:$HGPORT/ $ export remote $ tstart() { > echo '[web]' > $1/.hg/hgrc > echo 'push_ssl = false' >> $1/.hg/hgrc > echo 'allow_push = *' >> $1/.hg/hgrc > hg serve -R $1 -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log > cat hg.pid >> $DAEMON_PIDS > } $ tstop() { > killdaemons.py > [ "$1" ] && cut -d' ' -f6- access.log && cat errors.log > rm access.log errors.log > } Both are empty: $ hg init empty1 $ hg init empty2 $ tstart empty2 $ hg incoming -R empty1 $remote comparing with http://localhost:$HGPORT/ no changes found [1] $ hg outgoing -R empty1 $remote comparing with http://localhost:$HGPORT/ no changes found [1] $ hg pull -R empty1 $remote pulling from http://localhost:$HGPORT/ no changes found $ hg push -R empty1 $remote pushing to http://localhost:$HGPORT/ no changes found [1] $ tstop Base repo: $ hg init main $ cd main $ hg debugbuilddag -mo '+2:tbase @name1 +3:thead1 > $HGRCPATH $ echo 'convert =' >> $HGRCPATH $ glog() > { > hg log -G --template '{rev} "{desc}" files: {files}\n' "$@" > } $ hg init source $ cd source $ echo foo > foo $ echo baz > baz $ mkdir -p dir/subdir $ echo dir/file >> dir/file $ echo dir/file2 >> dir/file2 $ echo dir/file3 >> dir/file3 # to be corrupted in rev 0 $ echo dir/subdir/file3 >> dir/subdir/file3 $ echo dir/subdir/file4 >> dir/subdir/file4 $ hg ci -d '0 0' -qAm '0: add foo baz dir/' $ echo bar > bar $ echo quux > quux $ echo dir/file4 >> dir/file4 # to be corrupted in rev 1 $ hg copy foo copied $ hg ci -d '1 0' -qAm '1: add bar quux; copy foo to copied' $ echo >> foo $ hg ci -d '2 0' -m '2: change foo' $ hg up -qC 1 $ echo >> bar $ echo >> quux $ hg ci -d '3 0' -m '3: change bar quux' created new head $ hg up -qC 2 $ hg merge -qr 3 $ echo >> bar $ echo >> baz $ hg ci -d '4 0' -m '4: first merge; change bar baz' $ echo >> bar $ echo 1 >> baz $ echo >> quux $ hg 
ci -d '5 0' -m '5: change bar baz quux' $ hg up -qC 4 $ echo >> foo $ echo 2 >> baz $ hg ci -d '6 0' -m '6: change foo baz' created new head $ hg up -qC 5 $ hg merge -qr 6 $ echo >> bar $ hg ci -d '7 0' -m '7: second merge; change bar' $ echo >> foo $ hg ci -m '8: change foo' $ glog @ 8 "8: change foo" files: foo | o 7 "7: second merge; change bar" files: bar baz |\ | o 6 "6: change foo baz" files: baz foo | | o | 5 "5: change bar baz quux" files: bar baz quux |/ o 4 "4: first merge; change bar baz" files: bar baz |\ | o 3 "3: change bar quux" files: bar quux | | o | 2 "2: change foo" files: foo |/ o 1 "1: add bar quux; copy foo to copied" files: bar copied dir/file4 quux | o 0 "0: add foo baz dir/" files: baz dir/file dir/file2 dir/file3 dir/subdir/file3 dir/subdir/file4 foo final file versions in this repo: $ hg manifest --debug 9463f52fe115e377cf2878d4fc548117211063f2 644 bar 94c1be4dfde2ee8d78db8bbfcf81210813307c3d 644 baz 7711d36246cc83e61fb29cd6d4ef394c63f1ceaf 644 copied 3e20847584beff41d7cd16136b7331ab3d754be0 644 dir/file 75e6d3f8328f5f6ace6bf10b98df793416a09dca 644 dir/file2 e96dce0bc6a217656a3a410e5e6bec2c4f42bf7c 644 dir/file3 6edd55f559cdce67132b12ca09e09cee08b60442 644 dir/file4 5fe139720576e18e34bcc9f79174db8897c8afe9 644 dir/subdir/file3 57a1c1511590f3de52874adfa04effe8a77d64af 644 dir/subdir/file4 9a7b52012991e4873687192c3e17e61ba3e837a3 644 foo bc3eca3f47023a3e70ca0d8cc95a22a6827db19d 644 quux $ hg debugrename copied copied renamed from foo:2ed2a3912a0b24502043eae84ee4b279c18b90dd $ cd .. Test interaction with startrev and verify that changing it is handled properly: $ > empty $ hg convert --filemap empty source movingstart --config convert.hg.startrev=3 -r4 initializing destination movingstart repository scanning source... sorting... converting... 1 3: change bar quux 0 4: first merge; change bar baz $ hg convert --filemap empty source movingstart scanning source... sorting... converting... 
3 5: change bar baz quux 2 6: change foo baz 1 7: second merge; change bar warning: af455ce4166b3c9c88e6309c2b9332171dcea595 parent 61e22ca76c3b3e93df20338c4e02ce286898e825 is missing warning: cf908b3eeedc301c9272ebae931da966d5b326c7 parent 59e1ab45c888289513b7354484dac8a88217beab is missing 0 8: change foo splitrepo tests $ splitrepo() > { > msg="$1" > files="$2" > opts=$3 > echo "% $files: $msg" > prefix=`echo "$files" | sed -e 's/ /-/g'` > fmap="$prefix.fmap" > repo="$prefix.repo" > for i in $files; do > echo "include $i" >> "$fmap" > done > hg -q convert $opts --filemap "$fmap" --datesort source "$repo" > hg up -q -R "$repo" > glog -R "$repo" > hg -R "$repo" manifest --debug > } $ splitrepo 'skip unwanted merges; use 1st parent in 1st merge, 2nd in 2nd' foo % foo: skip unwanted merges; use 1st parent in 1st merge, 2nd in 2nd @ 3 "8: change foo" files: foo | o 2 "6: change foo baz" files: foo | o 1 "2: change foo" files: foo | o 0 "0: add foo baz dir/" files: foo 9a7b52012991e4873687192c3e17e61ba3e837a3 644 foo $ splitrepo 'merges are not merges anymore' bar % bar: merges are not merges anymore @ 4 "7: second merge; change bar" files: bar | o 3 "5: change bar baz quux" files: bar | o 2 "4: first merge; change bar baz" files: bar | o 1 "3: change bar quux" files: bar | o 0 "1: add bar quux; copy foo to copied" files: bar 9463f52fe115e377cf2878d4fc548117211063f2 644 bar $ splitrepo '1st merge is not a merge anymore; 2nd still is' baz % baz: 1st merge is not a merge anymore; 2nd still is @ 4 "7: second merge; change bar" files: baz |\ | o 3 "6: change foo baz" files: baz | | o | 2 "5: change bar baz quux" files: baz |/ o 1 "4: first merge; change bar baz" files: baz | o 0 "0: add foo baz dir/" files: baz 94c1be4dfde2ee8d78db8bbfcf81210813307c3d 644 baz $ splitrepo 'we add additional merges when they are interesting' 'foo quux' % foo quux: we add additional merges when they are interesting @ 8 "8: change foo" files: foo | o 7 "7: second merge; change bar" files: |\ 
| o 6 "6: change foo baz" files: foo | | o | 5 "5: change bar baz quux" files: quux |/ o 4 "4: first merge; change bar baz" files: |\ | o 3 "3: change bar quux" files: quux | | o | 2 "2: change foo" files: foo |/ o 1 "1: add bar quux; copy foo to copied" files: quux | o 0 "0: add foo baz dir/" files: foo 9a7b52012991e4873687192c3e17e61ba3e837a3 644 foo bc3eca3f47023a3e70ca0d8cc95a22a6827db19d 644 quux $ splitrepo 'partial conversion' 'bar quux' '-r 3' % bar quux: partial conversion @ 1 "3: change bar quux" files: bar quux | o 0 "1: add bar quux; copy foo to copied" files: bar quux b79105bedc55102f394e90a789c9c380117c1b4a 644 bar db0421cc6b685a458c8d86c7d5c004f94429ea23 644 quux $ splitrepo 'complete the partial conversion' 'bar quux' % bar quux: complete the partial conversion @ 4 "7: second merge; change bar" files: bar | o 3 "5: change bar baz quux" files: bar quux | o 2 "4: first merge; change bar baz" files: bar | o 1 "3: change bar quux" files: bar quux | o 0 "1: add bar quux; copy foo to copied" files: bar quux 9463f52fe115e377cf2878d4fc548117211063f2 644 bar bc3eca3f47023a3e70ca0d8cc95a22a6827db19d 644 quux $ rm -r foo.repo $ splitrepo 'partial conversion' 'foo' '-r 3' % foo: partial conversion @ 0 "0: add foo baz dir/" files: foo 2ed2a3912a0b24502043eae84ee4b279c18b90dd 644 foo $ splitrepo 'complete the partial conversion' 'foo' % foo: complete the partial conversion @ 3 "8: change foo" files: foo | o 2 "6: change foo baz" files: foo | o 1 "2: change foo" files: foo | o 0 "0: add foo baz dir/" files: foo 9a7b52012991e4873687192c3e17e61ba3e837a3 644 foo $ splitrepo 'copied file; source not included in new repo' copied % copied: copied file; source not included in new repo @ 0 "1: add bar quux; copy foo to copied" files: copied 2ed2a3912a0b24502043eae84ee4b279c18b90dd 644 copied $ hg --cwd copied.repo debugrename copied copied not renamed $ splitrepo 'copied file; source included in new repo' 'foo copied' % foo copied: copied file; source included in new repo 
@ 4 "8: change foo" files: foo | o 3 "6: change foo baz" files: foo | o 2 "2: change foo" files: foo | o 1 "1: add bar quux; copy foo to copied" files: copied | o 0 "0: add foo baz dir/" files: foo 7711d36246cc83e61fb29cd6d4ef394c63f1ceaf 644 copied 9a7b52012991e4873687192c3e17e61ba3e837a3 644 foo $ hg --cwd foo-copied.repo debugrename copied copied renamed from foo:2ed2a3912a0b24502043eae84ee4b279c18b90dd verify the top level 'include .' if there is no other includes: $ echo "exclude something" > default.fmap $ hg convert -q --filemap default.fmap -r1 source dummydest2 $ hg -R dummydest2 log --template '{rev} {node|short} {desc|firstline}\n' 1 61e22ca76c3b 1: add bar quux; copy foo to copied 0 c085cf2ee7fe 0: add foo baz dir/ $ echo "include somethingelse" >> default.fmap $ hg convert -q --filemap default.fmap -r1 source dummydest3 $ hg -R dummydest3 log --template '{rev} {node|short} {desc|firstline}\n' $ echo "include ." >> default.fmap $ hg convert -q --filemap default.fmap -r1 source dummydest4 $ hg -R dummydest4 log --template '{rev} {node|short} {desc|firstline}\n' 1 61e22ca76c3b 1: add bar quux; copy foo to copied 0 c085cf2ee7fe 0: add foo baz dir/ ensure that the filemap contains duplicated slashes (issue3612) $ cat > renames.fmap < include dir > exclude dir/file2 > rename dir dir2//dir3 > include foo > include copied > rename foo foo2/ > rename copied ./copied2 > exclude dir/subdir > include dir/subdir/file3 > EOF $ rm source/.hg/store/data/dir/file3.i $ rm source/.hg/store/data/dir/file4.i $ hg -q convert --filemap renames.fmap --datesort source dummydest abort: data/dir/file3.i@e96dce0bc6a2: no match found! 
[255] $ hg -q convert --filemap renames.fmap --datesort --config convert.hg.ignoreerrors=1 source renames.repo ignoring: data/dir/file3.i@e96dce0bc6a2: no match found ignoring: data/dir/file4.i@6edd55f559cd: no match found $ hg up -q -R renames.repo $ glog -R renames.repo @ 4 "8: change foo" files: foo2 | o 3 "6: change foo baz" files: foo2 | o 2 "2: change foo" files: foo2 | o 1 "1: add bar quux; copy foo to copied" files: copied2 | o 0 "0: add foo baz dir/" files: dir2/dir3/file dir2/dir3/subdir/file3 foo2 $ hg -R renames.repo verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 4 files, 5 changesets, 7 total revisions $ hg -R renames.repo manifest --debug d43feacba7a4f1f2080dde4a4b985bd8a0236d46 644 copied2 3e20847584beff41d7cd16136b7331ab3d754be0 644 dir2/dir3/file 5fe139720576e18e34bcc9f79174db8897c8afe9 644 dir2/dir3/subdir/file3 9a7b52012991e4873687192c3e17e61ba3e837a3 644 foo2 $ hg --cwd renames.repo debugrename copied2 copied2 renamed from foo2:2ed2a3912a0b24502043eae84ee4b279c18b90dd copied: $ hg --cwd source cat copied foo copied2: $ hg --cwd renames.repo cat copied2 foo filemap errors $ cat > errors.fmap < include dir/ # beware that comments changes error line numbers! > exclude /dir > rename dir//dir /dir//dir/ "out of sync" > include > EOF $ hg -q convert --filemap errors.fmap source errors.repo errors.fmap:3: superfluous / in include '/dir' errors.fmap:3: superfluous / in rename '/dir' errors.fmap:4: unknown directive 'out of sync' errors.fmap:5: path to exclude is missing abort: errors in filemap [255] test branch closing revision pruning if branch is pruned $ hg init branchpruning $ cd branchpruning $ hg branch foo marked working directory as branch foo (branches are permanent and global, did you want a bookmark?) 
$ echo a > a $ hg ci -Am adda adding a $ hg ci --close-branch -m closefoo $ hg up 0 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg branch empty marked working directory as branch empty (branches are permanent and global, did you want a bookmark?) $ hg ci -m emptybranch $ hg ci --close-branch -m closeempty $ hg up 0 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg branch default marked working directory as branch default (branches are permanent and global, did you want a bookmark?) $ echo b > b $ hg ci -Am addb adding b $ hg ci --close-branch -m closedefault $ cat > filemap < include b > EOF $ cd .. $ hg convert branchpruning branchpruning-hg1 initializing destination branchpruning-hg1 repository scanning source... sorting... converting... 5 adda 4 closefoo 3 emptybranch 2 closeempty 1 addb 0 closedefault $ glog -R branchpruning-hg1 _ 5 "closedefault" files: | o 4 "addb" files: b | | _ 3 "closeempty" files: | | | o 2 "emptybranch" files: |/ | _ 1 "closefoo" files: |/ o 0 "adda" files: a exercise incremental conversion at the same time $ hg convert -r0 --filemap branchpruning/filemap branchpruning branchpruning-hg2 initializing destination branchpruning-hg2 repository scanning source... sorting... converting... 0 adda $ hg convert -r4 --filemap branchpruning/filemap branchpruning branchpruning-hg2 scanning source... sorting... converting... 0 addb $ hg convert --filemap branchpruning/filemap branchpruning branchpruning-hg2 scanning source... sorting... converting... 
3 closefoo 2 emptybranch 1 closeempty 0 closedefault $ glog -R branchpruning-hg2 _ 1 "closedefault" files: | o 0 "addb" files: b Test rebuilding of map with unknown revisions in shamap - it used to crash $ cd branchpruning $ hg up -r 2 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge 4 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m 'merging something' $ cd .. $ echo "53792d18237d2b64971fa571936869156655338d 6d955580116e82c4b029bd30f321323bae71a7f0" >> branchpruning-hg2/.hg/shamap $ hg convert --filemap branchpruning/filemap branchpruning branchpruning-hg2 --debug --config progress.debug=true run hg source pre-conversion action run hg sink pre-conversion action scanning source... scanning: 1 revisions sorting... converting... 0 merging something source: 2503605b178fe50e8fbbb0e77b97939540aa8c87 converting: 0/1 revisions (0.00%) unknown revmap source: 53792d18237d2b64971fa571936869156655338d run hg sink post-conversion action run hg source post-conversion action filemap rename undoing revision rename $ hg init renameundo $ cd renameundo $ echo 1 > a $ echo 1 > c $ hg ci -qAm add $ hg mv -q a b/a $ hg mv -q c b/c $ hg ci -qm rename $ echo 2 > b/a $ echo 2 > b/c $ hg ci -qm modify $ cd .. $ echo "rename b ." > renameundo.fmap $ hg convert --filemap renameundo.fmap renameundo renameundo2 initializing destination renameundo2 repository scanning source... sorting... converting... 
2 add 1 rename filtering out empty revision repository tip rolled back to revision 0 (undo convert) 0 modify $ glog -R renameundo2 o 1 "modify" files: a c | o 0 "add" files: a c test merge parents/empty merges pruning $ glog() > { > hg log -G --template '{rev}:{node|short}@{branch} "{desc}" files: {files}\n' "$@" > } test anonymous branch pruning $ hg init anonymousbranch $ cd anonymousbranch $ echo a > a $ echo b > b $ hg ci -Am add adding a adding b $ echo a >> a $ hg ci -m changea $ hg up 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo b >> b $ hg ci -m changeb created new head $ hg up 1 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m merge $ cd .. $ cat > filemap < include a > EOF $ hg convert --filemap filemap anonymousbranch anonymousbranch-hg initializing destination anonymousbranch-hg repository scanning source... sorting... converting... 3 add 2 changea 1 changeb 0 merge $ glog -R anonymousbranch @ 3:c71d5201a498@default "merge" files: |\ | o 2:607eb44b17f9@default "changeb" files: b | | o | 1:1f60ea617824@default "changea" files: a |/ o 0:0146e6129113@default "add" files: a b $ glog -R anonymousbranch-hg o 1:cda818e7219b@default "changea" files: a | o 0:c334dc3be0da@default "add" files: a $ cat anonymousbranch-hg/.hg/shamap 0146e6129113dba9ac90207cfdf2d7ed35257ae5 c334dc3be0daa2a4e9ce4d2e2bdcba40c09d4916 1f60ea61782421edf8d051ff4fcb61b330f26a4a cda818e7219b5f7f3fb9f49780054ed6a1905ec3 607eb44b17f9348cd5cbd26e16af87ba77b0b037 c334dc3be0daa2a4e9ce4d2e2bdcba40c09d4916 c71d5201a498b2658d105a6bf69d7a0df2649aea cda818e7219b5f7f3fb9f49780054ed6a1905ec3 $ cat > filemap < include b > EOF $ hg convert --filemap filemap anonymousbranch anonymousbranch-hg2 initializing destination anonymousbranch-hg2 repository scanning source... sorting... converting... 
3 add 2 changea 1 changeb 0 merge $ glog -R anonymousbranch @ 3:c71d5201a498@default "merge" files: |\ | o 2:607eb44b17f9@default "changeb" files: b | | o | 1:1f60ea617824@default "changea" files: a |/ o 0:0146e6129113@default "add" files: a b $ glog -R anonymousbranch-hg2 o 1:62dd350b0df6@default "changeb" files: b | o 0:4b9ced861657@default "add" files: b $ cat anonymousbranch-hg2/.hg/shamap 0146e6129113dba9ac90207cfdf2d7ed35257ae5 4b9ced86165703791653059a1db6ed864630a523 1f60ea61782421edf8d051ff4fcb61b330f26a4a 4b9ced86165703791653059a1db6ed864630a523 607eb44b17f9348cd5cbd26e16af87ba77b0b037 62dd350b0df695f7d2c82a02e0499b16fd790f22 c71d5201a498b2658d105a6bf69d7a0df2649aea 62dd350b0df695f7d2c82a02e0499b16fd790f22 test named branch pruning $ hg init namedbranch $ cd namedbranch $ echo a > a $ echo b > b $ hg ci -Am add adding a adding b $ echo a >> a $ hg ci -m changea $ hg up 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg branch foo marked working directory as branch foo (branches are permanent and global, did you want a bookmark?) $ echo b >> b $ hg ci -m changeb $ hg up default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge foo 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m merge $ cd .. $ cat > filemap < include a > EOF $ hg convert --filemap filemap namedbranch namedbranch-hg initializing destination namedbranch-hg repository scanning source... sorting... converting... 
3 add 2 changea 1 changeb 0 merge $ glog -R namedbranch @ 3:73899bcbe45c@default "merge" files: |\ | o 2:8097982d19fc@foo "changeb" files: b | | o | 1:1f60ea617824@default "changea" files: a |/ o 0:0146e6129113@default "add" files: a b $ glog -R namedbranch-hg o 1:cda818e7219b@default "changea" files: a | o 0:c334dc3be0da@default "add" files: a $ cd namedbranch $ hg --config extensions.mq= strip tip 1 files updated, 0 files merged, 0 files removed, 0 files unresolved saved backup bundle to $TESTTMP/namedbranch/.hg/strip-backup/73899bcbe45c-92adf160-backup.hg (glob) $ hg up foo 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg merge default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m merge $ cd .. $ hg convert --filemap filemap namedbranch namedbranch-hg2 initializing destination namedbranch-hg2 repository scanning source... sorting... converting... 3 add 2 changea 1 changeb 0 merge $ glog -R namedbranch @ 3:e1959de76e1b@foo "merge" files: |\ | o 2:8097982d19fc@foo "changeb" files: b | | o | 1:1f60ea617824@default "changea" files: a |/ o 0:0146e6129113@default "add" files: a b $ glog -R namedbranch-hg2 o 2:dcf314454667@foo "merge" files: |\ | o 1:cda818e7219b@default "changea" files: a |/ o 0:c334dc3be0da@default "add" files: a $ cd .. test converting merges into a repo that contains other files $ hg init merge-test1 $ cd merge-test1 $ touch a && hg commit -Aqm 'add a' $ echo a > a && hg commit -Aqm 'edit a' $ hg up -q 0 $ touch b && hg commit -Aqm 'add b' $ hg merge -q 1 && hg commit -qm 'merge a & b' $ cd .. $ hg init merge-test2 $ cd merge-test2 $ mkdir converted $ touch converted/a toberemoved && hg commit -Aqm 'add converted/a & toberemoved' $ touch x && rm toberemoved && hg commit -Aqm 'add x & remove tobremoved' $ cd .. 
$ hg log -G -T '{shortest(node)} {desc}' -R merge-test1 @ 1191 merge a & b |\ | o 9077 add b | | o | d19f edit a |/ o ac82 add a $ hg log -G -T '{shortest(node)} {desc}' -R merge-test2 @ 150e add x & remove tobremoved | o bbac add converted/a & toberemoved - Build a shamap where the target converted/a is in on top of an unrelated - change to 'x'. This simulates using convert to merge several repositories - together. $ cat >> merge-test2/.hg/shamap < $(hg -R merge-test1 log -r 0 -T '{node}') $(hg -R merge-test2 log -r 0 -T '{node}') > $(hg -R merge-test1 log -r 1 -T '{node}') $(hg -R merge-test2 log -r 1 -T '{node}') > EOF $ cat >> merge-test-filemap < rename . converted/ > EOF $ hg convert --filemap merge-test-filemap merge-test1 merge-test2 --traceback scanning source... sorting... converting... 1 add b 0 merge a & b $ hg -R merge-test2 manifest -r tip converted/a converted/b x $ hg -R merge-test2 log -G -T '{shortest(node)} {desc}\n{files % "- {file}\n"}\n' o 6eaa merge a & b |\ - converted/a | | - toberemoved | | | o 2995 add b | | - converted/b | | @ | 150e add x & remove tobremoved |/ - toberemoved | - x | o bbac add converted/a & toberemoved - converted/a - toberemoved $ cd .. Test case where cleanp2 contains a file that doesn't exist in p2 - for example because filemap changed. $ hg init cleanp2 $ cd cleanp2 $ touch f f1 f2 && hg ci -Aqm '0' $ echo f1 > f1 && echo >> f && hg ci -m '1' $ hg up -qr0 && echo f2 > f2 && echo >> f && hg ci -qm '2' $ echo "include f" > filemap $ hg convert --filemap filemap . assuming destination .-hg initializing destination .-hg repository scanning source... sorting... converting... 2 0 1 1 0 2 $ hg merge && hg ci -qm '3' 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ echo "include ." > filemap $ hg convert --filemap filemap . assuming destination .-hg scanning source... sorting... converting... 
0 3 $ hg -R .-hg log -G -T '{shortest(node)} {desc}\n{files % "- {file}\n"}\n' o e9ed 3 |\ | o 33a0 2 | | - f | | o | f73e 1 |/ - f | o d681 0 - f $ hg -R .-hg mani -r tip f $ cd .. mercurial-3.7.3/tests/test-mq-subrepo-svn.t0000644000175000017500000000224212676531525020331 0ustar mpmmpm00000000000000#require svn13 $ cat <> $HGRCPATH > [extensions] > mq = > [diff] > nodates = 1 > EOF fn to create new repository, and cd into it $ mkrepo() { > hg init $1 > cd $1 > hg qinit > } handle svn subrepos safely $ svnadmin create svn-repo-2499 $ SVNREPOPATH=`pwd`/svn-repo-2499/project #if windows $ SVNREPOURL=file:///`$PYTHON -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` #else $ SVNREPOURL=file://`$PYTHON -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"` #endif $ mkdir -p svn-project-2499/trunk $ svn import -qm 'init project' svn-project-2499 "$SVNREPOURL" qnew on repo w/svn subrepo $ mkrepo repo-2499-svn-subrepo $ svn co "$SVNREPOURL"/trunk sub Checked out revision 1. $ echo 'sub = [svn]sub' >> .hgsub $ hg add .hgsub $ hg status -S -X '**/format' A .hgsub $ hg qnew -m0 0.diff $ cd sub $ echo a > a $ svn add a A a $ svn st A* a (glob) $ cd .. $ hg status -S # doesn't show status for svn subrepos (yet) $ hg qnew -m1 1.diff abort: uncommitted changes in subrepository 'sub' [255] $ cd .. mercurial-3.7.3/tests/test-issue1802.t0000644000175000017500000000322312676531525017076 0ustar mpmmpm00000000000000#require execbit Create extension that can disable exec checks: $ cat > noexec.py < from mercurial import extensions, util > def setflags(orig, f, l, x): > pass > def checkexec(orig, path): > return False > def extsetup(ui): > extensions.wrapfunction(util, 'setflags', setflags) > extensions.wrapfunction(util, 'checkexec', checkexec) > EOF $ hg init unix-repo $ cd unix-repo $ touch a $ hg add a $ hg commit -m 'unix: add a' $ hg clone . 
../win-repo updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ chmod +x a $ hg commit -m 'unix: chmod a' $ hg manifest -v 755 * a $ cd ../win-repo $ touch b $ hg add b $ hg commit -m 'win: add b' $ hg manifest -v 644 a 644 b $ hg pull pulling from $TESTTMP/unix-repo searching for changes adding changesets adding manifests adding file changes added 1 changesets with 0 changes to 0 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg manifest -v -r tip 755 * a Simulate a Windows merge: $ hg --config extensions.n=$TESTTMP/noexec.py merge --debug searching for copies back to rev 1 unmatched files in local: b resolving manifests branchmerge: True, force: False, partial: False ancestor: a03b0deabf2b, local: d6fa54f68ae1+, remote: 2d8bcf2dda39 a: update permissions -> e 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) Simulate a Windows commit: $ hg --config extensions.n=$TESTTMP/noexec.py commit -m 'win: merge' $ hg manifest -v 755 * a 644 b $ cd .. mercurial-3.7.3/tests/test-mq-qfold.t0000644000175000017500000001122512676531525017154 0ustar mpmmpm00000000000000 $ cat <> $HGRCPATH > [extensions] > mq = > [mq] > git = keep > [diff] > nodates = 1 > EOF init: $ hg init repo $ cd repo $ echo a > a $ hg ci -Am adda adding a $ echo a >> a $ hg qnew -f p1 $ echo b >> a $ hg qnew -f p2 $ echo c >> a $ hg qnew -f p3 Fold in the middle of the queue: (this tests also that editor is not invoked if '--edit' is not specified) $ hg qpop p1 popping p3 popping p2 now at: p1 $ hg qdiff diff -r 07f494440405 a --- a/a +++ b/a @@ -1,1 +1,2 @@ a +a $ HGEDITOR=cat hg qfold p2 $ grep git .hg/patches/p1 && echo 'git patch found!' [1] $ hg qser p1 p3 $ hg qdiff diff -r 07f494440405 a --- a/a +++ b/a @@ -1,1 +1,3 @@ a +a +b Fold with local changes: $ echo d >> a $ hg qfold p3 abort: local changes found, qrefresh first [255] $ hg diff -c . 
diff -r 07f494440405 -r ???????????? a (glob) --- a/a +++ b/a @@ -1,1 +1,3 @@ a +a +b $ hg revert -a --no-backup reverting a Fold git patch into a regular patch, expect git patch: $ echo a >> a $ hg qnew -f regular $ hg cp a aa $ hg qnew --git -f git $ hg qpop popping git now at: regular $ hg qfold git $ cat .hg/patches/regular # HG changeset patch # Parent ???????????????????????????????????????? (glob) diff --git a/a b/a --- a/a +++ b/a @@ -1,3 +1,4 @@ a a b +a diff --git a/a b/aa copy from a copy to aa --- a/a +++ b/aa @@ -1,3 +1,4 @@ a a b +a $ hg qpop popping regular now at: p1 $ hg qdel regular Fold regular patch into a git patch, expect git patch: $ hg cp a aa $ hg qnew --git -f git $ echo b >> aa $ hg qnew -f regular $ hg qpop popping regular now at: git $ hg qfold regular $ cat .hg/patches/git # HG changeset patch # Parent ???????????????????????????????????????? (glob) diff --git a/a b/aa copy from a copy to aa --- a/a +++ b/aa @@ -1,3 +1,4 @@ a a b +b Test saving last-message.txt: $ hg qrefresh -m "original message" $ cat > $TESTTMP/commitfailure.py < from mercurial import error > def reposetup(ui, repo): > class commitfailure(repo.__class__): > def commit(self, *args, **kwargs): > raise error.Abort('emulating unexpected abort') > repo.__class__ = commitfailure > EOF $ cat >> .hg/hgrc < [extensions] > # this failure occurs before editor invocation > commitfailure = $TESTTMP/commitfailure.py > EOF $ cat > $TESTTMP/editor.sh << EOF > echo "==== before editing" > cat \$1 > echo "====" > (echo; echo "test saving last-message.txt") >> \$1 > EOF $ hg qapplied p1 git $ hg tip --template "{files}\n" aa (test that editor is not invoked before transaction starting, and that combination of '--edit' and '--message' doesn't abort execution) $ rm -f .hg/last-message.txt $ HGEDITOR="sh $TESTTMP/editor.sh" hg qfold -e -m MESSAGE p3 qrefresh interrupted while patch was popped! 
(revert --all, qpush to recover) abort: emulating unexpected abort [255] $ test -f .hg/last-message.txt [1] (reset applied patches and directory status) $ cat >> .hg/hgrc < [extensions] > # this failure occurs after editor invocation > commitfailure = ! > EOF $ hg qapplied p1 $ hg status -A aa ? aa $ rm aa $ hg status -m M a $ hg revert --no-backup -q a $ hg qpush -q git now at: git (test that editor is invoked and commit message is saved into "last-message.txt") $ cat >> .hg/hgrc < [hooks] > # this failure occurs after editor invocation > pretxncommit.unexpectedabort = false > EOF $ rm -f .hg/last-message.txt $ HGEDITOR="sh $TESTTMP/editor.sh" hg qfold -e p3 ==== before editing original message HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to use default message. HG: -- HG: user: test HG: branch 'default' HG: added aa HG: changed a ==== note: commit message saved in .hg/last-message.txt transaction abort! rollback completed qrefresh interrupted while patch was popped! (revert --all, qpush to recover) abort: pretxncommit.unexpectedabort hook exited with status 1 [255] $ cat .hg/last-message.txt original message test saving last-message.txt (confirm whether files listed up in the commit message editing are correct) $ cat >> .hg/hgrc < [hooks] > pretxncommit.unexpectedabort = > EOF $ hg status -u | while read f; do rm ${f}; done $ hg revert --no-backup -q --all $ hg qpush -q git now at: git $ hg qpush -q --move p3 now at: p3 $ hg status --rev "git^1" --rev . -arm M a A aa $ cd .. mercurial-3.7.3/tests/test-hgweb-commands.t0000644000175000017500000021613012676531525020331 0ustar mpmmpm00000000000000#require serve An attempt at more fully testing the hgweb web interface. 
The following things are tested elsewhere and are therefore omitted: - archive, tested in test-archive - unbundle, tested in test-push-http - changegroupsubset, tested in test-pull $ cat << EOF >> $HGRCPATH > [format] > usegeneraldelta=yes > EOF Set up the repo $ hg init test $ cd test $ mkdir da $ echo foo > da/foo $ echo foo > foo $ hg ci -Ambase adding da/foo adding foo $ hg tag 1.0 $ hg bookmark something $ hg bookmark -r0 anotherthing $ echo another > foo $ hg branch stable marked working directory as branch stable (branches are permanent and global, did you want a bookmark?) $ hg ci -Ambranch $ hg branch unstable marked working directory as branch unstable >>> open('msg', 'wb').write('branch commit with null character: \0\n') $ hg ci -l msg $ rm msg $ cat > .hg/hgrc < [graph] > default.width = 3 > stable.width = 3 > stable.color = FF0000 > [websub] > append = s|(.*)|\1(websub)| > EOF $ hg serve --config server.uncompressed=False -n test -p $HGPORT -d --pid-file=hg.pid -E errors.log $ cat hg.pid >> $DAEMON_PIDS $ hg log -G --template '{rev}:{node|short} {desc}\n' @ 3:cad8025a2e87 branch commit with null character: \x00 (esc) | o 2:1d22e65f027e branch | o 1:a4f92ed23982 Added tag 1.0 for changeset 2ef0ac749a14 | o 0:2ef0ac749a14 base Logs and changes $ get-with-headers.py 127.0.0.1:$HGPORT 'log/?style=atom' 200 Script output follows http://*:$HGPORT/ (glob) (glob) (glob) test Changelog 1970-01-01T00:00:00+00:00 [unstable] branch commit with null character: http://*:$HGPORT/#changeset-cad8025a2e87f88c06259790adfa15acb4080123 (glob) (glob) test test 1970-01-01T00:00:00+00:00 1970-01-01T00:00:00+00:00
                        changeset cad8025a2e87
                        branch unstable
                        bookmark something
                        tag tip
                        user test
                        description branch commit with null character: (websub)
                        files
                        [stable] branch http://*:$HGPORT/#changeset-1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe (glob) (glob) test test 1970-01-01T00:00:00+00:00 1970-01-01T00:00:00+00:00
                        changeset 1d22e65f027e
                        branch stable
                        bookmark
                        tag
                        user test
                        description branch(websub)
                        files foo
                        [default] Added tag 1.0 for changeset 2ef0ac749a14 http://*:$HGPORT/#changeset-a4f92ed23982be056b9852de5dfe873eaac7f0de (glob) (glob) test test 1970-01-01T00:00:00+00:00 1970-01-01T00:00:00+00:00
                        changeset a4f92ed23982
                        branch default
                        bookmark
                        tag
                        user test
                        description Added tag 1.0 for changeset 2ef0ac749a14(websub)
                        files .hgtags
                        base http://*:$HGPORT/#changeset-2ef0ac749a14e4f57a5a822464a0902c6f7f448f (glob) (glob) test test 1970-01-01T00:00:00+00:00 1970-01-01T00:00:00+00:00
                        changeset 2ef0ac749a14
                        branch
                        bookmark anotherthing
                        tag 1.0
                        user test
                        description base(websub)
                        files da/foo
                        foo
                        $ get-with-headers.py 127.0.0.1:$HGPORT 'log/?style=rss' 200 Script output follows http://*:$HGPORT/ (glob) en-us test Changelog test Changelog [unstable] branch commit with null character: http://*:$HGPORT/rev/cad8025a2e87 (glob) http://*:$HGPORT/rev/cad8025a2e87 (glob) changeset cad8025a2e87 branch unstable bookmark something tag tip user test description branch commit with null character: (websub) files ]]> test Thu, 01 Jan 1970 00:00:00 +0000 [stable] branch http://*:$HGPORT/rev/1d22e65f027e (glob) http://*:$HGPORT/rev/1d22e65f027e (glob) changeset 1d22e65f027e branch stable bookmark tag user test description branch(websub) files foo
                        ]]>
                        test Thu, 01 Jan 1970 00:00:00 +0000
                        [default] Added tag 1.0 for changeset 2ef0ac749a14 http://*:$HGPORT/rev/a4f92ed23982 (glob) http://*:$HGPORT/rev/a4f92ed23982 (glob) changeset a4f92ed23982 branch default bookmark tag user test description Added tag 1.0 for changeset 2ef0ac749a14(websub) files .hgtags
                        ]]>
                        test Thu, 01 Jan 1970 00:00:00 +0000
                        base http://*:$HGPORT/rev/2ef0ac749a14 (glob) http://*:$HGPORT/rev/2ef0ac749a14 (glob) changeset 2ef0ac749a14 branch bookmark anotherthing tag 1.0 user test description base(websub) files da/foo
                        foo
                        ]]>
                        test Thu, 01 Jan 1970 00:00:00 +0000
                        (no-eol) $ get-with-headers.py 127.0.0.1:$HGPORT 'log/1/?style=atom' 200 Script output follows http://*:$HGPORT/ (glob) (glob) (glob) test Changelog 1970-01-01T00:00:00+00:00 [default] Added tag 1.0 for changeset 2ef0ac749a14 http://*:$HGPORT/#changeset-a4f92ed23982be056b9852de5dfe873eaac7f0de (glob) (glob) test test 1970-01-01T00:00:00+00:00 1970-01-01T00:00:00+00:00
                        changeset a4f92ed23982
                        branch default
                        bookmark
                        tag
                        user test
                        description Added tag 1.0 for changeset 2ef0ac749a14(websub)
                        files .hgtags
                        base http://*:$HGPORT/#changeset-2ef0ac749a14e4f57a5a822464a0902c6f7f448f (glob) (glob) test test 1970-01-01T00:00:00+00:00 1970-01-01T00:00:00+00:00
                        changeset 2ef0ac749a14
                        branch
                        bookmark anotherthing
                        tag 1.0
                        user test
                        description base(websub)
                        files da/foo
                        foo
                        $ get-with-headers.py 127.0.0.1:$HGPORT 'log/1/?style=rss' 200 Script output follows http://*:$HGPORT/ (glob) en-us test Changelog test Changelog [default] Added tag 1.0 for changeset 2ef0ac749a14 http://*:$HGPORT/rev/a4f92ed23982 (glob) http://*:$HGPORT/rev/a4f92ed23982 (glob) changeset a4f92ed23982 branch default bookmark tag user test description Added tag 1.0 for changeset 2ef0ac749a14(websub) files .hgtags
                        ]]>
                        test Thu, 01 Jan 1970 00:00:00 +0000
                        base http://*:$HGPORT/rev/2ef0ac749a14 (glob) http://*:$HGPORT/rev/2ef0ac749a14 (glob) changeset 2ef0ac749a14 branch bookmark anotherthing tag 1.0 user test description base(websub) files da/foo
                        foo
                        ]]>
                        test Thu, 01 Jan 1970 00:00:00 +0000
                        (no-eol) $ get-with-headers.py 127.0.0.1:$HGPORT 'log/1/foo/?style=atom' 200 Script output follows http://*:$HGPORT/atom-log/tip/foo (glob) (glob) test: foo history 1970-01-01T00:00:00+00:00 base http://*:$HGPORT/#changeset-2ef0ac749a14e4f57a5a822464a0902c6f7f448f (glob) (glob) test test 1970-01-01T00:00:00+00:00 1970-01-01T00:00:00+00:00
                        changeset 2ef0ac749a14
                        branch
                        bookmark anotherthing
                        tag 1.0
                        user test
                        description base(websub)
                        files
                        $ get-with-headers.py 127.0.0.1:$HGPORT 'log/1/foo/?style=rss' 200 Script output follows http://*:$HGPORT/ (glob) en-us test: foo history foo revision history base http://*:$HGPORT/log2ef0ac749a14/foo (glob) test Thu, 01 Jan 1970 00:00:00 +0000 $ get-with-headers.py 127.0.0.1:$HGPORT 'shortlog/' 200 Script output follows test: log

                        log

                        age author description
                        Thu, 01 Jan 1970 00:00:00 +0000 test branch commit with null character: unstable tip something
                        Thu, 01 Jan 1970 00:00:00 +0000 test branch stable
                        Thu, 01 Jan 1970 00:00:00 +0000 test Added tag 1.0 for changeset 2ef0ac749a14 default
                        Thu, 01 Jan 1970 00:00:00 +0000 test base 1.0 anotherthing
                        $ get-with-headers.py 127.0.0.1:$HGPORT 'rev/0/' 200 Script output follows test: 2ef0ac749a14

                        changeset 0:2ef0ac749a14 1.0 anotherthing

                        base(websub)
                        author test
                        date Thu, 01 Jan 1970 00:00:00 +0000
                        parents
                        children a4f92ed23982
                        files da/foo foo
                        diffstat 2 files changed, 2 insertions(+), 0 deletions(-) [+]
                        line wrap: on
                        line diff
                          --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
                          +++ b/da/foo	Thu Jan 01 00:00:00 1970 +0000
                          @@ -0,0 +1,1 @@
                          +foo
                          --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
                          +++ b/foo	Thu Jan 01 00:00:00 1970 +0000
                          @@ -0,0 +1,1 @@
                          +foo
                        $ get-with-headers.py 127.0.0.1:$HGPORT 'rev/1/?style=raw' 200 Script output follows # HG changeset patch # User test # Date 0 0 # Node ID a4f92ed23982be056b9852de5dfe873eaac7f0de # Parent 2ef0ac749a14e4f57a5a822464a0902c6f7f448f Added tag 1.0 for changeset 2ef0ac749a14 diff -r 2ef0ac749a14 -r a4f92ed23982 .hgtags --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/.hgtags Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +2ef0ac749a14e4f57a5a822464a0902c6f7f448f 1.0 $ get-with-headers.py 127.0.0.1:$HGPORT 'log?rev=base' 200 Script output follows test: searching for base

                        searching for 'base'

                        Assuming literal keyword search.

                        age author description
                        Thu, 01 Jan 1970 00:00:00 +0000 test base 1.0 anotherthing
                        $ get-with-headers.py 127.0.0.1:$HGPORT 'log?rev=stable&style=raw' | grep 'revision:' revision: 2 Search with revset syntax $ get-with-headers.py 127.0.0.1:$HGPORT 'log?rev=tip^&style=raw' 200 Script output follows # HG changesets search # Node ID cad8025a2e87f88c06259790adfa15acb4080123 # Query "tip^" # Mode revset expression search changeset: 1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe revision: 2 user: test date: Thu, 01 Jan 1970 00:00:00 +0000 summary: branch branch: stable $ get-with-headers.py 127.0.0.1:$HGPORT 'log?rev=last(all(),2)^&style=raw' 200 Script output follows # HG changesets search # Node ID cad8025a2e87f88c06259790adfa15acb4080123 # Query "last(all(),2)^" # Mode revset expression search changeset: 1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe revision: 2 user: test date: Thu, 01 Jan 1970 00:00:00 +0000 summary: branch branch: stable changeset: a4f92ed23982be056b9852de5dfe873eaac7f0de revision: 1 user: test date: Thu, 01 Jan 1970 00:00:00 +0000 summary: Added tag 1.0 for changeset 2ef0ac749a14 branch: default $ get-with-headers.py 127.0.0.1:$HGPORT 'log?rev=last(all(,2)^&style=raw' 200 Script output follows # HG changesets search # Node ID cad8025a2e87f88c06259790adfa15acb4080123 # Query "last(all(,2)^" # Mode literal keyword search $ get-with-headers.py 127.0.0.1:$HGPORT 'log?rev=last(al(),2)^&style=raw' 200 Script output follows # HG changesets search # Node ID cad8025a2e87f88c06259790adfa15acb4080123 # Query "last(al(),2)^" # Mode literal keyword search $ get-with-headers.py 127.0.0.1:$HGPORT 'log?rev=bookmark(anotherthing)&style=raw' 200 Script output follows # HG changesets search # Node ID cad8025a2e87f88c06259790adfa15acb4080123 # Query "bookmark(anotherthing)" # Mode revset expression search changeset: 2ef0ac749a14e4f57a5a822464a0902c6f7f448f revision: 0 user: test date: Thu, 01 Jan 1970 00:00:00 +0000 summary: base tag: 1.0 bookmark: anotherthing $ get-with-headers.py 127.0.0.1:$HGPORT 'log?rev=bookmark(abc)&style=raw' 200 
Script output follows # HG changesets search # Node ID cad8025a2e87f88c06259790adfa15acb4080123 # Query "bookmark(abc)" # Mode literal keyword search $ get-with-headers.py 127.0.0.1:$HGPORT 'log?rev=deadbeef:&style=raw' 200 Script output follows # HG changesets search # Node ID cad8025a2e87f88c06259790adfa15acb4080123 # Query "deadbeef:" # Mode literal keyword search $ get-with-headers.py 127.0.0.1:$HGPORT 'log?rev=user("test")&style=raw' 200 Script output follows # HG changesets search # Node ID cad8025a2e87f88c06259790adfa15acb4080123 # Query "user("test")" # Mode revset expression search changeset: cad8025a2e87f88c06259790adfa15acb4080123 revision: 3 user: test date: Thu, 01 Jan 1970 00:00:00 +0000 summary: branch commit with null character: \x00 (esc) branch: unstable tag: tip bookmark: something changeset: 1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe revision: 2 user: test date: Thu, 01 Jan 1970 00:00:00 +0000 summary: branch branch: stable changeset: a4f92ed23982be056b9852de5dfe873eaac7f0de revision: 1 user: test date: Thu, 01 Jan 1970 00:00:00 +0000 summary: Added tag 1.0 for changeset 2ef0ac749a14 branch: default changeset: 2ef0ac749a14e4f57a5a822464a0902c6f7f448f revision: 0 user: test date: Thu, 01 Jan 1970 00:00:00 +0000 summary: base tag: 1.0 bookmark: anotherthing $ get-with-headers.py 127.0.0.1:$HGPORT 'log?rev=user("re:test")&style=raw' 200 Script output follows # HG changesets search # Node ID cad8025a2e87f88c06259790adfa15acb4080123 # Query "user("re:test")" # Mode literal keyword search File-related $ get-with-headers.py 127.0.0.1:$HGPORT 'file/1/foo/?style=raw' 200 Script output follows foo $ get-with-headers.py 127.0.0.1:$HGPORT 'annotate/1/foo/?style=raw' 200 Script output follows test@0: foo $ get-with-headers.py 127.0.0.1:$HGPORT 'file/1/?style=raw' 200 Script output follows drwxr-xr-x da -rw-r--r-- 45 .hgtags -rw-r--r-- 4 foo $ hg log --template "{file_mods}\n" -r 1 $ hg parents --template "{node|short}\n" -r 1 2ef0ac749a14 $ hg parents 
--template "{node|short}\n" -r 1 foo 2ef0ac749a14 $ get-with-headers.py 127.0.0.1:$HGPORT 'file/1/foo' 200 Script output follows test: a4f92ed23982 foo

                        view foo @ 1:a4f92ed23982

                        Added tag 1.0 for changeset 2ef0ac749a14(websub)
                        author test
                        date Thu, 01 Jan 1970 00:00:00 +0000
                        parents 2ef0ac749a14
                        children 1d22e65f027e
                        line wrap: on
                        line source
                          foo
                        $ get-with-headers.py 127.0.0.1:$HGPORT 'filediff/0/foo/?style=raw' 200 Script output follows diff -r 000000000000 -r 2ef0ac749a14 foo --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/foo Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +foo $ get-with-headers.py 127.0.0.1:$HGPORT 'filediff/1/foo/?style=raw' 200 Script output follows $ hg log --template "{file_mods}\n" -r 2 foo $ hg parents --template "{node|short}\n" -r 2 a4f92ed23982 $ hg parents --template "{node|short}\n" -r 2 foo 2ef0ac749a14 $ get-with-headers.py 127.0.0.1:$HGPORT 'file/2/foo' 200 Script output follows test: 1d22e65f027e foo

                        view foo @ 2:1d22e65f027e stable

                        branch(websub)
                        author test
                        date Thu, 01 Jan 1970 00:00:00 +0000
                        parents 2ef0ac749a14
                        children
                        line wrap: on
                        line source
                          another
                        Overviews $ get-with-headers.py 127.0.0.1:$HGPORT 'raw-tags' 200 Script output follows tip cad8025a2e87f88c06259790adfa15acb4080123 1.0 2ef0ac749a14e4f57a5a822464a0902c6f7f448f $ get-with-headers.py 127.0.0.1:$HGPORT 'raw-branches' 200 Script output follows unstable cad8025a2e87f88c06259790adfa15acb4080123 open stable 1d22e65f027e5a0609357e7d8e7508cd2ba5d2fe inactive default a4f92ed23982be056b9852de5dfe873eaac7f0de inactive $ get-with-headers.py 127.0.0.1:$HGPORT 'raw-bookmarks' 200 Script output follows anotherthing 2ef0ac749a14e4f57a5a822464a0902c6f7f448f something cad8025a2e87f88c06259790adfa15acb4080123 $ get-with-headers.py 127.0.0.1:$HGPORT 'summary/?style=gitweb' 200 Script output follows test: Summary
                         
                        descriptionunknown
                        ownerFoo Bar <foo.bar@example.com>
                        last changeThu, 01 Jan 1970 00:00:00 +0000
                        Thu, 01 Jan 1970 00:00:00 +0000 test branch commit with null character: unstable tip something
                        Thu, 01 Jan 1970 00:00:00 +0000 test branch stable
                        Thu, 01 Jan 1970 00:00:00 +0000 test Added tag 1.0 for changeset 2ef0ac749a14 default
                        Thu, 01 Jan 1970 00:00:00 +0000 test base 1.0 anotherthing
                        ...
                        Thu, 01 Jan 1970 00:00:00 +0000 1.0
                        ...
                        Thu, 01 Jan 1970 00:00:00 +0000 anotherthing
                        Thu, 01 Jan 1970 00:00:00 +0000 something
                        ...
                        Thu, 01 Jan 1970 00:00:00 +0000 unstable
                        Thu, 01 Jan 1970 00:00:00 +0000 stable
                        Thu, 01 Jan 1970 00:00:00 +0000 default
                        ...
                        $ get-with-headers.py 127.0.0.1:$HGPORT 'graph/?style=gitweb' 200 Script output follows test: Graph
                         
                            raw graph $ get-with-headers.py 127.0.0.1:$HGPORT 'graph/?style=raw' 200 Script output follows # HG graph # Node ID cad8025a2e87f88c06259790adfa15acb4080123 # Rows shown 4 changeset: cad8025a2e87 user: test date: 1970-01-01 summary: branch commit with null character: \x00 (esc) branch: unstable tag: tip bookmark: something node: (0, 0) (color 1) edge: (0, 0) -> (0, 1) (color 1) changeset: 1d22e65f027e user: test date: 1970-01-01 summary: branch branch: stable node: (0, 1) (color 1) edge: (0, 1) -> (0, 2) (color 1) changeset: a4f92ed23982 user: test date: 1970-01-01 summary: Added tag 1.0 for changeset 2ef0ac749a14 branch: default node: (0, 2) (color 1) edge: (0, 2) -> (0, 3) (color 1) changeset: 2ef0ac749a14 user: test date: 1970-01-01 summary: base tag: 1.0 bookmark: anotherthing node: (0, 3) (color 1) capabilities $ get-with-headers.py 127.0.0.1:$HGPORT '?cmd=capabilities'; echo 200 Script output follows lookup changegroupsubset branchmap pushkey known getbundle unbundlehash batch bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 heads $ get-with-headers.py 127.0.0.1:$HGPORT '?cmd=heads' 200 Script output follows cad8025a2e87f88c06259790adfa15acb4080123 branches $ get-with-headers.py 127.0.0.1:$HGPORT '?cmd=branches&nodes=0000000000000000000000000000000000000000' 200 Script output follows 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 changegroup $ get-with-headers.py 127.0.0.1:$HGPORT '?cmd=changegroup&roots=0000000000000000000000000000000000000000' 200 Script output follows 
x\x9c\xbd\x94MHTQ\x14\xc7'+\x9d\xc66\x81\x89P\xc1\xa3\x14\xcct\xba\xef\xbe\xfb\xde\xbb\xcfr0\xb3"\x02\x11[%\x98\xdcO\xa7\xd2\x19\x98y\xd2\x07h"\x96\xa0e\xda\xa6lUY-\xca\x08\xa2\x82\x16\x96\xd1\xa2\xf0#\xc8\x95\x1b\xdd$!m*"\xc8\x82\xea\xbe\x9c\x01\x85\xc9\x996\x1d\xf8\xc1\xe3~\x9d\xff9\xef\x7f\xaf\xcf\xe7\xbb\x19\xfc4\xec^\xcb\x9b\xfbz\xa6\xbe\xb3\x90_\xef/\x8d\x9e\xad\xbe\xe4\xcb0\xd2\xec\xad\x12X:\xc8\x12\x12\xd9:\x95\xba \x1cG\xb7$\xc5\xc44\x1c(\x1d\x03\x03\xdb\x84\x0cK#\xe0\x8a\xb8\x1b\x00\x1a\x08p\xb2SF\xa3\x01\x8f\x00%q\xa1Ny{k!8\xe5t>[{\xe2j\xddl\xc3\xcf\xee\xd0\xddW\x9ff3U\x9djobj\xbb\x87E\x88\x05l\x001\x12\x18\x13\xc6 \xb7(\xe3\x02a\x80\x81\xcel.u\x9b\x1b\x8c\x91\x80Z\x0c\x15\x15 (esc) \x7f0\xdc\xe4\x92\xa6\xb87\x16\xf2\xcaT\x14\xef\xe1\\pM\r (no-eol) (esc) kz\x10h2\x1a\xd3X\x98D\x9aD\\\xb8\x1a\x14\x12\x10f#\x87\xe8H\xad\x1d\xd9\xb2\xf5}cV{}\xf6:\xb3\xbd\xad\xaf\xd5?\xb9\xe3\xf6\xd4\xcf\x15\x84.\x8bT{\x97\x16\xa4Z\xeaX\x10\xabL\xc8\x81DJ\xc8\x18\x00\xccq\x80A-j2j \x83\x1b\x02\x03O|PQ\xae\xc8W\x9d\xd7h\x8cDX\xb8<\xee\x12\xda,\xfe\xfc\x005\xb3K\xc1\x14\xd9\x8b\xb3^C\xc7\xa6\xb3\xea\x83\xdd\xdf.d\x17]\xe9\xbf\xff}\xe3\xf0#\xff\xaam+\x88Z\x16\xa9\xf6&tT+\xf2\x96\xe8h\x8d$\x94\xa8\xf1}\x8aC\x8a\xc2\xc59\x8dE[Z\x8e\xb9\xda\xc9cnX\x8b\xb467{\xad\x8e\x11\xe6\x8aX\xb9\x96L52\xbf\xb0\xff\xe3\x81M\x9fk\x07\xf3\x7f\xf4\x1c\xbe\xbc\x80s\xea^\x7fY\xc1\xca\xcb"\x8d\xbb\x1a\x16]\xea\x83\x82Cb8:$\x80Bd\x02\x08\x90!\x88P^\x12\x88B\xdba:\xa6\x0e\xe0<\xf0O\x8bU\x82\x81\xe3wr\xb2\xba\xe6{&\xcaNL\xceutln\xfb\xdc\xb6{,\xd3\x82\xd28IO\xb8\xd7G\x0cF!\x16\x86\x8d\x11@\x02A\xcb\xc2\x94Q\x04L\x01\x00u8\x86&0\xb0EtO\xd0\xc5\x9c#\xb4'\xef`\xc9\xaf\xd2\xd1\xf5\x83\xab\x9f<\x1e\x8fT\x84:R\x89L%\xe8/\xee \x8a>E\x99\xd7\x1dlZ\x08B\x1dc\xf5\\0\x83\x01B\x95Im\x1d[\x92s*\x99`L\xd7\x894e qfn\xb2 (esc) 
\xa5mh\xbc\xf8\xdd\xa9\xca\x9a*\xd9;^y\xd4\xf7t\xbah\xf5\xf9\x1b\x99\xfe\xe94\xcd*[zu\x05\x92\xa6ML\x82!D\x16"\xc0\x01\x90Y\xd2\x96\x08a\xe9\xdd\xfa\xa4\xb6\xc4#\xa6\xbexpjh\xa0$\xb7\xb0V\xdb\xfba\xbef\xee\xe1\xe9\x17\xbd\xfd3\x99JKc\xc25\x89+\xeaE\xce\xffK\x17>\xc7\xb7\x16tE^\x8e\xde\x0bu\x17Dg\x9e\xbf\x99\xd8\xf0\xa01\xd3\xbc+\xbc\x13k\x14~\x12\x89\xbaa\x11K\x96\xe5\xfb\r (no-eol) (esc) \x95)\xbe\xf6 (no-eol) (esc) stream_out $ get-with-headers.py 127.0.0.1:$HGPORT '?cmd=stream_out' 200 Script output follows 1 failing unbundle, requires POST request $ get-with-headers.py 127.0.0.1:$HGPORT '?cmd=unbundle' 405 push requires POST request 0 push requires POST request [1] Static files $ get-with-headers.py 127.0.0.1:$HGPORT 'static/style.css' 200 Script output follows a { text-decoration:none; } .age { white-space:nowrap; } .date { white-space:nowrap; } .indexlinks { white-space:nowrap; } .parity0 { background-color: #ddd; color: #000; } .parity1 { background-color: #eee; color: #000; } .lineno { width: 60px; color: #aaa; font-size: smaller; text-align: right; } .plusline { color: green; } .minusline { color: red; } .atline { color: purple; } .annotate { font-size: smaller; text-align: right; padding-right: 1em; } .buttons a { background-color: #666; padding: 2pt; color: white; font-family: sans-serif; font-weight: bold; } .navigate a { background-color: #ccc; padding: 2pt; font-family: sans-serif; color: black; } .metatag { background-color: #888; color: white; text-align: right; } /* Common */ pre { margin: 0; } .logo { float: right; clear: right; } /* Changelog/Filelog entries */ .logEntry { width: 100%; } .logEntry .age { width: 15%; } .logEntry th.label { width: 16em; } .logEntry th { font-weight: normal; text-align: right; vertical-align: top; } .logEntry th.age, .logEntry th.firstline { font-weight: bold; } .logEntry th.firstline { text-align: left; width: inherit; } /* Shortlog entries */ .slogEntry { width: 100%; } .slogEntry .age { width: 8em; } .slogEntry 
td { font-weight: normal; text-align: left; vertical-align: top; } .slogEntry td.author { width: 15em; } /* Tag entries */ #tagEntries { list-style: none; margin: 0; padding: 0; } #tagEntries .tagEntry { list-style: none; margin: 0; padding: 0; } /* Changeset entry */ #changesetEntry { } #changesetEntry th { font-weight: normal; background-color: #888; color: #fff; text-align: right; } #changesetEntry th.files, #changesetEntry th.description { vertical-align: top; } /* File diff view */ #filediffEntry { } #filediffEntry th { font-weight: normal; background-color: #888; color: #fff; text-align: right; } /* Graph */ div#wrapper { position: relative; margin: 0; padding: 0; } canvas { position: absolute; z-index: 5; top: -0.6em; margin: 0; } ul#nodebgs { list-style: none inside none; padding: 0; margin: 0; top: -0.7em; } ul#graphnodes li, ul#nodebgs li { height: 39px; } ul#graphnodes { position: absolute; z-index: 10; top: -0.85em; list-style: none inside none; padding: 0; } ul#graphnodes li .info { display: block; font-size: 70%; position: relative; top: -1px; } Stop and restart the server at the directory different from the repository root. Even in such case, file patterns should be resolved relative to the repository root. (issue4568) $ killdaemons.py $ hg serve --config server.preferuncompressed=True -n test \ > -p $HGPORT -d --pid-file=`pwd`/hg.pid -E `pwd`/errors.log \ > --cwd .. 
-R `pwd` $ cat hg.pid >> $DAEMON_PIDS $ get-with-headers.py 127.0.0.1:$HGPORT 'log?rev=adds("foo")&style=raw' 200 Script output follows # HG changesets search # Node ID cad8025a2e87f88c06259790adfa15acb4080123 # Query "adds("foo")" # Mode revset expression search changeset: 2ef0ac749a14e4f57a5a822464a0902c6f7f448f revision: 0 user: test date: Thu, 01 Jan 1970 00:00:00 +0000 summary: base tag: 1.0 bookmark: anotherthing Stop and restart with HGENCODING=cp932 and preferuncompressed $ killdaemons.py $ HGENCODING=cp932 hg serve --config server.preferuncompressed=True -n test \ > -p $HGPORT -d --pid-file=hg.pid -E errors.log $ cat hg.pid >> $DAEMON_PIDS commit message with Japanese Kanji 'Noh', which ends with '\x5c' $ echo foo >> foo $ HGENCODING=cp932 hg ci -m `$PYTHON -c 'print("\x94\x5c")'` Graph json escape of multibyte character $ get-with-headers.py 127.0.0.1:$HGPORT 'graph/' > out >>> for line in open("out"): ... if line.startswith("var data ="): ... print line, var data = [["061dd13ba3c3", [0, 1], [[0, 0, 1, -1, ""]], "\u80fd", "test", "1970-01-01", ["unstable", true], ["tip"], ["something"]], ["cad8025a2e87", [0, 1], [[0, 0, 1, 3, "FF0000"]], "branch commit with null character: \u0000", "test", "1970-01-01", ["unstable", false], [], []], ["1d22e65f027e", [0, 1], [[0, 0, 1, 3, ""]], "branch", "test", "1970-01-01", ["stable", true], [], []], ["a4f92ed23982", [0, 1], [[0, 0, 1, 3, ""]], "Added tag 1.0 for changeset 2ef0ac749a14", "test", "1970-01-01", ["default", true], [], []], ["2ef0ac749a14", [0, 1], [], "base", "test", "1970-01-01", ["default", false], ["1.0"], ["anotherthing"]]]; capabilities (plain version to check the format) $ get-with-headers.py 127.0.0.1:$HGPORT '?cmd=capabilities' | dd ibs=75 count=1 2> /dev/null; echo 200 Script output follows lookup changegroupsubset branchmap pushkey known (spread version to check the content) $ get-with-headers.py 127.0.0.1:$HGPORT '?cmd=capabilities' | tr ' ' '\n'; echo 200 Script output follows lookup 
changegroupsubset branchmap pushkey known getbundle unbundlehash batch stream-preferred streamreqs=generaldelta,revlogv1 bundle2=HG20%0Achangegroup%3D01%2C02%0Adigests%3Dmd5%2Csha1%2Csha512%0Aerror%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0Ahgtagsfnodes%0Alistkeys%0Apushkey%0Aremote-changegroup%3Dhttp%2Chttps unbundle=HG10GZ,HG10BZ,HG10UN httpheader=1024 heads ERRORS ENCOUNTERED $ cat errors.log $ killdaemons.py $ cd .. Test graph paging $ mkcommit() { > echo $1 >> a > hg ci -Am $1 a > } $ hg init graph $ cd graph $ mkcommit 0 $ mkcommit 1 $ mkcommit 2 $ mkcommit 3 $ mkcommit 4 $ mkcommit 5 $ hg serve --config server.uncompressed=False \ > --config web.maxshortchanges=2 \ > -n test -p $HGPORT -d --pid-file=hg.pid -E errors.log $ cat hg.pid >> $DAEMON_PIDS $ hg log -G --template '{rev}:{node|short} {desc}\n' @ 5:aed2d9c1d0e7 5 | o 4:b60a39a85a01 4 | o 3:ada793dcc118 3 | o 2:ab4f1438558b 2 | o 1:e06180cbfb0c 1 | o 0:b4e73ffab476 0 Test paging $ get-with-headers.py 127.0.0.1:$HGPORT \ > 'graph/?style=raw' | grep changeset changeset: aed2d9c1d0e7 changeset: b60a39a85a01 $ get-with-headers.py 127.0.0.1:$HGPORT \ > 'graph/?style=raw&revcount=3' | grep changeset changeset: aed2d9c1d0e7 changeset: b60a39a85a01 changeset: ada793dcc118 $ get-with-headers.py 127.0.0.1:$HGPORT \ > 'graph/e06180cbfb0?style=raw&revcount=3' | grep changeset changeset: e06180cbfb0c changeset: b4e73ffab476 $ get-with-headers.py 127.0.0.1:$HGPORT \ > 'graph/b4e73ffab47?style=raw&revcount=3' | grep changeset changeset: b4e73ffab476 $ cat errors.log bookmarks view doesn't choke on bookmarks on secret changesets (issue3774) $ hg phase -fs 4 $ hg bookmark -r4 secret $ cat > hgweb.cgi < from mercurial import demandimport; demandimport.enable() > from mercurial.hgweb import hgweb > from mercurial.hgweb import wsgicgi > app = hgweb('.', 'test') > wsgicgi.launch(app) > HGWEB $ . 
"$TESTDIR/cgienv" $ PATH_INFO=/bookmarks; export PATH_INFO $ QUERY_STRING='style=raw' $ python hgweb.cgi | grep -v ETag: Status: 200 Script output follows\r (esc) Content-Type: text/plain; charset=ascii\r (esc) \r (esc) listbookmarks hides secret bookmarks $ PATH_INFO=/; export PATH_INFO $ QUERY_STRING='cmd=listkeys&namespace=bookmarks' $ python hgweb.cgi Status: 200 Script output follows\r (esc) Content-Type: application/mercurial-0.1\r (esc) Content-Length: 0\r (esc) \r (esc) search works with filtering $ PATH_INFO=/log; export PATH_INFO $ QUERY_STRING='rev=babar' $ python hgweb.cgi > search $ grep Status search Status: 200 Script output follows\r (esc) summary works with filtering (issue3810) $ PATH_INFO=/summary; export PATH_INFO $ QUERY_STRING='style=monoblue'; export QUERY_STRING $ python hgweb.cgi > summary.out $ grep "^Status" summary.out Status: 200 Script output follows\r (esc) proper status for filtered revision (missing rev) $ PATH_INFO=/rev/5; export PATH_INFO $ QUERY_STRING='style=raw' $ python hgweb.cgi #> search Status: 404 Not Found\r (esc) ETag: *\r (glob) (esc) Content-Type: text/plain; charset=ascii\r (esc) \r (esc) error: filtered revision '5' (not in 'served' subset) (filtered rev) $ PATH_INFO=/rev/4; export PATH_INFO $ QUERY_STRING='style=raw' $ python hgweb.cgi #> search Status: 404 Not Found\r (esc) ETag: *\r (glob) (esc) Content-Type: text/plain; charset=ascii\r (esc) \r (esc) error: filtered revision '4' (not in 'served' subset) filtered '0' changeset (create new root) $ hg up null 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo 'babar' > jungle $ hg add jungle $ hg ci -m 'Babar is in the jungle!' 
created new head $ hg graft 0:: grafting 0:b4e73ffab476 "0" grafting 1:e06180cbfb0c "1" grafting 2:ab4f1438558b "2" grafting 3:ada793dcc118 "3" grafting 4:b60a39a85a01 "4" (secret) grafting 5:aed2d9c1d0e7 "5" (turning the initial root secret (filtered)) $ hg phase --force --secret 0 $ PATH_INFO=/graph/; export PATH_INFO $ QUERY_STRING='' $ python hgweb.cgi | grep Status Status: 200 Script output follows\r (esc) (check rendered revision) $ QUERY_STRING='style=raw' $ python hgweb.cgi | grep -v ETag Status: 200 Script output follows\r (esc) Content-Type: text/plain; charset=ascii\r (esc) \r (esc) # HG graph # Node ID 1d9b947fef1fbb382a95c11a8f5a67e9a10b5026 # Rows shown 7 changeset: 1d9b947fef1f user: test date: 1970-01-01 summary: 5 branch: default tag: tip node: (0, 0) (color 1) edge: (0, 0) -> (0, 1) (color 1) changeset: 0cfd435fd222 user: test date: 1970-01-01 summary: 4 node: (0, 1) (color 1) edge: (0, 1) -> (0, 2) (color 1) changeset: 6768b9939e82 user: test date: 1970-01-01 summary: 3 node: (0, 2) (color 1) edge: (0, 2) -> (0, 3) (color 1) changeset: 05b0497fd125 user: test date: 1970-01-01 summary: 2 node: (0, 3) (color 1) edge: (0, 3) -> (0, 4) (color 1) changeset: 9c102df67cfb user: test date: 1970-01-01 summary: 1 node: (0, 4) (color 1) edge: (0, 4) -> (0, 5) (color 1) changeset: 3ebcd7db11bf user: test date: 1970-01-01 summary: 0 node: (0, 5) (color 1) edge: (0, 5) -> (0, 6) (color 1) changeset: c5e9bd96ae01 user: test date: 1970-01-01 summary: Babar is in the jungle! node: (0, 6) (color 1) $ cd .. 
mercurial-3.7.3/tests/test-status-inprocess.py0000755000175000017500000000113112676531525021145 0ustar mpmmpm00000000000000#!/usr/bin/python from mercurial.ui import ui from mercurial.localrepo import localrepository from mercurial.commands import add, commit, status u = ui() print '% creating repo' repo = localrepository(u, '.', create=True) f = open('test.py', 'w') try: f.write('foo\n') finally: f.close print '% add and commit' add(u, repo, 'test.py') commit(u, repo, message='*') status(u, repo, clean=True) print '% change' f = open('test.py', 'w') try: f.write('bar\n') finally: f.close() # this would return clean instead of changed before the fix status(u, repo, clean=True, modified=True) mercurial-3.7.3/tests/test-context.py0000644000175000017500000001031612676531525017305 0ustar mpmmpm00000000000000import os from mercurial import hg, ui, context, encoding u = ui.ui() repo = hg.repository(u, 'test1', create=1) os.chdir('test1') # create 'foo' with fixed time stamp f = open('foo', 'wb') f.write('foo\n') f.close() os.utime('foo', (1000, 1000)) # add+commit 'foo' repo[None].add(['foo']) repo.commit(text='commit1', date="0 0") if os.name == 'nt': d = repo[None]['foo'].date() print "workingfilectx.date = (%d, %d)" % (d[0], d[1]) else: print "workingfilectx.date =", repo[None]['foo'].date() # test memctx with non-ASCII commit message def filectxfn(repo, memctx, path): return context.memfilectx(repo, "foo", "") ctx = context.memctx(repo, ['tip', None], encoding.tolocal("Gr\xc3\xbcezi!"), ["foo"], filectxfn) ctx.commit() for enc in "ASCII", "Latin-1", "UTF-8": encoding.encoding = enc print "%-8s: %s" % (enc, repo["tip"].description()) # test performing a status def getfilectx(repo, memctx, f): fctx = memctx.parents()[0][f] data, flags = fctx.data(), fctx.flags() if f == 'foo': data += 'bar\n' return context.memfilectx(repo, f, data, 'l' in flags, 'x' in flags) ctxa = repo.changectx(0) ctxb = context.memctx(repo, [ctxa.node(), None], "test diff", ["foo"], getfilectx, 
ctxa.user(), ctxa.date()) print ctxb.status(ctxa) # test performing a diff on a memctx for d in ctxb.diff(ctxa, git=True): print d # test safeness and correctness of "ctx.status()" print '= checking context.status():' # ancestor "wcctx ~ 2" actx2 = repo['.'] repo.wwrite('bar-m', 'bar-m\n', '') repo.wwrite('bar-r', 'bar-r\n', '') repo[None].add(['bar-m', 'bar-r']) repo.commit(text='add bar-m, bar-r', date="0 0") # ancestor "wcctx ~ 1" actx1 = repo['.'] repo.wwrite('bar-m', 'bar-m bar-m\n', '') repo.wwrite('bar-a', 'bar-a\n', '') repo[None].add(['bar-a']) repo[None].forget(['bar-r']) # status at this point: # M bar-m # A bar-a # R bar-r # C foo from mercurial import scmutil print '== checking workingctx.status:' wctx = repo[None] print 'wctx._status=%s' % (str(wctx._status)) print '=== with "pattern match":' print actx1.status(other=wctx, match=scmutil.matchfiles(repo, ['bar-m', 'foo'])) print 'wctx._status=%s' % (str(wctx._status)) print actx2.status(other=wctx, match=scmutil.matchfiles(repo, ['bar-m', 'foo'])) print 'wctx._status=%s' % (str(wctx._status)) print '=== with "always match" and "listclean=True":' print actx1.status(other=wctx, listclean=True) print 'wctx._status=%s' % (str(wctx._status)) print actx2.status(other=wctx, listclean=True) print 'wctx._status=%s' % (str(wctx._status)) print "== checking workingcommitctx.status:" wcctx = context.workingcommitctx(repo, scmutil.status(['bar-m'], ['bar-a'], [], [], [], [], []), text='', date='0 0') print 'wcctx._status=%s' % (str(wcctx._status)) print '=== with "always match":' print actx1.status(other=wcctx) print 'wcctx._status=%s' % (str(wcctx._status)) print actx2.status(other=wcctx) print 'wcctx._status=%s' % (str(wcctx._status)) print '=== with "always match" and "listclean=True":' print actx1.status(other=wcctx, listclean=True) print 'wcctx._status=%s' % (str(wcctx._status)) print actx2.status(other=wcctx, listclean=True) print 'wcctx._status=%s' % (str(wcctx._status)) print '=== with "pattern match":' 
print actx1.status(other=wcctx, match=scmutil.matchfiles(repo, ['bar-m', 'foo'])) print 'wcctx._status=%s' % (str(wcctx._status)) print actx2.status(other=wcctx, match=scmutil.matchfiles(repo, ['bar-m', 'foo'])) print 'wcctx._status=%s' % (str(wcctx._status)) print '=== with "pattern match" and "listclean=True":' print actx1.status(other=wcctx, match=scmutil.matchfiles(repo, ['bar-r', 'foo']), listclean=True) print 'wcctx._status=%s' % (str(wcctx._status)) print actx2.status(other=wcctx, match=scmutil.matchfiles(repo, ['bar-r', 'foo']), listclean=True) print 'wcctx._status=%s' % (str(wcctx._status)) mercurial-3.7.3/tests/test-update-renames.t0000644000175000017500000000074612676531525020354 0ustar mpmmpm00000000000000Test update logic when there are renames Update with local changes across a file rename $ hg init $ echo a > a $ hg add a $ hg ci -m a $ hg mv a b $ hg ci -m rename $ echo b > b $ hg ci -m change $ hg up -q 0 $ echo c > a $ hg up merging a and b to b warning: conflicts while merging b! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges [1] mercurial-3.7.3/tests/test-convert-clonebranches.t0000644000175000017500000000371612676531525021726 0ustar mpmmpm00000000000000 $ cat <> $HGRCPATH > [extensions] > convert = > [convert] > hg.tagsbranch = 0 > EOF $ hg init source $ cd source $ echo a > a $ hg ci -qAm adda Add a merge with one parent in the same branch $ echo a >> a $ hg ci -qAm changea $ hg up -qC 0 $ hg branch branch0 marked working directory as branch branch0 (branches are permanent and global, did you want a bookmark?) $ echo b > b $ hg ci -qAm addb $ hg up -qC $ hg merge default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -qm mergeab $ hg tag -ql mergeab $ cd .. Miss perl... 
sometimes $ cat > filter.py < import sys, re > > r = re.compile(r'^(?:\d+|pulling from)') > sys.stdout.writelines([l for l in sys.stdin if r.search(l)]) > EOF convert $ hg convert -v --config convert.hg.clonebranches=1 source dest | > python filter.py 3 adda 2 changea 1 addb pulling from default into branch0 1 changesets found 0 mergeab pulling from default into branch0 1 changesets found Add a merge with both parents and child in different branches $ cd source $ hg branch branch1 marked working directory as branch branch1 $ echo a > file1 $ hg ci -qAm c1 $ hg up -qC mergeab $ hg branch branch2 marked working directory as branch branch2 $ echo a > file2 $ hg ci -qAm c2 $ hg merge branch1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg branch branch3 marked working directory as branch branch3 $ hg ci -qAm c3 $ cd .. incremental conversion $ hg convert -v --config convert.hg.clonebranches=1 source dest | > python filter.py 2 c1 pulling from branch0 into branch1 4 changesets found 1 c2 pulling from branch0 into branch2 4 changesets found 0 c3 pulling from branch1 into branch3 5 changesets found pulling from branch2 into branch3 1 changesets found mercurial-3.7.3/tests/test-histedit-non-commute.t0000644000175000017500000002007712676531525021515 0ustar mpmmpm00000000000000 $ . "$TESTDIR/histedit-helpers.sh" $ cat >> $HGRCPATH < [extensions] > histedit= > EOF $ initrepo () > { > hg init $1 > cd $1 > for x in a b c d e f ; do > echo $x$x$x$x$x > $x > hg add $x > done > hg ci -m 'Initial commit' > for x in a b c d e f ; do > echo $x > $x > hg ci -m $x > done > echo 'I can haz no commute' > e > hg ci -m 'does not commute with e' > cd .. 
> } $ initrepo r1 $ cd r1 Initial generation of the command files $ EDITED="$TESTTMP/editedhistory" $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 3 >> $EDITED $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 4 >> $EDITED $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 7 >> $EDITED $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 5 >> $EDITED $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 6 >> $EDITED $ cat $EDITED pick 65a9a84f33fd 3 c pick 00f1c5383965 4 d pick 39522b764e3d 7 does not commute with e pick 7b4e2f4b7bcd 5 e pick 500cac37a696 6 f log before edit $ hg log --graph @ changeset: 7:39522b764e3d | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: does not commute with e | o changeset: 6:500cac37a696 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: f | o changeset: 5:7b4e2f4b7bcd | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: e | o changeset: 4:00f1c5383965 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: d | o changeset: 3:65a9a84f33fd | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: c | o changeset: 2:da6535b52e45 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 1:c1f09da44841 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: a | o changeset: 0:1715188a53c7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Initial commit edit the history $ hg histedit 3 --commands $EDITED 2>&1 | fixbundle 2 files updated, 0 files merged, 0 files removed, 0 files unresolved merging e warning: conflicts while merging e! 
(edit, then use 'hg resolve --mark') Fix up the change (pick 39522b764e3d) (hg histedit --continue to resume) abort the edit $ hg histedit --abort 2>&1 | fixbundle 2 files updated, 0 files merged, 0 files removed, 0 files unresolved second edit set $ hg log --graph @ changeset: 7:39522b764e3d | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: does not commute with e | o changeset: 6:500cac37a696 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: f | o changeset: 5:7b4e2f4b7bcd | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: e | o changeset: 4:00f1c5383965 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: d | o changeset: 3:65a9a84f33fd | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: c | o changeset: 2:da6535b52e45 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 1:c1f09da44841 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: a | o changeset: 0:1715188a53c7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Initial commit edit the history $ hg histedit 3 --commands $EDITED 2>&1 | fixbundle 2 files updated, 0 files merged, 0 files removed, 0 files unresolved merging e warning: conflicts while merging e! (edit, then use 'hg resolve --mark') Fix up the change (pick 39522b764e3d) (hg histedit --continue to resume) fix up $ echo 'I can haz no commute' > e $ hg resolve --mark e (no more unresolved files) continue: hg histedit --continue $ hg histedit --continue 2>&1 | fixbundle merging e warning: conflicts while merging e! (edit, then use 'hg resolve --mark') Fix up the change (pick 7b4e2f4b7bcd) (hg histedit --continue to resume) This failure is caused by 7b4e2f4b7bcd "e" not rebasing the non commutative former children. 
just continue this time $ hg revert -r 'p1()' e $ hg resolve --mark e (no more unresolved files) continue: hg histedit --continue $ hg histedit --continue 2>&1 | fixbundle 7b4e2f4b7bcd: empty changeset log after edit $ hg log --graph @ changeset: 6:7efe1373e4bc | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: f | o changeset: 5:e334d87a1e55 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: does not commute with e | o changeset: 4:00f1c5383965 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: d | o changeset: 3:65a9a84f33fd | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: c | o changeset: 2:da6535b52e45 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 1:c1f09da44841 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: a | o changeset: 0:1715188a53c7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Initial commit start over $ cd .. $ initrepo r2 $ cd r2 $ rm $EDITED $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 3 >> $EDITED $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 4 >> $EDITED $ hg log --template 'mess {node|short} {rev} {desc}\n' -r 7 >> $EDITED $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 5 >> $EDITED $ hg log --template 'pick {node|short} {rev} {desc}\n' -r 6 >> $EDITED $ cat $EDITED pick 65a9a84f33fd 3 c pick 00f1c5383965 4 d mess 39522b764e3d 7 does not commute with e pick 7b4e2f4b7bcd 5 e pick 500cac37a696 6 f edit the history, this time with a fold action $ hg histedit 3 --commands $EDITED 2>&1 | fixbundle 2 files updated, 0 files merged, 0 files removed, 0 files unresolved merging e warning: conflicts while merging e! 
(edit, then use 'hg resolve --mark') Fix up the change (mess 39522b764e3d) (hg histedit --continue to resume) $ echo 'I can haz no commute' > e $ hg resolve --mark e (no more unresolved files) continue: hg histedit --continue $ hg histedit --continue 2>&1 | fixbundle merging e warning: conflicts while merging e! (edit, then use 'hg resolve --mark') Fix up the change (pick 7b4e2f4b7bcd) (hg histedit --continue to resume) second edit also fails, but just continue $ hg revert -r 'p1()' e $ hg resolve --mark e (no more unresolved files) continue: hg histedit --continue $ hg histedit --continue 2>&1 | fixbundle 7b4e2f4b7bcd: empty changeset post message fix $ hg log --graph @ changeset: 6:7efe1373e4bc | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: f | o changeset: 5:e334d87a1e55 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: does not commute with e | o changeset: 4:00f1c5383965 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: d | o changeset: 3:65a9a84f33fd | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: c | o changeset: 2:da6535b52e45 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b | o changeset: 1:c1f09da44841 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: a | o changeset: 0:1715188a53c7 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: Initial commit $ cd .. 
mercurial-3.7.3/tests/test-update-branches.t0000644000175000017500000001554212676531525020507 0ustar mpmmpm00000000000000# Construct the following history tree: # # @ 5:e1bb631146ca b1 # | # o 4:a4fdb3b883c4 0:b608b9236435 b1 # | # | o 3:4b57d2520816 1:44592833ba9f # | | # | | o 2:063f31070f65 # | |/ # | o 1:44592833ba9f # |/ # o 0:b608b9236435 $ mkdir b1 $ cd b1 $ hg init $ echo foo > foo $ echo zero > a $ hg init sub $ echo suba > sub/suba $ hg --cwd sub ci -Am addsuba adding suba $ echo 'sub = sub' > .hgsub $ hg ci -qAm0 $ echo one > a ; hg ci -m1 $ echo two > a ; hg ci -m2 $ hg up -q 1 $ echo three > a ; hg ci -qm3 $ hg up -q 0 $ hg branch -q b1 $ echo four > a ; hg ci -qm4 $ echo five > a ; hg ci -qm5 Initial repo state: $ hg log -G --template '{rev}:{node|short} {parents} {branches}\n' @ 5:ff252e8273df b1 | o 4:d047485b3896 0:60829823a42a b1 | | o 3:6efa171f091b 1:0786582aa4b1 | | | | o 2:bd10386d478c | |/ | o 1:0786582aa4b1 |/ o 0:60829823a42a Make sure update doesn't assume b1 is a repository if invoked from outside: $ cd .. $ hg update b1 abort: no repository found in '$TESTTMP' (.hg not found)! [255] $ cd b1 Test helper functions: $ revtest () { > msg=$1 > dirtyflag=$2 # 'clean', 'dirty' or 'dirtysub' > startrev=$3 > targetrev=$4 > opt=$5 > hg up -qC $startrev > test $dirtyflag = dirty && echo dirty > foo > test $dirtyflag = dirtysub && echo dirty > sub/suba > hg up $opt $targetrev > hg parent --template 'parent={rev}\n' > hg stat -S > } $ norevtest () { > msg=$1 > dirtyflag=$2 # 'clean', 'dirty' or 'dirtysub' > startrev=$3 > opt=$4 > hg up -qC $startrev > test $dirtyflag = dirty && echo dirty > foo > test $dirtyflag = dirtysub && echo dirty > sub/suba > hg up $opt > hg parent --template 'parent={rev}\n' > hg stat -S > } Test cases are documented in a table in the update function of merge.py. Cases are run as shown in that table, row by row. 
$ norevtest 'none clean linear' clean 4 1 files updated, 0 files merged, 0 files removed, 0 files unresolved parent=5 $ norevtest 'none clean same' clean 2 abort: not a linear update (merge or update --check to force update) parent=2 $ revtest 'none clean linear' clean 1 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved parent=2 $ revtest 'none clean same' clean 2 3 1 files updated, 0 files merged, 0 files removed, 0 files unresolved parent=3 $ revtest 'none clean cross' clean 3 4 1 files updated, 0 files merged, 0 files removed, 0 files unresolved parent=4 $ revtest 'none dirty linear' dirty 1 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved parent=2 M foo $ revtest 'none dirtysub linear' dirtysub 1 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved parent=2 M sub/suba $ revtest 'none dirty same' dirty 2 3 abort: uncommitted changes (commit or update --clean to discard changes) parent=2 M foo $ revtest 'none dirtysub same' dirtysub 2 3 abort: uncommitted changes (commit or update --clean to discard changes) parent=2 M sub/suba $ revtest 'none dirty cross' dirty 3 4 abort: uncommitted changes (commit or update --clean to discard changes) parent=3 M foo $ norevtest 'none dirty cross' dirty 2 abort: uncommitted changes (commit and merge, or update --clean to discard changes) parent=2 M foo $ revtest 'none dirtysub cross' dirtysub 3 4 abort: uncommitted changes (commit or update --clean to discard changes) parent=3 M sub/suba $ revtest '-C dirty linear' dirty 1 2 -C 2 files updated, 0 files merged, 0 files removed, 0 files unresolved parent=2 $ revtest '-c dirty linear' dirty 1 2 -c abort: uncommitted changes parent=1 M foo $ revtest '-c dirtysub linear' dirtysub 1 2 -c abort: uncommitted changes in subrepository 'sub' parent=1 M sub/suba $ norevtest '-c clean same' clean 2 -c 1 files updated, 0 files merged, 0 files removed, 0 files unresolved parent=3 $ revtest '-cC dirty linear' dirty 1 2 -cC abort: 
cannot specify both -c/--check and -C/--clean parent=1 M foo Test obsolescence behavior --------------------------------------------------------------------- successors should be taken in account when checking head destination $ cat << EOF >> $HGRCPATH > [ui] > logtemplate={rev}:{node|short} {desc|firstline} > [experimental] > evolution=createmarkers > EOF Test no-argument update to a successor of an obsoleted changeset $ hg log -G o 5:ff252e8273df 5 | o 4:d047485b3896 4 | | o 3:6efa171f091b 3 | | | | o 2:bd10386d478c 2 | |/ | @ 1:0786582aa4b1 1 |/ o 0:60829823a42a 0 $ hg book bm -r 3 $ hg status M foo We add simple obsolescence marker between 3 and 4 (indirect successors) $ hg id --debug -i -r 3 6efa171f091b00a3c35edc15d48c52a498929953 $ hg id --debug -i -r 4 d047485b3896813b2a624e86201983520f003206 $ hg debugobsolete 6efa171f091b00a3c35edc15d48c52a498929953 aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa $ hg debugobsolete aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa d047485b3896813b2a624e86201983520f003206 Test that 5 is detected as a valid destination from 3 and also accepts moving the bookmark (issue4015) $ hg up --quiet --hidden 3 $ hg up 5 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg book bm moving bookmark 'bm' forward from 6efa171f091b $ hg bookmarks * bm 5:ff252e8273df Test that 4 is detected as the no-argument destination from 3 and also moves the bookmark with it $ hg up --quiet 0 # we should be able to update to 3 directly $ hg up --quiet --hidden 3 # but not implemented yet. $ hg book -f bm $ hg up 1 files updated, 0 files merged, 0 files removed, 0 files unresolved updating bookmark bm $ hg book * bm 4:d047485b3896 Test that 5 is detected as a valid destination from 1 $ hg up --quiet 0 # we should be able to update to 3 directly $ hg up --quiet --hidden 3 # but not implemented yet. 
$ hg up 5 1 files updated, 0 files merged, 0 files removed, 0 files unresolved Test that 5 is not detected as a valid destination from 2 $ hg up --quiet 0 $ hg up --quiet 2 $ hg up 5 abort: uncommitted changes (commit or update --clean to discard changes) [255] Test that we don't crash when updating from a pruned changeset (i.e. has no successors). Behavior should probably be that we update to the first non-obsolete parent but that will be decided later. $ hg id --debug -r 2 bd10386d478cd5a9faf2e604114c8e6da62d3889 $ hg up --quiet 0 $ hg up --quiet 2 $ hg debugobsolete bd10386d478cd5a9faf2e604114c8e6da62d3889 $ hg up 0 files updated, 0 files merged, 0 files removed, 0 files unresolved Test experimental revset support $ hg log -r '_destupdate()' 2:bd10386d478c 2 (no-eol) mercurial-3.7.3/tests/test-hgweb-no-path-info.t0000644000175000017500000000725612676531525021036 0ustar mpmmpm00000000000000This tests if hgweb and hgwebdir still work if the REQUEST_URI variable is no longer passed with the request. Instead, SCRIPT_NAME and PATH_INFO should be used from d74fc8dec2b4 onward to route the request. 
$ hg init repo $ cd repo $ echo foo > bar $ hg add bar $ hg commit -m "test" $ hg tip changeset: 0:61c9426e69fe tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: test $ cat > request.py < from mercurial.hgweb import hgweb, hgwebdir > from StringIO import StringIO > import os, sys > > errors = StringIO() > input = StringIO() > > def startrsp(status, headers): > print '---- STATUS' > print status > print '---- HEADERS' > print [i for i in headers if i[0] != 'ETag'] > print '---- DATA' > return output.write > > env = { > 'wsgi.version': (1, 0), > 'wsgi.url_scheme': 'http', > 'wsgi.errors': errors, > 'wsgi.input': input, > 'wsgi.multithread': False, > 'wsgi.multiprocess': False, > 'wsgi.run_once': False, > 'REQUEST_METHOD': 'GET', > 'PATH_INFO': '/', > 'SCRIPT_NAME': '', > 'SERVER_NAME': '127.0.0.1', > 'SERVER_PORT': os.environ['HGPORT'], > 'SERVER_PROTOCOL': 'HTTP/1.0' > } > > def process(app): > content = app(env, startrsp) > sys.stdout.write(output.getvalue()) > sys.stdout.write(''.join(content)) > getattr(content, 'close', lambda : None)() > print '---- ERRORS' > print errors.getvalue() > > output = StringIO() > env['QUERY_STRING'] = 'style=atom' > process(hgweb('.', name='repo')) > > output = StringIO() > env['QUERY_STRING'] = 'style=raw' > process(hgwebdir({'repo': '.'})) > EOF $ python request.py ---- STATUS 200 Script output follows ---- HEADERS [('Content-Type', 'application/atom+xml; charset=ascii')] ---- DATA http://127.0.0.1:$HGPORT/ repo Changelog 1970-01-01T00:00:00+00:00 [default] test http://127.0.0.1:$HGPORT/#changeset-61c9426e69fef294feed5e2bbfc97d39944a5b1c test test 1970-01-01T00:00:00+00:00 1970-01-01T00:00:00+00:00
                            changeset 61c9426e69fe
                            branch default
                            bookmark
                            tag tip
                            user test
                            description test
                            files bar
                            ---- ERRORS ---- STATUS 200 Script output follows ---- HEADERS [('Content-Type', 'text/plain; charset=ascii')] ---- DATA /repo/ ---- ERRORS $ cd .. mercurial-3.7.3/tests/test-extension.t0000644000175000017500000011562512676531525017461 0ustar mpmmpm00000000000000Test basic extension support $ cat > foobar.py < import os > from mercurial import cmdutil, commands > cmdtable = {} > command = cmdutil.command(cmdtable) > def uisetup(ui): > ui.write("uisetup called\\n") > def reposetup(ui, repo): > ui.write("reposetup called for %s\\n" % os.path.basename(repo.root)) > ui.write("ui %s= repo.ui\\n" % (ui == repo.ui and "=" or "!")) > @command('foo', [], 'hg foo') > def foo(ui, *args, **kwargs): > ui.write("Foo\\n") > @command('bar', [], 'hg bar', norepo=True) > def bar(ui, *args, **kwargs): > ui.write("Bar\\n") > EOF $ abspath=`pwd`/foobar.py $ mkdir barfoo $ cp foobar.py barfoo/__init__.py $ barfoopath=`pwd`/barfoo $ hg init a $ cd a $ echo foo > file $ hg add file $ hg commit -m 'add file' $ echo '[extensions]' >> $HGRCPATH $ echo "foobar = $abspath" >> $HGRCPATH $ hg foo uisetup called reposetup called for a ui == repo.ui Foo $ cd .. $ hg clone a b uisetup called reposetup called for a ui == repo.ui reposetup called for b ui == repo.ui updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg bar uisetup called Bar $ echo 'foobar = !' >> $HGRCPATH module/__init__.py-style $ echo "barfoo = $barfoopath" >> $HGRCPATH $ cd a $ hg foo uisetup called reposetup called for a ui == repo.ui Foo $ echo 'barfoo = !' 
>> $HGRCPATH Check that extensions are loaded in phases: $ cat > foo.py < import os > name = os.path.basename(__file__).rsplit('.', 1)[0] > print "1) %s imported" % name > def uisetup(ui): > print "2) %s uisetup" % name > def extsetup(): > print "3) %s extsetup" % name > def reposetup(ui, repo): > print "4) %s reposetup" % name > EOF $ cp foo.py bar.py $ echo 'foo = foo.py' >> $HGRCPATH $ echo 'bar = bar.py' >> $HGRCPATH Command with no output, we just want to see the extensions loaded: $ hg paths 1) foo imported 1) bar imported 2) foo uisetup 2) bar uisetup 3) foo extsetup 3) bar extsetup 4) foo reposetup 4) bar reposetup Check hgweb's load order: $ cat > hgweb.cgi < #!/usr/bin/env python > from mercurial import demandimport; demandimport.enable() > from mercurial.hgweb import hgweb > from mercurial.hgweb import wsgicgi > application = hgweb('.', 'test repo') > wsgicgi.launch(application) > EOF $ REQUEST_METHOD='GET' PATH_INFO='/' SCRIPT_NAME='' QUERY_STRING='' \ > SERVER_PORT='80' SERVER_NAME='localhost' python hgweb.cgi \ > | grep '^[0-9]) ' # ignores HTML output 1) foo imported 1) bar imported 2) foo uisetup 2) bar uisetup 3) foo extsetup 3) bar extsetup 4) foo reposetup 4) bar reposetup $ echo 'foo = !' >> $HGRCPATH $ echo 'bar = !' 
>> $HGRCPATH Check "from __future__ import absolute_import" support for external libraries #if windows $ PATHSEP=";" #else $ PATHSEP=":" #endif $ export PATHSEP $ mkdir $TESTTMP/libroot $ echo "s = 'libroot/ambig.py'" > $TESTTMP/libroot/ambig.py $ mkdir $TESTTMP/libroot/mod $ touch $TESTTMP/libroot/mod/__init__.py $ echo "s = 'libroot/mod/ambig.py'" > $TESTTMP/libroot/mod/ambig.py #if absimport $ cat > $TESTTMP/libroot/mod/ambigabs.py < from __future__ import absolute_import > import ambig # should load "libroot/ambig.py" > s = ambig.s > EOF $ cat > loadabs.py < import mod.ambigabs as ambigabs > def extsetup(): > print 'ambigabs.s=%s' % ambigabs.s > EOF $ (PYTHONPATH=${PYTHONPATH}${PATHSEP}${TESTTMP}/libroot; hg --config extensions.loadabs=loadabs.py root) ambigabs.s=libroot/ambig.py $TESTTMP/a (glob) #endif #if no-py3k $ cat > $TESTTMP/libroot/mod/ambigrel.py < import ambig # should load "libroot/mod/ambig.py" > s = ambig.s > EOF $ cat > loadrel.py < import mod.ambigrel as ambigrel > def extsetup(): > print 'ambigrel.s=%s' % ambigrel.s > EOF $ (PYTHONPATH=${PYTHONPATH}${PATHSEP}${TESTTMP}/libroot; hg --config extensions.loadrel=loadrel.py root) ambigrel.s=libroot/mod/ambig.py $TESTTMP/a (glob) #endif Check absolute/relative import of extension specific modules $ mkdir $TESTTMP/extroot $ cat > $TESTTMP/extroot/bar.py < s = 'this is extroot.bar' > EOF $ mkdir $TESTTMP/extroot/sub1 $ cat > $TESTTMP/extroot/sub1/__init__.py < s = 'this is extroot.sub1.__init__' > EOF $ cat > $TESTTMP/extroot/sub1/baz.py < s = 'this is extroot.sub1.baz' > EOF $ cat > $TESTTMP/extroot/__init__.py < s = 'this is extroot.__init__' > import foo > def extsetup(ui): > ui.write('(extroot) ', foo.func(), '\n') > EOF $ cat > $TESTTMP/extroot/foo.py < # test absolute import > buf = [] > def func(): > # "not locals" case > import extroot.bar > buf.append('import extroot.bar in func(): %s' % extroot.bar.s) > return '\n(extroot) '.join(buf) > # "fromlist == ('*',)" case > from extroot.bar import * 
> buf.append('from extroot.bar import *: %s' % s) > # "not fromlist" and "if '.' in name" case > import extroot.sub1.baz > buf.append('import extroot.sub1.baz: %s' % extroot.sub1.baz.s) > # "not fromlist" and NOT "if '.' in name" case > import extroot > buf.append('import extroot: %s' % extroot.s) > # NOT "not fromlist" and NOT "level != -1" case > from extroot.bar import s > buf.append('from extroot.bar import s: %s' % s) > EOF $ (PYTHONPATH=${PYTHONPATH}${PATHSEP}${TESTTMP}; hg --config extensions.extroot=$TESTTMP/extroot root) (extroot) from extroot.bar import *: this is extroot.bar (extroot) import extroot.sub1.baz: this is extroot.sub1.baz (extroot) import extroot: this is extroot.__init__ (extroot) from extroot.bar import s: this is extroot.bar (extroot) import extroot.bar in func(): this is extroot.bar $TESTTMP/a (glob) #if no-py3k $ rm "$TESTTMP"/extroot/foo.* $ cat > $TESTTMP/extroot/foo.py < # test relative import > buf = [] > def func(): > # "not locals" case > import bar > buf.append('import bar in func(): %s' % bar.s) > return '\n(extroot) '.join(buf) > # "fromlist == ('*',)" case > from bar import * > buf.append('from bar import *: %s' % s) > # "not fromlist" and "if '.' in name" case > import sub1.baz > buf.append('import sub1.baz: %s' % sub1.baz.s) > # "not fromlist" and NOT "if '.' in name" case > import sub1 > buf.append('import sub1: %s' % sub1.s) > # NOT "not fromlist" and NOT "level != -1" case > from bar import s > buf.append('from bar import s: %s' % s) > EOF $ hg --config extensions.extroot=$TESTTMP/extroot root (extroot) from bar import *: this is extroot.bar (extroot) import sub1.baz: this is extroot.sub1.baz (extroot) import sub1: this is extroot.sub1.__init__ (extroot) from bar import s: this is extroot.bar (extroot) import bar in func(): this is extroot.bar $TESTTMP/a (glob) #endif $ cd .. 
hide outer repo $ hg init $ cat > empty.py < '''empty cmdtable > ''' > cmdtable = {} > EOF $ emptypath=`pwd`/empty.py $ echo "empty = $emptypath" >> $HGRCPATH $ hg help empty empty extension - empty cmdtable no commands defined $ echo 'empty = !' >> $HGRCPATH $ cat > debugextension.py < '''only debugcommands > ''' > from mercurial import cmdutil > cmdtable = {} > command = cmdutil.command(cmdtable) > @command('debugfoobar', [], 'hg debugfoobar') > def debugfoobar(ui, repo, *args, **opts): > "yet another debug command" > pass > @command('foo', [], 'hg foo') > def foo(ui, repo, *args, **opts): > """yet another foo command > This command has been DEPRECATED since forever. > """ > pass > EOF $ debugpath=`pwd`/debugextension.py $ echo "debugextension = $debugpath" >> $HGRCPATH $ hg help debugextension hg debugextensions show information about active extensions options: (some details hidden, use --verbose to show complete help) $ hg --verbose help debugextension hg debugextensions show information about active extensions options: -T --template TEMPLATE display with template (EXPERIMENTAL) global options ([+] can be repeated): -R --repository REPO repository root directory or name of overlay bundle file --cwd DIR change working directory -y --noninteractive do not prompt, automatically pick the first choice for all prompts -q --quiet suppress output -v --verbose enable additional output --config CONFIG [+] set/override config option (use 'section.name=value') --debug enable debugging output --debugger start debugger --encoding ENCODE set the charset encoding (default: ascii) --encodingmode MODE set the charset encoding mode (default: strict) --traceback always print a traceback on exception --time time how long the command takes --profile print command execution profile --version output version information and exit -h --help display help and exit --hidden consider hidden changesets $ hg --debug help debugextension hg debugextensions show information about active 
extensions options: -T --template TEMPLATE display with template (EXPERIMENTAL) global options ([+] can be repeated): -R --repository REPO repository root directory or name of overlay bundle file --cwd DIR change working directory -y --noninteractive do not prompt, automatically pick the first choice for all prompts -q --quiet suppress output -v --verbose enable additional output --config CONFIG [+] set/override config option (use 'section.name=value') --debug enable debugging output --debugger start debugger --encoding ENCODE set the charset encoding (default: ascii) --encodingmode MODE set the charset encoding mode (default: strict) --traceback always print a traceback on exception --time time how long the command takes --profile print command execution profile --version output version information and exit -h --help display help and exit --hidden consider hidden changesets $ echo 'debugextension = !' >> $HGRCPATH Asking for help about a deprecated extension should do something useful: $ hg help glog 'glog' is provided by the following extension: graphlog command to view revision graphs from a shell (DEPRECATED) (use "hg help extensions" for information on enabling extensions) Extension module help vs command help: $ echo 'extdiff =' >> $HGRCPATH $ hg help extdiff hg extdiff [OPT]... [FILE]... use external program to diff repository (or selected files) Show differences between revisions for the specified files, using an external program. The default program used is diff, with default options "-Npru". To select a different program, use the -p/--program option. The program will be passed the names of two directories to compare. To pass additional options to the program, use -o/--option. These will be passed before the names of the directories to compare. When two revision arguments are given, then changes are shown between those revisions. 
If only one revision is specified then that revision is compared to the working directory, and, when no revisions are specified, the working directory files are compared to its parent. (use "hg help -e extdiff" to show help for the extdiff extension) options ([+] can be repeated): -p --program CMD comparison program to run -o --option OPT [+] pass option to comparison program -r --rev REV [+] revision -c --change REV change made by revision --patch compare patches for two revisions -I --include PATTERN [+] include names matching the given patterns -X --exclude PATTERN [+] exclude names matching the given patterns -S --subrepos recurse into subrepositories (some details hidden, use --verbose to show complete help) $ hg help --extension extdiff extdiff extension - command to allow external programs to compare revisions The extdiff Mercurial extension allows you to use external programs to compare revisions, or revision with working directory. The external diff programs are called with a configurable set of options and two non-option arguments: paths to directories containing snapshots of files to compare. The extdiff extension also allows you to configure new diff commands, so you do not need to type 'hg extdiff -p kdiff3' always. [extdiff] # add new command that runs GNU diff(1) in 'context diff' mode cdiff = gdiff -Nprc5 ## or the old way: #cmd.cdiff = gdiff #opts.cdiff = -Nprc5 # add new command called meld, runs meld (no need to name twice). 
If # the meld executable is not available, the meld tool in [merge-tools] # will be used, if available meld = # add new command called vimdiff, runs gvimdiff with DirDiff plugin # (see http://www.vim.org/scripts/script.php?script_id=102) Non # English user, be sure to put "let g:DirDiffDynamicDiffText = 1" in # your .vimrc vimdiff = gvim -f "+next" \ "+execute 'DirDiff' fnameescape(argv(0)) fnameescape(argv(1))" Tool arguments can include variables that are expanded at runtime: $parent1, $plabel1 - filename, descriptive label of first parent $child, $clabel - filename, descriptive label of child revision $parent2, $plabel2 - filename, descriptive label of second parent $root - repository root $parent is an alias for $parent1. The extdiff extension will look in your [diff-tools] and [merge-tools] sections for diff tool arguments, when none are specified in [extdiff]. [extdiff] kdiff3 = [diff-tools] kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child You can use -I/-X and list of file or directory names like normal 'hg diff' command. The extdiff extension makes snapshots of only needed files, so running the external diff program will actually be pretty fast (at least faster than having to compare the entire tree). list of commands: extdiff use external program to diff repository (or selected files) (use "hg help -v -e extdiff" to show built-in aliases and global options) $ echo 'extdiff = !' 
>> $HGRCPATH Test help topic with same name as extension $ cat > multirevs.py < from mercurial import cmdutil, commands > cmdtable = {} > command = cmdutil.command(cmdtable) > """multirevs extension > Big multi-line module docstring.""" > @command('multirevs', [], 'ARG', norepo=True) > def multirevs(ui, repo, arg, *args, **opts): > """multirevs command""" > pass > EOF $ echo "multirevs = multirevs.py" >> $HGRCPATH $ hg help multirevs Specifying Multiple Revisions """"""""""""""""""""""""""""" When Mercurial accepts more than one revision, they may be specified individually, or provided as a topologically continuous range, separated by the ":" character. The syntax of range notation is [BEGIN]:[END], where BEGIN and END are revision identifiers. Both BEGIN and END are optional. If BEGIN is not specified, it defaults to revision number 0. If END is not specified, it defaults to the tip. The range ":" thus means "all revisions". If BEGIN is greater than END, revisions are treated in reverse order. A range acts as a closed interval. This means that a range of 3:5 gives 3, 4 and 5. Similarly, a range of 9:6 gives 9, 8, 7, and 6. use "hg help -c multirevs" to see help for the multirevs command $ hg help -c multirevs hg multirevs ARG multirevs command (some details hidden, use --verbose to show complete help) $ hg multirevs hg multirevs: invalid arguments hg multirevs ARG multirevs command (use "hg multirevs -h" to show more help) [255] $ echo "multirevs = !" >> $HGRCPATH Issue811: Problem loading extensions twice (by site and by user) $ cat <> $HGRCPATH > mq = > strip = > hgext.mq = > hgext/mq = > EOF Show extensions: (note that mq force load strip, also checking it's not loaded twice) $ hg debugextensions mq strip For extensions, which name matches one of its commands, help message should ask '-v -e' to get list of built-in aliases along with extension help itself $ mkdir $TESTTMP/d $ cat > $TESTTMP/d/dodo.py < """ > This is an awesome 'dodo' extension. 
It does nothing and > writes 'Foo foo' > """ > from mercurial import cmdutil, commands > cmdtable = {} > command = cmdutil.command(cmdtable) > @command('dodo', [], 'hg dodo') > def dodo(ui, *args, **kwargs): > """Does nothing""" > ui.write("I do nothing. Yay\\n") > @command('foofoo', [], 'hg foofoo') > def foofoo(ui, *args, **kwargs): > """Writes 'Foo foo'""" > ui.write("Foo foo\\n") > EOF $ dodopath=$TESTTMP/d/dodo.py $ echo "dodo = $dodopath" >> $HGRCPATH Make sure that user is asked to enter '-v -e' to get list of built-in aliases $ hg help -e dodo dodo extension - This is an awesome 'dodo' extension. It does nothing and writes 'Foo foo' list of commands: dodo Does nothing foofoo Writes 'Foo foo' (use "hg help -v -e dodo" to show built-in aliases and global options) Make sure that '-v -e' prints list of built-in aliases along with extension help itself $ hg help -v -e dodo dodo extension - This is an awesome 'dodo' extension. It does nothing and writes 'Foo foo' list of commands: dodo Does nothing foofoo Writes 'Foo foo' global options ([+] can be repeated): -R --repository REPO repository root directory or name of overlay bundle file --cwd DIR change working directory -y --noninteractive do not prompt, automatically pick the first choice for all prompts -q --quiet suppress output -v --verbose enable additional output --config CONFIG [+] set/override config option (use 'section.name=value') --debug enable debugging output --debugger start debugger --encoding ENCODE set the charset encoding (default: ascii) --encodingmode MODE set the charset encoding mode (default: strict) --traceback always print a traceback on exception --time time how long the command takes --profile print command execution profile --version output version information and exit -h --help display help and exit --hidden consider hidden changesets Make sure that single '-v' option shows help and built-ins only for 'dodo' command $ hg help -v dodo hg dodo Does nothing (use "hg help -e dodo" to 
show help for the dodo extension) options: --mq operate on patch repository global options ([+] can be repeated): -R --repository REPO repository root directory or name of overlay bundle file --cwd DIR change working directory -y --noninteractive do not prompt, automatically pick the first choice for all prompts -q --quiet suppress output -v --verbose enable additional output --config CONFIG [+] set/override config option (use 'section.name=value') --debug enable debugging output --debugger start debugger --encoding ENCODE set the charset encoding (default: ascii) --encodingmode MODE set the charset encoding mode (default: strict) --traceback always print a traceback on exception --time time how long the command takes --profile print command execution profile --version output version information and exit -h --help display help and exit --hidden consider hidden changesets In case when extension name doesn't match any of its commands, help message should ask for '-v' to get list of built-in aliases along with extension help $ cat > $TESTTMP/d/dudu.py < """ > This is an awesome 'dudu' extension. It does something and > also writes 'Beep beep' > """ > from mercurial import cmdutil, commands > cmdtable = {} > command = cmdutil.command(cmdtable) > @command('something', [], 'hg something') > def something(ui, *args, **kwargs): > """Does something""" > ui.write("I do something. Yaaay\\n") > @command('beep', [], 'hg beep') > def beep(ui, *args, **kwargs): > """Writes 'Beep beep'""" > ui.write("Beep beep\\n") > EOF $ dudupath=$TESTTMP/d/dudu.py $ echo "dudu = $dudupath" >> $HGRCPATH $ hg help -e dudu dudu extension - This is an awesome 'dudu' extension. 
It does something and also writes 'Beep beep' list of commands: beep Writes 'Beep beep' something Does something (use "hg help -v dudu" to show built-in aliases and global options) In case when extension name doesn't match any of its commands, help options '-v' and '-v -e' should be equivalent $ hg help -v dudu dudu extension - This is an awesome 'dudu' extension. It does something and also writes 'Beep beep' list of commands: beep Writes 'Beep beep' something Does something global options ([+] can be repeated): -R --repository REPO repository root directory or name of overlay bundle file --cwd DIR change working directory -y --noninteractive do not prompt, automatically pick the first choice for all prompts -q --quiet suppress output -v --verbose enable additional output --config CONFIG [+] set/override config option (use 'section.name=value') --debug enable debugging output --debugger start debugger --encoding ENCODE set the charset encoding (default: ascii) --encodingmode MODE set the charset encoding mode (default: strict) --traceback always print a traceback on exception --time time how long the command takes --profile print command execution profile --version output version information and exit -h --help display help and exit --hidden consider hidden changesets $ hg help -v -e dudu dudu extension - This is an awesome 'dudu' extension. 
It does something and also writes 'Beep beep' list of commands: beep Writes 'Beep beep' something Does something global options ([+] can be repeated): -R --repository REPO repository root directory or name of overlay bundle file --cwd DIR change working directory -y --noninteractive do not prompt, automatically pick the first choice for all prompts -q --quiet suppress output -v --verbose enable additional output --config CONFIG [+] set/override config option (use 'section.name=value') --debug enable debugging output --debugger start debugger --encoding ENCODE set the charset encoding (default: ascii) --encodingmode MODE set the charset encoding mode (default: strict) --traceback always print a traceback on exception --time time how long the command takes --profile print command execution profile --version output version information and exit -h --help display help and exit --hidden consider hidden changesets Disabled extension commands: $ ORGHGRCPATH=$HGRCPATH $ HGRCPATH= $ export HGRCPATH $ hg help email 'email' is provided by the following extension: patchbomb command to send changesets as (a series of) patch emails (use "hg help extensions" for information on enabling extensions) $ hg qdel hg: unknown command 'qdel' 'qdelete' is provided by the following extension: mq manage a stack of patches (use "hg help extensions" for information on enabling extensions) [255] $ hg churn hg: unknown command 'churn' 'churn' is provided by the following extension: churn command to display statistics about repository history (use "hg help extensions" for information on enabling extensions) [255] Disabled extensions: $ hg help churn churn extension - command to display statistics about repository history (use "hg help extensions" for information on enabling extensions) $ hg help patchbomb patchbomb extension - command to send changesets as (a series of) patch emails (use "hg help extensions" for information on enabling extensions) Broken disabled extension and command: $ mkdir 
hgext $ echo > hgext/__init__.py $ cat > hgext/broken.py < "broken extension' > EOF $ cat > path.py < import os, sys > sys.path.insert(0, os.environ['HGEXTPATH']) > EOF $ HGEXTPATH=`pwd` $ export HGEXTPATH $ hg --config extensions.path=./path.py help broken broken extension - (no help text available) (use "hg help extensions" for information on enabling extensions) $ cat > hgext/forest.py < cmdtable = None > EOF $ hg --config extensions.path=./path.py help foo > /dev/null warning: error finding commands in $TESTTMP/hgext/forest.py (glob) abort: no such help topic: foo (try "hg help --keyword foo") [255] $ cat > throw.py < from mercurial import cmdutil, commands, util > cmdtable = {} > command = cmdutil.command(cmdtable) > class Bogon(Exception): pass > @command('throw', [], 'hg throw', norepo=True) > def throw(ui, **opts): > """throws an exception""" > raise Bogon() > EOF No declared supported version, extension complains: $ hg --config extensions.throw=throw.py throw 2>&1 | egrep '^\*\*' ** Unknown exception encountered with possibly-broken third-party extension throw ** which supports versions unknown of Mercurial. ** Please disable throw and try your action again. ** If that fixes the bug please report it to the extension author. ** Python * (glob) ** Mercurial Distributed SCM * (glob) ** Extensions loaded: throw empty declaration of supported version, extension complains: $ echo "testedwith = ''" >> throw.py $ hg --config extensions.throw=throw.py throw 2>&1 | egrep '^\*\*' ** Unknown exception encountered with possibly-broken third-party extension throw ** which supports versions unknown of Mercurial. ** Please disable throw and try your action again. ** If that fixes the bug please report it to the extension author. 
** Python * (glob) ** Mercurial Distributed SCM (*) (glob) ** Extensions loaded: throw If the extension specifies a buglink, show that: $ echo 'buglink = "http://example.com/bts"' >> throw.py $ rm -f throw.pyc throw.pyo $ hg --config extensions.throw=throw.py throw 2>&1 | egrep '^\*\*' ** Unknown exception encountered with possibly-broken third-party extension throw ** which supports versions unknown of Mercurial. ** Please disable throw and try your action again. ** If that fixes the bug please report it to http://example.com/bts ** Python * (glob) ** Mercurial Distributed SCM (*) (glob) ** Extensions loaded: throw If the extensions declare outdated versions, accuse the older extension first: $ echo "from mercurial import util" >> older.py $ echo "util.version = lambda:'2.2'" >> older.py $ echo "testedwith = '1.9.3'" >> older.py $ echo "testedwith = '2.1.1'" >> throw.py $ rm -f throw.pyc throw.pyo $ hg --config extensions.throw=throw.py --config extensions.older=older.py \ > throw 2>&1 | egrep '^\*\*' ** Unknown exception encountered with possibly-broken third-party extension older ** which supports versions 1.9 of Mercurial. ** Please disable older and try your action again. ** If that fixes the bug please report it to the extension author. ** Python * (glob) ** Mercurial Distributed SCM (version 2.2) ** Extensions loaded: throw, older One extension only tested with older, one only with newer versions: $ echo "util.version = lambda:'2.1'" >> older.py $ rm -f older.pyc older.pyo $ hg --config extensions.throw=throw.py --config extensions.older=older.py \ > throw 2>&1 | egrep '^\*\*' ** Unknown exception encountered with possibly-broken third-party extension older ** which supports versions 1.9 of Mercurial. ** Please disable older and try your action again. ** If that fixes the bug please report it to the extension author. 
** Python * (glob) ** Mercurial Distributed SCM (version 2.1) ** Extensions loaded: throw, older Older extension is tested with current version, the other only with newer: $ echo "util.version = lambda:'1.9.3'" >> older.py $ rm -f older.pyc older.pyo $ hg --config extensions.throw=throw.py --config extensions.older=older.py \ > throw 2>&1 | egrep '^\*\*' ** Unknown exception encountered with possibly-broken third-party extension throw ** which supports versions 2.1 of Mercurial. ** Please disable throw and try your action again. ** If that fixes the bug please report it to http://example.com/bts ** Python * (glob) ** Mercurial Distributed SCM (version 1.9.3) ** Extensions loaded: throw, older Ability to point to a different point $ hg --config extensions.throw=throw.py --config extensions.older=older.py \ > --config ui.supportcontact='Your Local Goat Lenders' throw 2>&1 | egrep '^\*\*' ** unknown exception encountered, please report by visiting ** Your Local Goat Lenders ** Python * (glob) ** Mercurial Distributed SCM (*) (glob) ** Extensions loaded: throw, older Declare the version as supporting this hg version, show regular bts link: $ hgver=`$PYTHON -c 'from mercurial import util; print util.version().split("+")[0]'` $ echo 'testedwith = """'"$hgver"'"""' >> throw.py $ if [ -z "$hgver" ]; then > echo "unable to fetch a mercurial version. 
Make sure __version__ is correct"; > fi $ rm -f throw.pyc throw.pyo $ hg --config extensions.throw=throw.py throw 2>&1 | egrep '^\*\*' ** unknown exception encountered, please report by visiting ** https://mercurial-scm.org/wiki/BugTracker ** Python * (glob) ** Mercurial Distributed SCM (*) (glob) ** Extensions loaded: throw Patch version is ignored during compatibility check $ echo "testedwith = '3.2'" >> throw.py $ echo "util.version = lambda:'3.2.2'" >> throw.py $ rm -f throw.pyc throw.pyo $ hg --config extensions.throw=throw.py throw 2>&1 | egrep '^\*\*' ** unknown exception encountered, please report by visiting ** https://mercurial-scm.org/wiki/BugTracker ** Python * (glob) ** Mercurial Distributed SCM (*) (glob) ** Extensions loaded: throw Test version number support in 'hg version': $ echo '__version__ = (1, 2, 3)' >> throw.py $ rm -f throw.pyc throw.pyo $ hg version -v Mercurial Distributed SCM (version *) (glob) (see https://mercurial-scm.org for more information) Copyright (C) 2005-* Matt Mackall and others (glob) This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. Enabled extensions: $ hg version -v --config extensions.throw=throw.py Mercurial Distributed SCM (version *) (glob) (see https://mercurial-scm.org for more information) Copyright (C) 2005-* Matt Mackall and others (glob) This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. Enabled extensions: throw 1.2.3 $ echo 'getversion = lambda: "1.twentythree"' >> throw.py $ rm -f throw.pyc throw.pyo $ hg version -v --config extensions.throw=throw.py Mercurial Distributed SCM (version *) (glob) (see https://mercurial-scm.org for more information) Copyright (C) 2005-* Matt Mackall and others (glob) This is free software; see the source for copying conditions. 
There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. Enabled extensions: throw 1.twentythree Refuse to load extensions with minimum version requirements $ cat > minversion1.py << EOF > from mercurial import util > util.version = lambda: '3.5.2' > minimumhgversion = '3.6' > EOF $ hg --config extensions.minversion=minversion1.py version (third party extension minversion requires version 3.6 or newer of Mercurial; disabling) Mercurial Distributed SCM (version 3.5.2) (see https://mercurial-scm.org for more information) Copyright (C) 2005-* Matt Mackall and others (glob) This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. $ cat > minversion2.py << EOF > from mercurial import util > util.version = lambda: '3.6' > minimumhgversion = '3.7' > EOF $ hg --config extensions.minversion=minversion2.py version 2>&1 | egrep '\(third' (third party extension minversion requires version 3.7 or newer of Mercurial; disabling) Can load version that is only off by point release $ cat > minversion2.py << EOF > from mercurial import util > util.version = lambda: '3.6.1' > minimumhgversion = '3.6' > EOF $ hg --config extensions.minversion=minversion3.py version 2>&1 | egrep '\(third' [1] Can load minimum version identical to current $ cat > minversion3.py << EOF > from mercurial import util > util.version = lambda: '3.5' > minimumhgversion = '3.5' > EOF $ hg --config extensions.minversion=minversion3.py version 2>&1 | egrep '\(third' [1] Restore HGRCPATH $ HGRCPATH=$ORGHGRCPATH $ export HGRCPATH Commands handling multiple repositories at a time should invoke only "reposetup()" of extensions enabling in the target repository. 
$ mkdir reposetup-test $ cd reposetup-test $ cat > $TESTTMP/reposetuptest.py < from mercurial import extensions > def reposetup(ui, repo): > ui.write('reposetup() for %s\n' % (repo.root)) > EOF $ hg init src $ echo a > src/a $ hg -R src commit -Am '#0 at src/a' adding a $ echo '[extensions]' >> src/.hg/hgrc $ echo '# enable extension locally' >> src/.hg/hgrc $ echo "reposetuptest = $TESTTMP/reposetuptest.py" >> src/.hg/hgrc $ hg -R src status reposetup() for $TESTTMP/reposetup-test/src (glob) $ hg clone -U src clone-dst1 reposetup() for $TESTTMP/reposetup-test/src (glob) $ hg init push-dst1 $ hg -q -R src push push-dst1 reposetup() for $TESTTMP/reposetup-test/src (glob) $ hg init pull-src1 $ hg -q -R pull-src1 pull src reposetup() for $TESTTMP/reposetup-test/src (glob) $ cat <> $HGRCPATH > [extensions] > # disable extension globally and explicitly > reposetuptest = ! > EOF $ hg clone -U src clone-dst2 reposetup() for $TESTTMP/reposetup-test/src (glob) $ hg init push-dst2 $ hg -q -R src push push-dst2 reposetup() for $TESTTMP/reposetup-test/src (glob) $ hg init pull-src2 $ hg -q -R pull-src2 pull src reposetup() for $TESTTMP/reposetup-test/src (glob) $ cat <> $HGRCPATH > [extensions] > # enable extension globally > reposetuptest = $TESTTMP/reposetuptest.py > EOF $ hg clone -U src clone-dst3 reposetup() for $TESTTMP/reposetup-test/src (glob) reposetup() for $TESTTMP/reposetup-test/clone-dst3 (glob) $ hg init push-dst3 reposetup() for $TESTTMP/reposetup-test/push-dst3 (glob) $ hg -q -R src push push-dst3 reposetup() for $TESTTMP/reposetup-test/src (glob) reposetup() for $TESTTMP/reposetup-test/push-dst3 (glob) $ hg init pull-src3 reposetup() for $TESTTMP/reposetup-test/pull-src3 (glob) $ hg -q -R pull-src3 pull src reposetup() for $TESTTMP/reposetup-test/pull-src3 (glob) reposetup() for $TESTTMP/reposetup-test/src (glob) $ echo '[extensions]' >> src/.hg/hgrc $ echo '# disable extension locally' >> src/.hg/hgrc $ echo 'reposetuptest = !' 
>> src/.hg/hgrc $ hg clone -U src clone-dst4 reposetup() for $TESTTMP/reposetup-test/clone-dst4 (glob) $ hg init push-dst4 reposetup() for $TESTTMP/reposetup-test/push-dst4 (glob) $ hg -q -R src push push-dst4 reposetup() for $TESTTMP/reposetup-test/push-dst4 (glob) $ hg init pull-src4 reposetup() for $TESTTMP/reposetup-test/pull-src4 (glob) $ hg -q -R pull-src4 pull src reposetup() for $TESTTMP/reposetup-test/pull-src4 (glob) disabling in command line overlays with all configuration $ hg --config extensions.reposetuptest=! clone -U src clone-dst5 $ hg --config extensions.reposetuptest=! init push-dst5 $ hg --config extensions.reposetuptest=! -q -R src push push-dst5 $ hg --config extensions.reposetuptest=! init pull-src5 $ hg --config extensions.reposetuptest=! -q -R pull-src5 pull src $ cat <> $HGRCPATH > [extensions] > # disable extension globally and explicitly > reposetuptest = ! > EOF $ hg init parent $ hg init parent/sub1 $ echo 1 > parent/sub1/1 $ hg -R parent/sub1 commit -Am '#0 at parent/sub1' adding 1 $ hg init parent/sub2 $ hg init parent/sub2/sub21 $ echo 21 > parent/sub2/sub21/21 $ hg -R parent/sub2/sub21 commit -Am '#0 at parent/sub2/sub21' adding 21 $ cat > parent/sub2/.hgsub < sub21 = sub21 > EOF $ hg -R parent/sub2 commit -Am '#0 at parent/sub2' adding .hgsub $ hg init parent/sub3 $ echo 3 > parent/sub3/3 $ hg -R parent/sub3 commit -Am '#0 at parent/sub3' adding 3 $ cat > parent/.hgsub < sub1 = sub1 > sub2 = sub2 > sub3 = sub3 > EOF $ hg -R parent commit -Am '#0 at parent' adding .hgsub $ echo '[extensions]' >> parent/.hg/hgrc $ echo '# enable extension locally' >> parent/.hg/hgrc $ echo "reposetuptest = $TESTTMP/reposetuptest.py" >> parent/.hg/hgrc $ cp parent/.hg/hgrc parent/sub2/.hg/hgrc $ hg -R parent status -S -A reposetup() for $TESTTMP/reposetup-test/parent (glob) reposetup() for $TESTTMP/reposetup-test/parent/sub2 (glob) C .hgsub C .hgsubstate C sub1/1 C sub2/.hgsub C sub2/.hgsubstate C sub2/sub21/21 C sub3/3 $ cd .. 
Test synopsis and docstring extending $ hg init exthelp $ cat > exthelp.py < from mercurial import commands, extensions > def exbookmarks(orig, *args, **opts): > return orig(*args, **opts) > def uisetup(ui): > synopsis = ' GREPME [--foo] [-x]' > docstring = ''' > GREPME make sure that this is in the help! > ''' > extensions.wrapcommand(commands.table, 'bookmarks', exbookmarks, > synopsis, docstring) > EOF $ abspath=`pwd`/exthelp.py $ echo '[extensions]' >> $HGRCPATH $ echo "exthelp = $abspath" >> $HGRCPATH $ cd exthelp $ hg help bookmarks | grep GREPME hg bookmarks [OPTIONS]... [NAME]... GREPME [--foo] [-x] GREPME make sure that this is in the help! mercurial-3.7.3/tests/test-parents.t0000644000175000017500000000611412676531525017111 0ustar mpmmpm00000000000000test parents command $ hg init repo $ cd repo no working directory $ hg parents $ echo a > a $ echo b > b $ hg ci -Amab -d '0 0' adding a adding b $ echo a >> a $ hg ci -Ama -d '1 0' $ echo b >> b $ hg ci -Amb -d '2 0' $ echo c > c $ hg ci -Amc -d '3 0' adding c $ hg up -C 1 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo d > c $ hg ci -Amc2 -d '4 0' adding c created new head $ hg up -C 3 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg parents changeset: 3:02d851b7e549 user: test date: Thu Jan 01 00:00:03 1970 +0000 summary: c $ hg parents a changeset: 1:d786049f033a user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: a hg parents c, single revision $ hg parents c changeset: 3:02d851b7e549 user: test date: Thu Jan 01 00:00:03 1970 +0000 summary: c $ hg parents -r 3 c abort: 'c' not found in manifest! 
[255] $ hg parents -r 2 changeset: 1:d786049f033a user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: a $ hg parents -r 2 a changeset: 1:d786049f033a user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: a $ hg parents -r 2 ../a abort: ../a not under root '$TESTTMP/repo' (glob) [255] cd dir; hg parents -r 2 ../a $ mkdir dir $ cd dir $ hg parents -r 2 ../a changeset: 1:d786049f033a user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: a $ hg parents -r 2 path:a changeset: 1:d786049f033a user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: a $ cd .. $ hg parents -r 2 glob:a abort: can only specify an explicit filename [255] merge working dir with 2 parents, hg parents c $ HGMERGE=true hg merge merging c 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg parents c changeset: 3:02d851b7e549 user: test date: Thu Jan 01 00:00:03 1970 +0000 summary: c changeset: 4:48cee28d4b4e tag: tip parent: 1:d786049f033a user: test date: Thu Jan 01 00:00:04 1970 +0000 summary: c2 merge working dir with 1 parent, hg parents $ hg up -C 2 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ HGMERGE=true hg merge -r 4 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg parents changeset: 2:6cfac479f009 user: test date: Thu Jan 01 00:00:02 1970 +0000 summary: b changeset: 4:48cee28d4b4e tag: tip parent: 1:d786049f033a user: test date: Thu Jan 01 00:00:04 1970 +0000 summary: c2 merge working dir with 1 parent, hg parents c $ hg parents c changeset: 4:48cee28d4b4e tag: tip parent: 1:d786049f033a user: test date: Thu Jan 01 00:00:04 1970 +0000 summary: c2 $ cd .. 
mercurial-3.7.3/tests/test-win32text.t0000644000175000017500000002054412676531525017307 0ustar mpmmpm00000000000000 $ hg init t $ cd t $ cat > unix2dos.py < import sys > > for path in sys.argv[1:]: > data = file(path, 'rb').read() > data = data.replace('\n', '\r\n') > file(path, 'wb').write(data) > EOF $ echo '[hooks]' >> .hg/hgrc $ echo 'pretxncommit.crlf = python:hgext.win32text.forbidcrlf' >> .hg/hgrc $ echo 'pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf' >> .hg/hgrc $ cat .hg/hgrc [hooks] pretxncommit.crlf = python:hgext.win32text.forbidcrlf pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf $ echo hello > f $ hg add f commit should succeed $ hg ci -m 1 $ hg clone . ../zoz updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cp .hg/hgrc ../zoz/.hg $ python unix2dos.py f commit should fail $ hg ci -m 2.1 attempt to commit or push text file(s) using CRLF line endings in f583ea08d42a: f transaction abort! rollback completed abort: pretxncommit.crlf hook failed [255] $ mv .hg/hgrc .hg/hgrc.bak commits should succeed $ hg ci -m 2 $ hg cp f g $ hg ci -m 2.2 push should fail $ hg push ../zoz pushing to ../zoz searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files attempt to commit or push text file(s) using CRLF line endings in bc2d09796734: g in b1aa5cde7ff4: f To prevent this mistake in your local repository, add to Mercurial.ini or .hg/hgrc: [hooks] pretxncommit.crlf = python:hgext.win32text.forbidcrlf and also consider adding: [extensions] win32text = [encode] ** = cleverencode: [decode] ** = cleverdecode: transaction abort! 
rollback completed abort: pretxnchangegroup.crlf hook failed [255] $ mv .hg/hgrc.bak .hg/hgrc $ echo hello > f $ hg rm g commit should succeed $ hg ci -m 2.3 push should succeed $ hg push ../zoz pushing to ../zoz searching for changes adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 2 files and now for something completely different $ mkdir d $ echo hello > d/f2 $ python unix2dos.py d/f2 $ hg add d/f2 $ hg ci -m 3 attempt to commit or push text file(s) using CRLF line endings in 053ba1a3035a: d/f2 transaction abort! rollback completed abort: pretxncommit.crlf hook failed [255] $ hg revert -a forgetting d/f2 (glob) $ rm d/f2 $ hg rem f $ hg ci -m 4 $ $PYTHON -c 'file("bin", "wb").write("hello\x00\x0D\x0A")' $ hg add bin $ hg ci -m 5 $ hg log -v changeset: 5:f0b1c8d75fce tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 files: bin description: 5 changeset: 4:77796dbcd4ad user: test date: Thu Jan 01 00:00:00 1970 +0000 files: f description: 4 changeset: 3:7c1b5430b350 user: test date: Thu Jan 01 00:00:00 1970 +0000 files: f g description: 2.3 changeset: 2:bc2d09796734 user: test date: Thu Jan 01 00:00:00 1970 +0000 files: g description: 2.2 changeset: 1:b1aa5cde7ff4 user: test date: Thu Jan 01 00:00:00 1970 +0000 files: f description: 2 changeset: 0:fcf06d5c4e1d user: test date: Thu Jan 01 00:00:00 1970 +0000 files: f description: 1 $ hg clone . 
dupe updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ for x in a b c d; do echo content > dupe/$x; done $ hg -R dupe add adding dupe/a (glob) adding dupe/b (glob) adding dupe/c (glob) adding dupe/d (glob) $ python unix2dos.py dupe/b dupe/c dupe/d $ hg -R dupe ci -m a dupe/a $ hg -R dupe ci -m b/c dupe/[bc] $ hg -R dupe ci -m d dupe/d $ hg -R dupe log -v changeset: 8:67ac5962ab43 tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 files: d description: d changeset: 7:68c127d1834e user: test date: Thu Jan 01 00:00:00 1970 +0000 files: b c description: b/c changeset: 6:adbf8bf7f31d user: test date: Thu Jan 01 00:00:00 1970 +0000 files: a description: a changeset: 5:f0b1c8d75fce user: test date: Thu Jan 01 00:00:00 1970 +0000 files: bin description: 5 changeset: 4:77796dbcd4ad user: test date: Thu Jan 01 00:00:00 1970 +0000 files: f description: 4 changeset: 3:7c1b5430b350 user: test date: Thu Jan 01 00:00:00 1970 +0000 files: f g description: 2.3 changeset: 2:bc2d09796734 user: test date: Thu Jan 01 00:00:00 1970 +0000 files: g description: 2.2 changeset: 1:b1aa5cde7ff4 user: test date: Thu Jan 01 00:00:00 1970 +0000 files: f description: 2 changeset: 0:fcf06d5c4e1d user: test date: Thu Jan 01 00:00:00 1970 +0000 files: f description: 1 $ hg pull dupe pulling from dupe searching for changes adding changesets adding manifests adding file changes added 3 changesets with 4 changes to 4 files attempt to commit or push text file(s) using CRLF line endings in 67ac5962ab43: d in 68c127d1834e: b in 68c127d1834e: c To prevent this mistake in your local repository, add to Mercurial.ini or .hg/hgrc: [hooks] pretxncommit.crlf = python:hgext.win32text.forbidcrlf and also consider adding: [extensions] win32text = [encode] ** = cleverencode: [decode] ** = cleverdecode: transaction abort! 
rollback completed abort: pretxnchangegroup.crlf hook failed [255] $ hg log -v changeset: 5:f0b1c8d75fce tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 files: bin description: 5 changeset: 4:77796dbcd4ad user: test date: Thu Jan 01 00:00:00 1970 +0000 files: f description: 4 changeset: 3:7c1b5430b350 user: test date: Thu Jan 01 00:00:00 1970 +0000 files: f g description: 2.3 changeset: 2:bc2d09796734 user: test date: Thu Jan 01 00:00:00 1970 +0000 files: g description: 2.2 changeset: 1:b1aa5cde7ff4 user: test date: Thu Jan 01 00:00:00 1970 +0000 files: f description: 2 changeset: 0:fcf06d5c4e1d user: test date: Thu Jan 01 00:00:00 1970 +0000 files: f description: 1 $ rm .hg/hgrc $ (echo some; echo text) > f3 $ $PYTHON -c 'file("f4.bat", "wb").write("rem empty\x0D\x0A")' $ hg add f3 f4.bat $ hg ci -m 6 $ cat bin hello\x00\r (esc) $ cat f3 some text $ cat f4.bat rem empty\r (esc) $ echo '[extensions]' >> .hg/hgrc $ echo 'win32text = ' >> .hg/hgrc $ echo '[decode]' >> .hg/hgrc $ echo '** = cleverdecode:' >> .hg/hgrc $ echo '[encode]' >> .hg/hgrc $ echo '** = cleverencode:' >> .hg/hgrc $ cat .hg/hgrc [extensions] win32text = [decode] ** = cleverdecode: [encode] ** = cleverencode: Trigger deprecation warning: $ hg id -t win32text is deprecated: https://mercurial-scm.org/wiki/Win32TextExtension tip Disable warning: $ echo '[win32text]' >> .hg/hgrc $ echo 'warn = no' >> .hg/hgrc $ hg id -t tip $ rm f3 f4.bat bin $ hg co -C WARNING: f4.bat already has CRLF line endings and does not need EOL conversion by the win32text plugin. Before your next commit, please reconsider your encode/decode settings in Mercurial.ini or $TESTTMP/t/.hg/hgrc. 
(glob) 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat bin hello\x00\r (esc) $ cat f3 some\r (esc) text\r (esc) $ cat f4.bat rem empty\r (esc) $ $PYTHON -c 'file("f5.sh", "wb").write("# empty\x0D\x0A")' $ hg add f5.sh $ hg ci -m 7 $ cat f5.sh # empty\r (esc) $ hg cat f5.sh # empty $ echo '% just linefeed' > linefeed $ hg ci -qAm 8 linefeed $ cat linefeed % just linefeed $ hg cat linefeed % just linefeed $ hg st -q $ hg revert -a linefeed no changes needed to linefeed $ cat linefeed % just linefeed $ hg st -q $ echo modified >> linefeed $ hg st -q M linefeed $ hg revert -a reverting linefeed $ hg st -q $ cat linefeed % just linefeed\r (esc) $ cd .. mercurial-3.7.3/tests/test-parseindex2.py0000644000175000017500000001741512676531525020054 0ustar mpmmpm00000000000000"""This unit test primarily tests parsers.parse_index2(). It also checks certain aspects of the parsers module as a whole. """ from mercurial import parsers from mercurial.node import nullid, nullrev import struct import subprocess import sys # original python implementation def gettype(q): return int(q & 0xFFFF) def offset_type(offset, type): return long(long(offset) << 16 | type) indexformatng = ">Qiiiiii20s12x" def py_parseindex(data, inline) : s = 64 cache = None index = [] nodemap = {nullid: nullrev} n = off = 0 l = len(data) - s append = index.append if inline: cache = (0, data) while off <= l: e = struct.unpack(indexformatng, data[off:off + s]) nodemap[e[7]] = n append(e) n += 1 if e[1] < 0: break off += e[1] + s else: while off <= l: e = struct.unpack(indexformatng, data[off:off + s]) nodemap[e[7]] = n append(e) n += 1 off += s e = list(index[0]) type = gettype(e[0]) e[0] = offset_type(0, type) index[0] = tuple(e) # add the magic null revision at -1 index.append((0, 0, 0, -1, -1, -1, -1, nullid)) return index, cache data_inlined = '\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x01\x8c' \ '\x00\x00\x04\x07\x00\x00\x00\x00\x00\x00\x15\x15\xff\xff\xff' \ 
'\xff\xff\xff\xff\xff\xebG\x97\xb7\x1fB\x04\xcf\x13V\x81\tw\x1b' \ 'w\xdduR\xda\xc6\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' \ 'x\x9c\x9d\x93?O\xc30\x10\xc5\xf7|\x8a\xdb\x9a\xa8m\x06\xd8*\x95' \ '\x81B\xa1\xa2\xa2R\xcb\x86Pd\x9a\x0b5$vd_\x04\xfd\xf6\x9c\xff@' \ '\x11!\x0b\xd9\xec\xf7\xbbw\xe7gG6\xad6\x04\xdaN\xc0\x92\xa0$)' \ '\xb1\x82\xa2\xd1%\x16\xa4\x8b7\xa9\xca\xd4-\xb2Y\x02\xfc\xc9' \ '\xcaS\xf9\xaeX\xed\xb6\xd77Q\x02\x83\xd4\x19\xf5--Y\xea\xe1W' \ '\xab\xed\x10\xceR\x0f_\xdf\xdf\r\xe1,\xf5\xf0\xcb\xf5 \xceR\x0f' \ '_\xdc\x0e\x0e\xc3R\x0f_\xae\x96\x9b!\x9e\xa5\x1e\xbf\xdb,\x06' \ '\xc7q\x9a/\x88\x82\xc3B\xea\xb5\xb4TJ\x93\xb6\x82\x0e\xe16\xe6' \ 'KQ\xdb\xaf\xecG\xa3\xd1 \x01\xd3\x0b_^\xe8\xaa\xa0\xae\xad\xd1' \ '&\xbef\x1bz\x08\xb0|\xc9Xz\x06\xf6Z\x91\x90J\xaa\x17\x90\xaa' \ '\xd2\xa6\x11$5C\xcf\xba#\xa0\x03\x02*2\x92-\xfc\xb1\x94\xdf\xe2' \ '\xae\xb8\'m\x8ey0^\x85\xd3\x82\xb4\xf0`:\x9c\x00\x8a\xfd\x01' \ '\xb0\xc6\x86\x8b\xdd\xae\x80\xf3\xa9\x9fd\x16\n\x00R%\x1a\x06' \ '\xe9\xd8b\x98\x1d\xf4\xf3+\x9bf\x01\xd8p\x1b\xf3.\xed\x9f^g\xc3' \ '^\xd9W81T\xdb\xd5\x04sx|\xf2\xeb\xd6`%?x\xed"\x831\xbf\xf3\xdc' \ 'b\xeb%gaY\xe1\xad\x9f\xb9f\'1w\xa9\xa5a\x83s\x82J\xb98\xbc4\x8b' \ '\x83\x00\x9f$z\xb8#\xa5\xb1\xdf\x98\xd9\xec\x1b\x89O\xe3Ts\x9a4' \ '\x17m\x8b\xfc\x8f\xa5\x95\x9a\xfc\xfa\xed,\xe5|\xa1\xfe\x15\xb9' \ '\xbc\xb2\x93\x1f\xf2\x95\xff\xdf,\x1a\xc5\xe7\x17*\x93Oz:>\x0e' data_non_inlined = '\x00\x00\x00\x01\x00\x00\x00\x00\x00\x01D\x19' \ '\x00\x07e\x12\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\xff' \ '\xff\xff\xff\xff\xd1\xf4\xbb\xb0\xbe\xfc\x13\xbd\x8c\xd3\x9d' \ '\x0f\xcd\xd9;\x8c\x07\x8cJ/\x00\x00\x00\x00\x00\x00\x00\x00\x00' \ '\x00\x00\x00\x00\x00\x00\x01D\x19\x00\x00\x00\x00\x00\xdf\x00' \ '\x00\x01q\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\xff' \ '\xff\xff\xff\xc1\x12\xb9\x04\x96\xa4Z1t\x91\xdfsJ\x90\xf0\x9bh' \ '\x07l&\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' \ '\x00\x01D\xf8\x00\x00\x00\x00\x01\x1b\x00\x00\x01\xb8\x00\x00' 
\ '\x00\x01\x00\x00\x00\x02\x00\x00\x00\x01\xff\xff\xff\xff\x02\n' \ '\x0e\xc6&\xa1\x92\xae6\x0b\x02i\xfe-\xe5\xbao\x05\xd1\xe7\x00' \ '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01F' \ '\x13\x00\x00\x00\x00\x01\xec\x00\x00\x03\x06\x00\x00\x00\x01' \ '\x00\x00\x00\x03\x00\x00\x00\x02\xff\xff\xff\xff\x12\xcb\xeby1' \ '\xb6\r\x98B\xcb\x07\xbd`\x8f\x92\xd9\xc4\x84\xbdK\x00\x00\x00' \ '\x00\x00\x00\x00\x00\x00\x00\x00\x00' def parse_index2(data, inline): index, chunkcache = parsers.parse_index2(data, inline) return list(index), chunkcache def importparsers(hexversion): """Import mercurial.parsers with the given sys.hexversion.""" # The file parsers.c inspects sys.hexversion to determine the version # of the currently-running Python interpreter, so we monkey-patch # sys.hexversion to simulate using different versions. code = ("import sys; sys.hexversion=%s; " "import mercurial.parsers" % hexversion) cmd = "python -c \"%s\"" % code # We need to do these tests inside a subprocess because parser.c's # version-checking code happens inside the module init function, and # when using reload() to reimport an extension module, "The init function # of extension modules is not called a second time" # (from http://docs.python.org/2/library/functions.html?#reload). 
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) return p.communicate() # returns stdout, stderr def printhexfail(testnumber, hexversion, stdout, expected): try: hexstring = hex(hexversion) except TypeError: hexstring = None print ("FAILED: version test #%s with Python %s and patched " "sys.hexversion %r (%r):\n Expected %s but got:\n-->'%s'\n" % (testnumber, sys.version_info, hexversion, hexstring, expected, stdout)) def testversionokay(testnumber, hexversion): stdout, stderr = importparsers(hexversion) if stdout: printhexfail(testnumber, hexversion, stdout, expected="no stdout") def testversionfail(testnumber, hexversion): stdout, stderr = importparsers(hexversion) # We include versionerrortext to distinguish from other ImportErrors. errtext = "ImportError: %s" % parsers.versionerrortext if errtext not in stdout: printhexfail(testnumber, hexversion, stdout, expected="stdout to contain %r" % errtext) def makehex(major, minor, micro): return int("%x%02x%02x00" % (major, minor, micro), 16) def runversiontests(): """Check the version-detection logic when importing parsers.""" info = sys.version_info major, minor, micro = info[0], info[1], info[2] # Test same major-minor versions. testversionokay(1, makehex(major, minor, micro)) testversionokay(2, makehex(major, minor, micro + 1)) # Test different major-minor versions. testversionfail(3, makehex(major + 1, minor, micro)) testversionfail(4, makehex(major, minor + 1, micro)) testversionfail(5, "'foo'") def runtest() : # Only test the version-detection logic if it is present. try: parsers.versionerrortext except AttributeError: pass else: runversiontests() # Check that parse_index2() raises TypeError on bad arguments. try: parse_index2(0, True) except TypeError: pass else: print "Expected to get TypeError." # Check parsers.parse_index2() on an index file against the original # Python implementation of parseindex, both with and without inlined data. 
py_res_1 = py_parseindex(data_inlined, True) c_res_1 = parse_index2(data_inlined, True) py_res_2 = py_parseindex(data_non_inlined, False) c_res_2 = parse_index2(data_non_inlined, False) if py_res_1 != c_res_1: print "Parse index result (with inlined data) differs!" if py_res_2 != c_res_2: print "Parse index result (no inlined data) differs!" ix = parsers.parse_index2(data_inlined, True)[0] for i, r in enumerate(ix): if r[7] == nullid: i = -1 try: if ix[r[7]] != i: print 'Reverse lookup inconsistent for %r' % r[7].encode('hex') except TypeError: # pure version doesn't support this break print "done" runtest() mercurial-3.7.3/tests/test-clone-uncompressed.t0000644000175000017500000000524212676531525021243 0ustar mpmmpm00000000000000#require serve Initialize repository the status call is to check for issue5130 $ hg init server $ cd server $ touch foo $ hg -q commit -A -m initial >>> for i in range(1024): ... with open(str(i), 'wb') as fh: ... fh.write(str(i)) $ hg -q commit -A -m 'add a lot of files' $ hg st $ hg serve -p $HGPORT -d --pid-file=hg.pid $ cat hg.pid >> $DAEMON_PIDS $ cd .. 
Basic clone $ hg clone --uncompressed -U http://localhost:$HGPORT clone1 streaming all changes 1027 files to transfer, 96.3 KB of data transferred 96.3 KB in * seconds (*/sec) (glob) searching for changes no changes found Clone with background file closing enabled $ hg --debug --config worker.backgroundclose=true --config worker.backgroundcloseminfilecount=1 clone --uncompressed -U http://localhost:$HGPORT clone-background | grep -v adding using http://localhost:$HGPORT/ sending capabilities command sending branchmap command streaming all changes sending stream_out command 1027 files to transfer, 96.3 KB of data starting 4 threads for background file closing transferred 96.3 KB in * seconds (*/sec) (glob) query 1; heads sending batch command searching for changes all remote heads known locally no changes found sending getbundle command bundle2-input-bundle: with-transaction bundle2-input-part: "listkeys" (params: 1 mandatory) supported bundle2-input-part: "listkeys" (params: 1 mandatory) supported bundle2-input-bundle: 1 parts total checking for updated bookmarks preparing listkeys for "phases" sending listkeys command received listkey for "phases": 58 bytes Stream clone while repo is changing: $ mkdir changing $ cd changing extension for delaying the server process so we reliably can modify the repo while cloning $ cat > delayer.py < import time > from mercurial import extensions, scmutil > def __call__(orig, self, path, *args, **kwargs): > if path == 'data/f1.i': > time.sleep(2) > return orig(self, path, *args, **kwargs) > extensions.wrapfunction(scmutil.vfs, '__call__', __call__) > EOF prepare repo with small and big file to cover both code paths in emitrevlogdata $ hg init repo $ touch repo/f1 $ $TESTDIR/seq.py 50000 > repo/f2 $ hg -R repo ci -Aqm "0" $ hg -R repo serve -p $HGPORT1 -d --pid-file=hg.pid --config extensions.delayer=delayer.py $ cat hg.pid >> $DAEMON_PIDS clone while modifying the repo between stating file with write lock and actually serving file 
content $ hg clone -q --uncompressed -U http://localhost:$HGPORT1 clone & $ sleep 1 $ echo >> repo/f1 $ echo >> repo/f2 $ hg -R repo ci -m "1" $ wait $ hg -R clone id 000000000000 mercurial-3.7.3/tests/test-mq-qclone-http.t0000644000175000017500000000746712676531525020322 0ustar mpmmpm00000000000000#require killdaemons hide outer repo $ hg init $ echo "[extensions]" >> $HGRCPATH $ echo "mq=" >> $HGRCPATH $ mkdir webdir $ cd webdir $ hg init a $ hg --cwd a qinit -c $ echo a > a/a $ hg --cwd a ci -A -m a adding a $ echo b > a/b $ hg --cwd a addremove adding b $ hg --cwd a qnew -f b.patch $ hg --cwd a qcommit -m b.patch $ hg --cwd a log --template "{desc}\n" [mq]: b.patch a $ hg --cwd a/.hg/patches log --template "{desc}\n" b.patch $ root=`pwd` $ cd .. test with recursive collection $ cat > collections.conf < [paths] > /=$root/** > EOF $ hg serve -p $HGPORT -d --pid-file=hg.pid --webdir-conf collections.conf \ > -A access-paths.log -E error-paths-1.log $ cat hg.pid >> $DAEMON_PIDS $ get-with-headers.py localhost:$HGPORT '?style=raw' 200 Script output follows /a/ /a/.hg/patches/ $ hg qclone http://localhost:$HGPORT/a b requesting all changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 3 changes to 3 files updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg --cwd b log --template "{desc}\n" a $ hg --cwd b qpush -a applying b.patch now at: b.patch $ hg --cwd b log --template "{desc}\n" imported patch b.patch a test with normal collection $ cat > collections1.conf < [paths] > /=$root/* > EOF $ hg serve -p $HGPORT1 -d --pid-file=hg.pid --webdir-conf collections1.conf \ > -A access-paths.log -E error-paths-1.log $ cat hg.pid >> $DAEMON_PIDS $ get-with-headers.py localhost:$HGPORT1 '?style=raw' 200 
Script output follows /a/ /a/.hg/patches/ $ hg qclone http://localhost:$HGPORT1/a c requesting all changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 3 changes to 3 files updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg --cwd c log --template "{desc}\n" a $ hg --cwd c qpush -a applying b.patch now at: b.patch $ hg --cwd c log --template "{desc}\n" imported patch b.patch a test with old-style collection $ cat > collections2.conf < [collections] > $root=$root > EOF $ hg serve -p $HGPORT2 -d --pid-file=hg.pid --webdir-conf collections2.conf \ > -A access-paths.log -E error-paths-1.log $ cat hg.pid >> $DAEMON_PIDS $ get-with-headers.py localhost:$HGPORT2 '?style=raw' 200 Script output follows /a/ /a/.hg/patches/ $ hg qclone http://localhost:$HGPORT2/a d requesting all changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 3 changes to 3 files updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg --cwd d log --template "{desc}\n" a $ hg --cwd d qpush -a applying b.patch now at: b.patch $ hg --cwd d log --template "{desc}\n" imported patch b.patch a test --mq works and uses correct repository config $ hg --cwd d outgoing --mq comparing with http://localhost:$HGPORT2/a/.hg/patches searching for changes no changes found [1] $ hg --cwd d log --mq --template '{rev} {desc|firstline}\n' 0 b.patch $ killdaemons.py mercurial-3.7.3/tests/test-copy-move-merge.t0000644000175000017500000000763412676531525020460 0ustar mpmmpm00000000000000 $ 
hg init t $ cd t $ echo 1 > a $ hg ci -qAm "first" $ hg cp a b $ hg mv a c $ echo 2 >> b $ echo 2 >> c $ hg ci -qAm "second" $ hg co -C 0 1 files updated, 0 files merged, 2 files removed, 0 files unresolved $ echo 0 > a $ echo 1 >> a $ hg ci -qAm "other" $ hg merge --debug searching for copies back to rev 1 unmatched files in other: b c all copies found (* = to merge, ! = divergent, % = renamed and deleted): src: 'a' -> dst: 'b' * src: 'a' -> dst: 'c' * checking for directory renames resolving manifests branchmerge: True, force: False, partial: False ancestor: b8bf91eeebbc, local: add3f11052fa+, remote: 17c05bb7fcb6 preserving a for resolve of b preserving a for resolve of c removing a b: remote moved from a -> m (premerge) picked tool ':merge' for b (binary False symlink False changedelete False) merging a and b to b my b@add3f11052fa+ other b@17c05bb7fcb6 ancestor a@b8bf91eeebbc premerge successful c: remote moved from a -> m (premerge) picked tool ':merge' for c (binary False symlink False changedelete False) merging a and c to c my c@add3f11052fa+ other c@17c05bb7fcb6 ancestor a@b8bf91eeebbc premerge successful 0 files updated, 2 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) file b $ cat b 0 1 2 file c $ cat c 0 1 2 Test disabling copy tracing - first verify copy metadata was kept $ hg up -qC 2 $ hg rebase --keep -d 1 -b 2 --config extensions.rebase= rebasing 2:add3f11052fa "other" (tip) merging b and a to b merging c and a to c $ cat b 0 1 2 - next verify copy metadata is lost when disabled $ hg strip -r . 
--config extensions.strip= 2 files updated, 0 files merged, 0 files removed, 0 files unresolved saved backup bundle to $TESTTMP/t/.hg/strip-backup/550bd84c0cd3-fc575957-backup.hg (glob) $ hg up -qC 2 $ hg rebase --keep -d 1 -b 2 --config extensions.rebase= --config experimental.disablecopytrace=True --config ui.interactive=True << EOF > c > EOF rebasing 2:add3f11052fa "other" (tip) remote changed a which local deleted use (c)hanged version, leave (d)eleted, or leave (u)nresolved? c $ cat b 1 2 $ cd .. Verify disabling copy tracing still keeps copies from rebase source $ hg init copydisable $ cd copydisable $ touch a $ hg ci -Aqm 'add a' $ touch b $ hg ci -Aqm 'add b, c' $ hg cp b x $ echo x >> x $ hg ci -qm 'copy b->x' $ hg up -q 1 $ touch z $ hg ci -Aqm 'add z' $ hg log -G -T '{rev} {desc}\n' @ 3 add z | | o 2 copy b->x |/ o 1 add b, c | o 0 add a $ hg rebase -d . -b 2 --config extensions.rebase= --config experimental.disablecopytrace=True rebasing 2:6adcf8c12e7d "copy b->x" saved backup bundle to $TESTTMP/copydisable/.hg/strip-backup/6adcf8c12e7d-ce4b3e75-backup.hg (glob) $ hg up -q 3 $ hg log -f x -T '{rev} {desc}\n' 3 copy b->x 1 add b, c $ cd ../ Verify we duplicate existing copies, instead of detecting them $ hg init copydisable3 $ cd copydisable3 $ touch a $ hg ci -Aqm 'add a' $ hg cp a b $ hg ci -Aqm 'copy a->b' $ hg mv b c $ hg ci -Aqm 'move b->c' $ hg up -q 0 $ hg cp a b $ echo b >> b $ hg ci -Aqm 'copy a->b (2)' $ hg log -G -T '{rev} {desc}\n' @ 3 copy a->b (2) | | o 2 move b->c | | | o 1 copy a->b |/ o 0 add a $ hg rebase -d 2 -s 3 --config extensions.rebase= --config experimental.disablecopytrace=True rebasing 3:47e1a9e6273b "copy a->b (2)" (tip) saved backup bundle to $TESTTMP/copydisable3/.hg/strip-backup/47e1a9e6273b-2d099c59-backup.hg (glob) $ hg log -G -f b @ changeset: 3:76024fb4b05b | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: copy a->b (2) | o changeset: 0:ac82d8b1f7c4 user: test date: Thu Jan 01 00:00:00 1970 +0000 
summary: add a mercurial-3.7.3/tests/test-rebase-named-branches.t0000644000175000017500000001617312676531525021551 0ustar mpmmpm00000000000000 $ cat >> $HGRCPATH < [extensions] > rebase= > > [phases] > publish=False > > [alias] > tglog = log -G --template "{rev}: '{desc}' {branches}\n" > EOF $ hg init a $ cd a $ hg unbundle "$TESTDIR/bundles/rebase.hg" adding changesets adding manifests adding file changes added 8 changesets with 7 changes to 7 files (+2 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg up tip 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd .. $ hg clone -q -u . a a1 $ cd a1 $ hg update 3 3 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg branch dev-one marked working directory as branch dev-one (branches are permanent and global, did you want a bookmark?) $ hg ci -m 'dev-one named branch' $ hg update 7 2 files updated, 0 files merged, 3 files removed, 0 files unresolved $ hg branch dev-two marked working directory as branch dev-two $ echo x > x $ hg add x $ hg ci -m 'dev-two named branch' $ hg tglog @ 9: 'dev-two named branch' dev-two | | o 8: 'dev-one named branch' dev-one | | o | 7: 'H' | | +---o 6: 'G' | | | o | | 5: 'F' | | | +---o 4: 'E' | | | o 3: 'D' | | | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' Branch name containing a dash (issue3181) $ hg rebase -b dev-two -d dev-one --keepbranches rebasing 5:24b6387c8c8c "F" rebasing 6:eea13746799a "G" rebasing 7:02de42196ebe "H" rebasing 9:cb039b7cae8e "dev-two named branch" (tip) saved backup bundle to $TESTTMP/a1/.hg/strip-backup/24b6387c8c8c-24cb8001-backup.hg (glob) $ hg tglog @ 9: 'dev-two named branch' dev-two | o 8: 'H' | | o 7: 'G' |/| o | 6: 'F' | | o | 5: 'dev-one named branch' dev-one | | | o 4: 'E' | | o | 3: 'D' | | o | 2: 'C' | | o | 1: 'B' |/ o 0: 'A' $ hg rebase -s dev-one -d 0 --keepbranches rebasing 5:643fc9128048 "dev-one named branch" rebasing 6:24de4aff8e28 "F" rebasing 7:4b988a958030 "G" rebasing 8:31d0e4ba75e6 "H" rebasing 
9:9e70cd31750f "dev-two named branch" (tip) saved backup bundle to $TESTTMP/a1/.hg/strip-backup/643fc9128048-c4ee9ef5-backup.hg (glob) $ hg tglog @ 9: 'dev-two named branch' dev-two | o 8: 'H' | | o 7: 'G' |/| o | 6: 'F' | | o | 5: 'dev-one named branch' dev-one | | | o 4: 'E' |/ | o 3: 'D' | | | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' $ hg update 3 3 files updated, 0 files merged, 3 files removed, 0 files unresolved $ hg branch -f dev-one marked working directory as branch dev-one $ hg ci -m 'dev-one named branch' created new head $ hg tglog @ 10: 'dev-one named branch' dev-one | | o 9: 'dev-two named branch' dev-two | | | o 8: 'H' | | | | o 7: 'G' | |/| | o | 6: 'F' | | | | o | 5: 'dev-one named branch' dev-one | | | | | o 4: 'E' | |/ o | 3: 'D' | | o | 2: 'C' | | o | 1: 'B' |/ o 0: 'A' $ hg rebase -b 'max(branch("dev-two"))' -d dev-one --keepbranches rebasing 5:bc8139ee757c "dev-one named branch" note: rebase of 5:bc8139ee757c created no changes to commit rebasing 6:42aa3cf0fa7a "F" rebasing 7:1a1e6f72ec38 "G" rebasing 8:904590360559 "H" rebasing 9:59c2e59309fe "dev-two named branch" saved backup bundle to $TESTTMP/a1/.hg/strip-backup/bc8139ee757c-f11c1080-backup.hg (glob) $ hg tglog o 9: 'dev-two named branch' dev-two | o 8: 'H' | | o 7: 'G' |/| o | 6: 'F' | | @ | 5: 'dev-one named branch' dev-one | | | o 4: 'E' | | o | 3: 'D' | | o | 2: 'C' | | o | 1: 'B' |/ o 0: 'A' $ hg rebase -s 'max(branch("dev-one"))' -d 0 --keepbranches rebasing 5:643fc9128048 "dev-one named branch" rebasing 6:679f28760620 "F" rebasing 7:549f007a9f5f "G" rebasing 8:12b2bc666e20 "H" rebasing 9:71325f8bc082 "dev-two named branch" (tip) saved backup bundle to $TESTTMP/a1/.hg/strip-backup/643fc9128048-6cdd1a52-backup.hg (glob) $ hg tglog o 9: 'dev-two named branch' dev-two | o 8: 'H' | | o 7: 'G' |/| o | 6: 'F' | | @ | 5: 'dev-one named branch' dev-one | | | o 4: 'E' |/ | o 3: 'D' | | | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' $ hg up -r 0 > /dev/null Rebasing descendant onto ancestor across 
different named branches $ hg rebase -s 1 -d 9 --keepbranches rebasing 1:42ccdea3bb16 "B" rebasing 2:5fddd98957c8 "C" rebasing 3:32af7686d403 "D" saved backup bundle to $TESTTMP/a1/.hg/strip-backup/42ccdea3bb16-3cb021d3-backup.hg (glob) $ hg tglog o 9: 'D' | o 8: 'C' | o 7: 'B' | o 6: 'dev-two named branch' dev-two | o 5: 'H' | | o 4: 'G' |/| o | 3: 'F' | | o | 2: 'dev-one named branch' dev-one | | | o 1: 'E' |/ @ 0: 'A' $ hg rebase -s 5 -d 6 abort: source is ancestor of destination [255] $ hg rebase -s 6 -d 5 rebasing 6:3944801ae4ea "dev-two named branch" rebasing 7:3bdb949809d9 "B" rebasing 8:a0d543090fa4 "C" rebasing 9:e9f862ce8bad "D" (tip) saved backup bundle to $TESTTMP/a1/.hg/strip-backup/3944801ae4ea-fb46ed74-backup.hg (glob) $ hg tglog o 9: 'D' | o 8: 'C' | o 7: 'B' | o 6: 'dev-two named branch' | o 5: 'H' | | o 4: 'G' |/| o | 3: 'F' | | o | 2: 'dev-one named branch' dev-one | | | o 1: 'E' |/ @ 0: 'A' Reopen branch by rebase $ hg up -qr3 $ hg branch -q b $ hg ci -m 'create b' $ hg ci -m 'close b' --close $ hg rebase -b 8 -d b reopening closed branch head 2b586e70108d rebasing 5:8e279d293175 "H" rebasing 6:c57724c84928 "dev-two named branch" rebasing 7:160b0930ccc6 "B" rebasing 8:810110211f50 "C" rebasing 9:e522577ccdbd "D" saved backup bundle to $TESTTMP/a1/.hg/strip-backup/8e279d293175-b023e27c-backup.hg (glob) $ cd .. Rebase to other head on branch Set up a case: $ hg init case1 $ cd case1 $ touch f $ hg ci -qAm0 $ hg branch -q b $ echo >> f $ hg ci -qAm 'b1' $ hg up -qr -2 $ hg branch -qf b $ hg ci -qm 'b2' $ hg up -qr -3 $ hg branch -q c $ hg ci -m 'c1' $ hg tglog @ 3: 'c1' c | | o 2: 'b2' b |/ | o 1: 'b1' b |/ o 0: '0' $ hg clone -q . 
../case2 rebase 'b2' to another lower branch head $ hg up -qr 2 $ hg rebase nothing to rebase - working directory parent is also destination [1] $ hg tglog o 3: 'c1' c | | @ 2: 'b2' b |/ | o 1: 'b1' b |/ o 0: '0' rebase 'b1' on top of the tip of the branch ('b2') - ignoring the tip branch ('c1') $ cd ../case2 $ hg up -qr 1 $ hg rebase rebasing 1:40039acb7ca5 "b1" saved backup bundle to $TESTTMP/case2/.hg/strip-backup/40039acb7ca5-342b72d1-backup.hg (glob) $ hg tglog @ 3: 'b1' b | | o 2: 'c1' c | | o | 1: 'b2' b |/ o 0: '0' rebase 'c1' to the branch head 'c2' that is closed $ hg branch -qf c $ hg ci -qm 'c2 closed' --close $ hg up -qr 2 $ hg tglog _ 4: 'c2 closed' c | o 3: 'b1' b | | @ 2: 'c1' c | | o | 1: 'b2' b |/ o 0: '0' $ hg rebase nothing to rebase - working directory parent is also destination [1] $ hg tglog _ 4: 'c2 closed' c | o 3: 'b1' b | | @ 2: 'c1' c | | o | 1: 'b2' b |/ o 0: '0' $ cd .. mercurial-3.7.3/tests/fakemergerecord.py0000644000175000017500000000125612676531525017774 0ustar mpmmpm00000000000000# Extension to write out fake unsupported records into the merge state # # from __future__ import absolute_import from mercurial import ( cmdutil, merge, ) cmdtable = {} command = cmdutil.command(cmdtable) @command('fakemergerecord', [('X', 'mandatory', None, 'add a fake mandatory record'), ('x', 'advisory', None, 'add a fake advisory record')], '') def fakemergerecord(ui, repo, *pats, **opts): ms = merge.mergestate.read(repo) records = ms._makerecords() if opts.get('mandatory'): records.append(('X', 'mandatory record')) if opts.get('advisory'): records.append(('x', 'advisory record')) ms._writerecords(records) mercurial-3.7.3/tests/test-extra-filelog-entry.t0000644000175000017500000000070012676531525021331 0ustar mpmmpm00000000000000Issue351: mq: qrefresh can create extra revlog entry $ echo "[extensions]" >> $HGRCPATH $ echo "mq=" >> $HGRCPATH $ hg init $ hg qinit $ echo b > b $ hg ci -A -m foo adding b $ echo cc > b $ hg qnew -f foo.diff $ echo b > b $ 
hg qrefresh $ hg debugindex b rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 3 ..... 0 1e88685f5dde 000000000000 000000000000 (re) mercurial-3.7.3/tests/test-hgweb-descend-empties.t0000644000175000017500000003745412676531525021613 0ustar mpmmpm00000000000000#require serve Test chains of near empty directories, terminating 3 different ways: - a1: file at level 4 (deepest) - b1: two dirs at level 3 - d1: file at level 2 Set up the repo $ hg init test $ cd test $ mkdir -p a1/a2/a3/a4 $ mkdir -p b1/b2/b3/b4 $ mkdir -p b1/b2/b3/c4 $ mkdir -p d1/d2/d3/d4 $ echo foo > a1/a2/a3/a4/foo $ echo foo > b1/b2/b3/b4/foo $ echo foo > b1/b2/b3/c4/foo $ echo foo > d1/d2/d3/d4/foo $ echo foo > d1/d2/foo $ hg ci -Ama adding a1/a2/a3/a4/foo adding b1/b2/b3/b4/foo adding b1/b2/b3/c4/foo adding d1/d2/d3/d4/foo adding d1/d2/foo $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -E errors.log $ cat hg.pid >> $DAEMON_PIDS manifest with descending (paper) $ get-with-headers.py 127.0.0.1:$HGPORT 'file' 200 Script output follows test: c9f45f7a1659 /

                            directory / @ 0:c9f45f7a1659 tip

                            name size permissions
                            [up] drwxr-xr-x
                            dir. a1/ a2/a3/a4 drwxr-xr-x
                            dir. b1/ b2/b3 drwxr-xr-x
                            dir. d1/ d2 drwxr-xr-x
                            manifest with descending (coal) $ get-with-headers.py 127.0.0.1:$HGPORT 'file?style=coal' 200 Script output follows test: c9f45f7a1659 /

                            directory / @ 0:c9f45f7a1659 tip

                            name size permissions
                            [up] drwxr-xr-x
                            dir. a1/ a2/a3/a4 drwxr-xr-x
                            dir. b1/ b2/b3 drwxr-xr-x
                            dir. d1/ d2 drwxr-xr-x
                            manifest with descending (monoblue) $ get-with-headers.py 127.0.0.1:$HGPORT 'file?style=monoblue' 200 Script output follows test: files

                            / default tip

                            drwxr-xr-x [up]
                            drwxr-xr-x a1 a2/a3/a4 files
                            drwxr-xr-x b1 b2/b3 files
                            drwxr-xr-x d1 d2 files

                            mercurial

                            manifest with descending (gitweb) $ get-with-headers.py 127.0.0.1:$HGPORT 'file?style=gitweb' 200 Script output follows test: files
                            / default tip
                            drwxr-xr-x [up]
                            drwxr-xr-x a1 a2/a3/a4
                            drwxr-xr-x b1 b2/b3
                            drwxr-xr-x d1 d2
                            manifest with descending (spartan) $ get-with-headers.py 127.0.0.1:$HGPORT 'file?style=spartan' 200 Script output follows test: files for changeset c9f45f7a1659

                            Mercurial / files for changeset c9f45f7a1659: /

                            drwxr-xr-x      [up]
                            drwxr-xr-x      a1/ a2/a3/a4
                            drwxr-xr-x      b1/ b2/b3
                            drwxr-xr-x      d1/ d2
                            $ cat errors.log $ cd .. mercurial-3.7.3/tests/test-mq-merge.t0000644000175000017500000000672312676531525017155 0ustar mpmmpm00000000000000Setup extension: $ cat <> $HGRCPATH > [extensions] > mq = > [mq] > git = keep > EOF Test merge with mq changeset as the second parent: $ hg init m $ cd m $ touch a b c $ hg add a $ hg commit -m a $ hg add b $ hg qnew -d "0 0" b $ hg update 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg add c $ hg commit -m c created new head $ hg merge 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg commit -m merge abort: cannot commit over an applied mq patch [255] $ cd .. Issue529: mq aborts when merging patch deleting files $ checkundo() > { > if [ -f .hg/store/undo ]; then > echo ".hg/store/undo still exists" > fi > } Commit two dummy files in "init" changeset: $ hg init t $ cd t $ echo a > a $ echo b > b $ hg ci -Am init adding a adding b $ hg tag -l init Create a patch removing a: $ hg qnew rm_a $ hg rm a $ hg qrefresh -m "rm a" Save the patch queue so we can merge it later: $ hg qsave -c -e copy $TESTTMP/t/.hg/patches to $TESTTMP/t/.hg/patches.1 (glob) $ checkundo Update b and commit in an "update" changeset: $ hg up -C init 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo b >> b $ hg st M b $ hg ci -m update created new head # Here, qpush used to abort with : # The system cannot find the file specified => a $ hg manifest a b $ hg qpush -a -m merging with queue at: $TESTTMP/t/.hg/patches.1 (glob) applying rm_a now at: rm_a $ checkundo $ hg manifest b Ensure status is correct after merge: $ hg qpop -a popping rm_a popping .hg.patches.merge.marker patch queue now empty $ cd .. 
Classic MQ merge sequence *with an explicit named queue*: $ hg init t2 $ cd t2 $ echo '[diff]' > .hg/hgrc $ echo 'nodates = 1' >> .hg/hgrc $ echo a > a $ hg ci -Am init adding a $ echo b > a $ hg ci -m changea $ hg up -C 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg cp a aa $ echo c >> a $ hg qnew --git -f -e patcha $ echo d >> a $ hg qnew -d '0 0' -f -e patcha2 Create the reference queue: $ hg qsave -c -e -n refqueue copy $TESTTMP/t2/.hg/patches to $TESTTMP/t2/.hg/refqueue (glob) $ hg up -C 1 1 files updated, 0 files merged, 1 files removed, 0 files unresolved Merge: $ HGMERGE=internal:other hg qpush -a -m -n refqueue merging with queue at: $TESTTMP/t2/.hg/refqueue (glob) applying patcha patching file a Hunk #1 succeeded at 2 with fuzz 1 (offset 0 lines). fuzz found when applying patch, stopping patch didn't work out, merging patcha 1 files updated, 0 files merged, 1 files removed, 0 files unresolved 0 files updated, 2 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) applying patcha2 now at: patcha2 Check patcha is still a git patch: $ cat .hg/patches/patcha # HG changeset patch # Parent d3873e73d99ef67873dac33fbcc66268d5d2b6f4 diff --git a/a b/a --- a/a +++ b/a @@ -1,1 +1,2 @@ -b +a +c diff --git a/a b/aa copy from a copy to aa --- a/a +++ b/aa @@ -1,1 +1,1 @@ -b +a Check patcha2 is still a regular patch: $ cat .hg/patches/patcha2 # HG changeset patch # Date 0 0 # Parent ???????????????????????????????????????? (glob) diff -r ???????????? -r ???????????? a (glob) --- a/a +++ b/a @@ -1,2 +1,3 @@ a c +d $ cd .. mercurial-3.7.3/tests/generate-working-copy-states.py0000644000175000017500000000635012676531525022370 0ustar mpmmpm00000000000000# Helper script used for generating history and working copy files and content. # The file's name corresponds to its history. The number of changesets can # be specified on the command line. 
With 2 changesets, files with names like # content1_content2_content1-untracked are generated. The first two filename # segments describe the contents in the two changesets. The third segment # ("content1-untracked") describes the state in the working copy, i.e. # the file has content "content1" and is untracked (since it was previously # tracked, it has been forgotten). # # This script generates the filenames and their content, but it's up to the # caller to tell hg about the state. # # There are two subcommands: # filelist # state (|wc) # # Typical usage: # # $ python $TESTDIR/generate-working-copy-states.py state 2 1 # $ hg addremove --similarity 0 # $ hg commit -m 'first' # # $ python $TESTDIR/generate-working-copy-states.py state 2 1 # $ hg addremove --similarity 0 # $ hg commit -m 'second' # # $ python $TESTDIR/generate-working-copy-states.py state 2 wc # $ hg addremove --similarity 0 # $ hg forget *_*_*-untracked # $ rm *_*_missing-* from __future__ import absolute_import import os import sys # Generates pairs of (filename, contents), where 'contents' is a list # describing the file's content at each revision (or in the working copy). # At each revision, it is either None or the file's actual content. When not # None, it may be either new content or the same content as an earlier # revisions, so all of (modified,clean,added,removed) can be tested. 
def generatestates(maxchangesets, parentcontents): depth = len(parentcontents) if depth == maxchangesets + 1: for tracked in ('untracked', 'tracked'): filename = "_".join([(content is None and 'missing' or content) for content in parentcontents]) + "-" + tracked yield (filename, parentcontents) else: for content in (set([None, 'content' + str(depth + 1)]) | set(parentcontents)): for combination in generatestates(maxchangesets, parentcontents + [content]): yield combination # retrieve the command line arguments target = sys.argv[1] maxchangesets = int(sys.argv[2]) if target == 'state': depth = sys.argv[3] # sort to make sure we have stable output combinations = sorted(generatestates(maxchangesets, [])) # compute file content content = [] for filename, states in combinations: if target == 'filelist': print filename elif target == 'state': if depth == 'wc': # Make sure there is content so the file gets written and can be # tracked. It will be deleted outside of this script. content.append((filename, states[maxchangesets] or 'TOBEDELETED')) else: content.append((filename, states[int(depth) - 1])) else: print >> sys.stderr, "unknown target:", target sys.exit(1) # write actual content for filename, data in content: if data is not None: f = open(filename, 'wb') f.write(data + '\n') f.close() elif os.path.exists(filename): os.remove(filename) mercurial-3.7.3/tests/test-flags.t0000644000175000017500000000746712676531525016545 0ustar mpmmpm00000000000000#require execbit $ umask 027 $ hg init test1 $ cd test1 $ touch a b $ hg add a b $ hg ci -m "added a b" $ cd .. 
$ hg clone test1 test3 updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg init test2 $ cd test2 $ hg pull ../test1 pulling from ../test1 requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files (run 'hg update' to get a working copy) $ hg co 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ chmod +x a $ hg ci -m "chmod +x a" the changelog should mention file a: $ hg tip --template '{files}\n' a $ cd ../test1 $ echo 123 >>a $ hg ci -m "a updated" $ hg pull ../test2 pulling from ../test2 searching for changes adding changesets adding manifests adding file changes added 1 changesets with 0 changes to 0 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg heads changeset: 2:7f4313b42a34 tag: tip parent: 0:22a449e20da5 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: chmod +x a changeset: 1:c6ecefc45368 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a updated $ hg history changeset: 2:7f4313b42a34 tag: tip parent: 0:22a449e20da5 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: chmod +x a changeset: 1:c6ecefc45368 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a updated changeset: 0:22a449e20da5 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: added a b $ hg -v merge resolving manifests 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat a 123 $ [ -x a ] $ cd ../test3 $ echo 123 >>b $ hg ci -m "b updated" $ hg pull ../test2 pulling from ../test2 searching for changes adding changesets adding manifests adding file changes added 1 changesets with 0 changes to 0 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg heads changeset: 2:7f4313b42a34 tag: tip parent: 0:22a449e20da5 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: chmod +x a changeset: 1:dc57ead75f79 user: test date: Thu Jan 01 
00:00:00 1970 +0000 summary: b updated $ hg history changeset: 2:7f4313b42a34 tag: tip parent: 0:22a449e20da5 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: chmod +x a changeset: 1:dc57ead75f79 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: b updated changeset: 0:22a449e20da5 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: added a b $ hg -v merge resolving manifests 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ ls -l ../test[123]/a > foo $ cut -b 1-10 < foo -rwxr-x--- -rwxr-x--- -rwxr-x--- $ hg debugindex a rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 0 ..... 0 b80de5d13875 000000000000 000000000000 (re) $ hg debugindex -R ../test2 a rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 0 ..... 0 b80de5d13875 000000000000 000000000000 (re) $ hg debugindex -R ../test1 a rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 0 ..... 0 b80de5d13875 000000000000 000000000000 (re) 1 0 5 ..... 1 7fe919cc0336 b80de5d13875 000000000000 (re) $ cd .. mercurial-3.7.3/tests/gpg/0000755000175000017500000000000012676531544015047 5ustar mpmmpm00000000000000mercurial-3.7.3/tests/gpg/pubring.gpg0000644000175000017500000000112512676531525017212 0ustar mpmmpm00000000000000™ J5\|êÆqµ“¦íû@Ðã‚"­ðÁÓZýïQF÷µ êl"ñˆ†ƒiI”Àeÿ¥æÃÈˋӂQ|£á ÿÅ®HôMÞ®6Z ŸØzÚBq™íœµ‘Ô=¢á¡ ¥ó}ÏþŒ¦ó -! d4L@{yâAA]Gÿ‚õ,®@”éûݨm©+¶ ŠscyʽÒaž¢"%×jÆ¿†môºL?ã&”([¢5ÜßÍ<8YO?¦‰ _“ŠdŸßÄ„8h÷7Žx:BêV>¼Ðzi¤>YÚÁBÈ\$E3Ô¦z¾§,ÐSZØ·ëøÚo ðß”C¦¯G[´hgtest‰6 J5\| € ¢- §N$ñòÑ懻mDzg”&Zoç=£+kel–Òørß¹(Ú©„…³“µw1 k{Ê÷— ÏÔ=¡Þª‡}N_C1ª ¦A8‰•ægùÉΧO@$Ûîvýõå¼U¼‰ÊÛ¸ÿKQiºô^Û€ÁD/G;º.»Þ*]™ ´p;9Ä÷×q„S’'¥`–¢Ôâ¶t€3*¿ $|Ý1Ê5äijTfÄE®!Ç3ykïnÞ·Ç8ÖØä;YÔ8¢=IûMîðFä«…è+",#è´äÍXþjO„û³ '\ ¢]ÉC%›Ìšã…Æß«®ŠËjêK$•¹ƒù¶R?§¢F®dǰmercurial-3.7.3/tests/gpg/secring.gpg0000644000175000017500000000234012676531525017176 0ustar mpmmpm00000000000000•˜J5\|êÆqµ“¦íû@Ðã‚"­ðÁÓZýïQF÷µ êl"ñˆ†ƒiI”Àeÿ¥æÃÈˋӂQ|£á ÿÅ®HôMÞ®6Z ŸØzÚBq™íœµ‘Ô=¢á¡ ¥ó}ÏþŒ¦ó -! 
d4L@{yâAA]Gÿ‚õ,®@”éûݨm©+¶ ŠscyʽÒaž¢"%×jÆ¿†môºL?ã&”([¢5ÜßÍ<8YO?¦‰ _“ŠdŸßÄ„8h÷7Žx:BêV>¼Ðzi¤>YÚÁBÈ\$E3Ô¦z¾§,ÐSZØ·ëøÚo ðß”C¦¯G[þ1 ¢- §N$ñò WÖC–™ üÁ~»>Ã+¦Ê^ymercurial-3.7.3/tests/test-revlog-group-emptyiter.t0000644000175000017500000000125512676531525022106 0ustar mpmmpm00000000000000Issue1678: IndexError when pushing setting up base repo $ hg init a $ cd a $ touch a $ hg ci -Am a adding a $ cd .. cloning base repo $ hg clone a b updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd b setting up cset to push $ hg up null 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ touch a different msg so we get a clog new entry $ hg ci -Am b adding a created new head pushing $ hg push -f ../a pushing to ../a searching for changes adding changesets adding manifests adding file changes added 1 changesets with 0 changes to 0 files (+1 heads) $ cd .. mercurial-3.7.3/tests/test-purge.t0000644000175000017500000001160612676531525016561 0ustar mpmmpm00000000000000 $ cat <> $HGRCPATH > [extensions] > purge = > EOF init $ hg init t $ cd t setup $ echo r1 > r1 $ hg ci -qAmr1 -d'0 0' $ mkdir directory $ echo r2 > directory/r2 $ hg ci -qAmr2 -d'1 0' $ echo 'ignored' > .hgignore $ hg ci -qAmr3 -d'2 0' delete an empty directory $ mkdir empty_dir $ hg purge -p -v empty_dir $ hg purge -v removing directory empty_dir $ ls directory r1 delete an untracked directory $ mkdir untracked_dir $ touch untracked_dir/untracked_file1 $ touch untracked_dir/untracked_file2 $ hg purge -p untracked_dir/untracked_file1 untracked_dir/untracked_file2 $ hg purge -v removing file untracked_dir/untracked_file1 removing file untracked_dir/untracked_file2 removing directory untracked_dir $ ls directory r1 delete an untracked file $ touch untracked_file $ touch untracked_file_readonly $ python < import os, stat > f= 'untracked_file_readonly' > os.chmod(f, stat.S_IMODE(os.stat(f).st_mode) & ~stat.S_IWRITE) > EOF $ hg purge -p untracked_file 
untracked_file_readonly $ hg purge -v removing file untracked_file removing file untracked_file_readonly $ ls directory r1 delete an untracked file in a tracked directory $ touch directory/untracked_file $ hg purge -p directory/untracked_file $ hg purge -v removing file directory/untracked_file $ ls directory r1 delete nested directories $ mkdir -p untracked_directory/nested_directory $ hg purge -p untracked_directory/nested_directory $ hg purge -v removing directory untracked_directory/nested_directory removing directory untracked_directory $ ls directory r1 delete nested directories from a subdir $ mkdir -p untracked_directory/nested_directory $ cd directory $ hg purge -p untracked_directory/nested_directory $ hg purge -v removing directory untracked_directory/nested_directory removing directory untracked_directory $ cd .. $ ls directory r1 delete only part of the tree $ mkdir -p untracked_directory/nested_directory $ touch directory/untracked_file $ cd directory $ hg purge -p ../untracked_directory untracked_directory/nested_directory $ hg purge -v ../untracked_directory removing directory untracked_directory/nested_directory removing directory untracked_directory $ cd .. $ ls directory r1 $ ls directory/untracked_file directory/untracked_file $ rm directory/untracked_file skip ignored files if --all not specified $ touch ignored $ hg purge -p $ hg purge -v $ ls directory ignored r1 $ hg purge -p --all ignored $ hg purge -v --all removing file ignored $ ls directory r1 abort with missing files until we support name mangling filesystems $ touch untracked_file $ rm r1 hide error messages to avoid changing the output when the text changes $ hg purge -p 2> /dev/null untracked_file $ hg st ! r1 ? untracked_file $ hg purge -p untracked_file $ hg purge -v 2> /dev/null removing file untracked_file $ hg st ! 
r1 $ hg purge -v $ hg revert --all --quiet $ hg st -a tracked file in ignored directory (issue621) $ echo directory >> .hgignore $ hg ci -m 'ignore directory' $ touch untracked_file $ hg purge -p untracked_file $ hg purge -v removing file untracked_file skip excluded files $ touch excluded_file $ hg purge -p -X excluded_file $ hg purge -v -X excluded_file $ ls directory excluded_file r1 $ rm excluded_file skip files in excluded dirs $ mkdir excluded_dir $ touch excluded_dir/file $ hg purge -p -X excluded_dir $ hg purge -v -X excluded_dir $ ls directory excluded_dir r1 $ ls excluded_dir file $ rm -R excluded_dir skip excluded empty dirs $ mkdir excluded_dir $ hg purge -p -X excluded_dir $ hg purge -v -X excluded_dir $ ls directory excluded_dir r1 $ rmdir excluded_dir skip patterns $ mkdir .svn $ touch .svn/foo $ mkdir directory/.svn $ touch directory/.svn/foo $ hg purge -p -X .svn -X '*/.svn' $ hg purge -p -X re:.*.svn $ rm -R .svn directory r1 only remove files $ mkdir -p empty_dir dir $ touch untracked_file dir/untracked_file $ hg purge -p --files dir/untracked_file untracked_file $ hg purge -v --files removing file dir/untracked_file removing file untracked_file $ ls dir empty_dir $ ls dir only remove dirs $ mkdir -p empty_dir dir $ touch untracked_file dir/untracked_file $ hg purge -p --dirs empty_dir $ hg purge -v --dirs removing directory empty_dir $ ls dir untracked_file $ ls dir untracked_file remove both files and dirs $ mkdir -p empty_dir dir $ touch untracked_file dir/untracked_file $ hg purge -p --files --dirs dir/untracked_file untracked_file empty_dir $ hg purge -v --files --dirs removing file dir/untracked_file removing file untracked_file removing directory empty_dir removing directory dir $ ls $ cd .. mercurial-3.7.3/tests/test-parse-date.t0000644000175000017500000001763512676531525017474 0ustar mpmmpm00000000000000This runs with TZ="GMT" $ hg init $ echo "test-parse-date" > a $ hg add a $ hg ci -d "2006-02-01 13:00:30" -m "rev 0" $ echo "hi!" 
>> a $ hg ci -d "2006-02-01 13:00:30 -0500" -m "rev 1" $ hg tag -d "2006-04-15 13:30" "Hi" $ hg backout --merge -d "2006-04-15 13:30 +0200" -m "rev 3" 1 reverting a created new head changeset 3:107ce1ee2b43 backs out changeset 1:25a1420a55f8 merging with changeset 3:107ce1ee2b43 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -d "1150000000 14400" -m "rev 4 (merge)" $ echo "fail" >> a $ hg ci -d "should fail" -m "fail" abort: invalid date: 'should fail' [255] $ hg ci -d "100000000000000000 1400" -m "fail" abort: date exceeds 32 bits: 100000000000000000 [255] $ hg ci -d "100000 1400000" -m "fail" abort: impossible time zone offset: 1400000 [255] Check with local timezone other than GMT and with DST $ TZ="PST+8PDT" $ export TZ PST=UTC-8 / PDT=UTC-7 $ hg debugrebuildstate $ echo "a" > a $ hg ci -d "2006-07-15 13:30" -m "summer@UTC-7" $ hg debugrebuildstate $ echo "b" > a $ hg ci -d "2006-07-15 13:30 +0500" -m "summer@UTC+5" $ hg debugrebuildstate $ echo "c" > a $ hg ci -d "2006-01-15 13:30" -m "winter@UTC-8" $ hg debugrebuildstate $ echo "d" > a $ hg ci -d "2006-01-15 13:30 +0500" -m "winter@UTC+5" $ hg log --template '{date|date}\n' Sun Jan 15 13:30:00 2006 +0500 Sun Jan 15 13:30:00 2006 -0800 Sat Jul 15 13:30:00 2006 +0500 Sat Jul 15 13:30:00 2006 -0700 Sun Jun 11 00:26:40 2006 -0400 Sat Apr 15 13:30:00 2006 +0200 Sat Apr 15 13:30:00 2006 +0000 Wed Feb 01 13:00:30 2006 -0500 Wed Feb 01 13:00:30 2006 +0000 Test issue1014 (fractional timezones) $ hg debugdate "1000000000 -16200" # 0430 internal: 1000000000 -16200 standard: Sun Sep 09 06:16:40 2001 +0430 $ hg debugdate "1000000000 -15300" # 0415 internal: 1000000000 -15300 standard: Sun Sep 09 06:01:40 2001 +0415 $ hg debugdate "1000000000 -14400" # 0400 internal: 1000000000 -14400 standard: Sun Sep 09 05:46:40 2001 +0400 $ hg debugdate "1000000000 0" # GMT internal: 1000000000 0 standard: Sun Sep 09 01:46:40 2001 +0000 $ hg debugdate "1000000000 
14400" # -0400 internal: 1000000000 14400 standard: Sat Sep 08 21:46:40 2001 -0400 $ hg debugdate "1000000000 15300" # -0415 internal: 1000000000 15300 standard: Sat Sep 08 21:31:40 2001 -0415 $ hg debugdate "1000000000 16200" # -0430 internal: 1000000000 16200 standard: Sat Sep 08 21:16:40 2001 -0430 $ hg debugdate "Sat Sep 08 21:16:40 2001 +0430" internal: 999967600 -16200 standard: Sat Sep 08 21:16:40 2001 +0430 $ hg debugdate "Sat Sep 08 21:16:40 2001 -0430" internal: 1000000000 16200 standard: Sat Sep 08 21:16:40 2001 -0430 Test 12-hours times $ hg debugdate "2006-02-01 1:00:30PM +0000" internal: 1138798830 0 standard: Wed Feb 01 13:00:30 2006 +0000 $ hg debugdate "1:00:30PM" > /dev/null Normal range $ hg log -d -1 Negative range $ hg log -d "--2" abort: -2 must be nonnegative (see "hg help dates") [255] Whitespace only $ hg log -d " " abort: dates cannot consist entirely of whitespace [255] Test date formats with '>' or '<' accompanied by space characters $ hg log -d '>' --template '{date|date}\n' abort: invalid day spec, use '>DATE' [255] $ hg log -d '<' --template '{date|date}\n' abort: invalid day spec, use '' --template '{date|date}\n' abort: invalid day spec, use '>DATE' [255] $ hg log -d ' <' --template '{date|date}\n' abort: invalid day spec, use ' ' --template '{date|date}\n' abort: invalid day spec, use '>DATE' [255] $ hg log -d '< ' --template '{date|date}\n' abort: invalid day spec, use ' ' --template '{date|date}\n' abort: invalid day spec, use '>DATE' [255] $ hg log -d ' < ' --template '{date|date}\n' abort: invalid day spec, use '02/01' --template '{date|date}\n' $ hg log -d '<02/01' --template '{date|date}\n' Sun Jan 15 13:30:00 2006 +0500 Sun Jan 15 13:30:00 2006 -0800 Sat Jul 15 13:30:00 2006 +0500 Sat Jul 15 13:30:00 2006 -0700 Sun Jun 11 00:26:40 2006 -0400 Sat Apr 15 13:30:00 2006 +0200 Sat Apr 15 13:30:00 2006 +0000 Wed Feb 01 13:00:30 2006 -0500 Wed Feb 01 13:00:30 2006 +0000 $ hg log -d ' >02/01' --template '{date|date}\n' $ hg log -d ' 
<02/01' --template '{date|date}\n' Sun Jan 15 13:30:00 2006 +0500 Sun Jan 15 13:30:00 2006 -0800 Sat Jul 15 13:30:00 2006 +0500 Sat Jul 15 13:30:00 2006 -0700 Sun Jun 11 00:26:40 2006 -0400 Sat Apr 15 13:30:00 2006 +0200 Sat Apr 15 13:30:00 2006 +0000 Wed Feb 01 13:00:30 2006 -0500 Wed Feb 01 13:00:30 2006 +0000 $ hg log -d '> 02/01' --template '{date|date}\n' $ hg log -d '< 02/01' --template '{date|date}\n' Sun Jan 15 13:30:00 2006 +0500 Sun Jan 15 13:30:00 2006 -0800 Sat Jul 15 13:30:00 2006 +0500 Sat Jul 15 13:30:00 2006 -0700 Sun Jun 11 00:26:40 2006 -0400 Sat Apr 15 13:30:00 2006 +0200 Sat Apr 15 13:30:00 2006 +0000 Wed Feb 01 13:00:30 2006 -0500 Wed Feb 01 13:00:30 2006 +0000 $ hg log -d ' > 02/01' --template '{date|date}\n' $ hg log -d ' < 02/01' --template '{date|date}\n' Sun Jan 15 13:30:00 2006 +0500 Sun Jan 15 13:30:00 2006 -0800 Sat Jul 15 13:30:00 2006 +0500 Sat Jul 15 13:30:00 2006 -0700 Sun Jun 11 00:26:40 2006 -0400 Sat Apr 15 13:30:00 2006 +0200 Sat Apr 15 13:30:00 2006 +0000 Wed Feb 01 13:00:30 2006 -0500 Wed Feb 01 13:00:30 2006 +0000 $ hg log -d '>02/01 ' --template '{date|date}\n' $ hg log -d '<02/01 ' --template '{date|date}\n' Sun Jan 15 13:30:00 2006 +0500 Sun Jan 15 13:30:00 2006 -0800 Sat Jul 15 13:30:00 2006 +0500 Sat Jul 15 13:30:00 2006 -0700 Sun Jun 11 00:26:40 2006 -0400 Sat Apr 15 13:30:00 2006 +0200 Sat Apr 15 13:30:00 2006 +0000 Wed Feb 01 13:00:30 2006 -0500 Wed Feb 01 13:00:30 2006 +0000 $ hg log -d ' >02/01 ' --template '{date|date}\n' $ hg log -d ' <02/01 ' --template '{date|date}\n' Sun Jan 15 13:30:00 2006 +0500 Sun Jan 15 13:30:00 2006 -0800 Sat Jul 15 13:30:00 2006 +0500 Sat Jul 15 13:30:00 2006 -0700 Sun Jun 11 00:26:40 2006 -0400 Sat Apr 15 13:30:00 2006 +0200 Sat Apr 15 13:30:00 2006 +0000 Wed Feb 01 13:00:30 2006 -0500 Wed Feb 01 13:00:30 2006 +0000 $ hg log -d '> 02/01 ' --template '{date|date}\n' $ hg log -d '< 02/01 ' --template '{date|date}\n' Sun Jan 15 13:30:00 2006 +0500 Sun Jan 15 13:30:00 2006 -0800 Sat Jul 15 
13:30:00 2006 +0500 Sat Jul 15 13:30:00 2006 -0700 Sun Jun 11 00:26:40 2006 -0400 Sat Apr 15 13:30:00 2006 +0200 Sat Apr 15 13:30:00 2006 +0000 Wed Feb 01 13:00:30 2006 -0500 Wed Feb 01 13:00:30 2006 +0000 $ hg log -d ' > 02/01 ' --template '{date|date}\n' $ hg log -d ' < 02/01 ' --template '{date|date}\n' Sun Jan 15 13:30:00 2006 +0500 Sun Jan 15 13:30:00 2006 -0800 Sat Jul 15 13:30:00 2006 +0500 Sat Jul 15 13:30:00 2006 -0700 Sun Jun 11 00:26:40 2006 -0400 Sat Apr 15 13:30:00 2006 +0200 Sat Apr 15 13:30:00 2006 +0000 Wed Feb 01 13:00:30 2006 -0500 Wed Feb 01 13:00:30 2006 +0000 Test issue 3764 (interpreting 'today' and 'yesterday') $ echo "hello" >> a >>> import datetime >>> today = datetime.date.today().strftime("%b %d") >>> yesterday = (datetime.date.today() - datetime.timedelta(days=1)).strftime("%b %d") >>> dates = open('dates', 'w') >>> dates.write(today + '\n') >>> dates.write(yesterday + '\n') >>> dates.close() $ hg ci -d "`sed -n '1p' dates`" -m "today is a good day to code" $ hg log -d today --template '{desc}\n' today is a good day to code $ echo "goodbye" >> a $ hg ci -d "`sed -n '2p' dates`" -m "the time traveler's code" $ hg log -d yesterday --template '{desc}\n' the time traveler's code $ echo "foo" >> a $ hg commit -d now -m 'Explicitly committed now.' $ hg log -d today --template '{desc}\n' Explicitly committed now. 
today is a good day to code mercurial-3.7.3/tests/test-mq-qimport.t0000644000175000017500000001603112676531525017542 0ustar mpmmpm00000000000000#require killdaemons $ cat > writelines.py < import sys > path = sys.argv[1] > args = sys.argv[2:] > assert (len(args) % 2) == 0 > > f = file(path, 'wb') > for i in xrange(len(args)/2): > count, s = args[2*i:2*i+2] > count = int(count) > s = s.decode('string_escape') > f.write(s*count) > f.close() > > EOF > cat <> $HGRCPATH > [extensions] > mq = > [diff] > git = 1 > EOF $ hg init repo $ cd repo qimport without file or revision $ hg qimport abort: no files or revisions specified [255] qimport non-existing-file $ hg qimport non-existing-file abort: unable to read file non-existing-file [255] qimport null revision $ hg qimport -r null abort: revision -1 is not mutable (see "hg help phases" for details) [255] $ hg qseries import email $ hg qimport --push -n email - < From: Username in email > Subject: [PATCH] Message in email > Date: Fri, 02 Jan 1970 00:00:00 +0000 > > Text before patch. > > # HG changeset patch > # User Username in patch > # Date 0 0 > # Node ID 1a706973a7d84cb549823634a821d9bdf21c6220 > # Parent 0000000000000000000000000000000000000000 > First line of commit message. > > More text in commit message. > --- confuse the diff detection > > diff --git a/x b/x > new file mode 100644 > --- /dev/null > +++ b/x > @@ -0,0 +1,1 @@ > +new file > Text after patch. > > EOF adding email to series file applying email now at: email hg tip -v $ hg tip -v changeset: 0:1a706973a7d8 tag: email tag: qbase tag: qtip tag: tip user: Username in patch date: Thu Jan 01 00:00:00 1970 +0000 files: x description: First line of commit message. More text in commit message. 
$ hg qpop popping email patch queue now empty $ hg qdelete email import URL $ echo foo >> foo $ hg add foo $ hg diff > url.diff $ hg revert --no-backup foo $ rm foo Under unix: file:///foobar/blah Under windows: file:///c:/foobar/blah $ patchurl=`pwd | tr '\\\\' /`/url.diff $ expr "$patchurl" : "\/" > /dev/null || patchurl="/$patchurl" $ hg qimport file://"$patchurl" adding url.diff to series file $ rm url.diff $ hg qun url.diff import patch that already exists $ echo foo2 >> foo $ hg add foo $ hg diff > ../url.diff $ hg revert --no-backup foo $ rm foo $ hg qimport ../url.diff abort: patch "url.diff" already exists [255] $ hg qpush applying url.diff now at: url.diff $ cat foo foo $ hg qpop popping url.diff patch queue now empty qimport -f $ hg qimport -f ../url.diff adding url.diff to series file $ hg qpush applying url.diff now at: url.diff $ cat foo foo2 $ hg qpop popping url.diff patch queue now empty build diff with CRLF $ python ../writelines.py b 5 'a\n' 5 'a\r\n' $ hg ci -Am addb adding b $ python ../writelines.py b 2 'a\n' 10 'b\n' 2 'a\r\n' $ hg diff > b.diff $ hg up -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved qimport CRLF diff $ hg qimport b.diff adding b.diff to series file $ hg qpush applying b.diff now at: b.diff try to import --push $ cat > appendfoo.diff < append foo > > diff -r 07f494440405 -r 261500830e46 baz > --- /dev/null Thu Jan 01 00:00:00 1970 +0000 > +++ b/baz Thu Jan 01 00:00:00 1970 +0000 > @@ -0,0 +1,1 @@ > +foo > EOF $ cat > appendbar.diff < append bar > > diff -r 07f494440405 -r 261500830e46 baz > --- a/baz Thu Jan 01 00:00:00 1970 +0000 > +++ b/baz Thu Jan 01 00:00:00 1970 +0000 > @@ -1,1 +1,2 @@ > foo > +bar > EOF $ hg qimport --push appendfoo.diff appendbar.diff adding appendfoo.diff to series file adding appendbar.diff to series file applying appendfoo.diff applying appendbar.diff now at: appendbar.diff $ hg qfin -a patch b.diff finalized without changeset message $ touch .hg/patches/append_foo $ hg 
qimport -r 'p1(.)::' $ hg qapplied append_foo__1 append_bar $ hg qfin -a $ rm .hg/patches/append_foo $ hg qimport -r 'p1(.)::' -P $ hg qpop -a popping append_bar popping append_foo patch queue now empty $ hg qdel append_foo $ hg qdel -k append_bar qimport -e $ hg qimport -e append_bar adding append_bar to series file $ hg qdel -k append_bar qimport -e --name newname oldexisitingpatch $ hg qimport -e --name this-name-is-better append_bar renaming append_bar to this-name-is-better adding this-name-is-better to series file $ hg qser this-name-is-better url.diff qimport -e --name without --force $ cp .hg/patches/this-name-is-better .hg/patches/3.diff $ hg qimport -e --name this-name-is-better 3.diff abort: patch "this-name-is-better" already exists [255] $ hg qser this-name-is-better url.diff qimport -e --name with --force $ hg qimport --force -e --name this-name-is-better 3.diff renaming 3.diff to this-name-is-better adding this-name-is-better to series file $ hg qser this-name-is-better url.diff qimport with bad name, should abort before reading file $ hg qimport non-existent-file --name .hg abort: patch name cannot begin with ".hg" [255] qimport http:// patch with leading slashes in url set up hgweb $ cd .. $ hg init served $ cd served $ echo a > a $ hg ci -Am patch adding a $ hg serve -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log $ cat hg.pid >> $DAEMON_PIDS $ cd ../repo $ hg qimport http://localhost:$HGPORT/raw-rev/0/// adding 0 to series file check qimport phase: $ hg -q qpush now at: 0 $ hg phase qparent 1: draft $ hg qimport -r qparent $ hg phase qbase 1: draft $ hg qfinish qbase $ echo '[mq]' >> $HGRCPATH $ echo 'secret=true' >> $HGRCPATH $ hg qimport -r qparent $ hg phase qbase 1: secret $ cd .. 
$ killdaemons.py check patch name generation for non-alpha-numeric summary line $ cd repo $ hg qpop -a -q patch queue now empty $ hg qseries -v 0 U imported_patch_b_diff 1 U 0 2 U this-name-is-better 3 U url.diff $ echo bb >> b $ hg commit -m '==++--==' $ hg qimport -r tip $ hg qseries -v 0 A 1.diff 1 U imported_patch_b_diff 2 U 0 3 U this-name-is-better 4 U url.diff check reserved patch names $ hg qpop -qa patch queue now empty $ echo >> b $ hg commit -m 'status' $ echo >> b $ hg commit -m '.' $ echo >> b $ hg commit -m 'taken' $ mkdir .hg/patches/taken $ touch .hg/patches/taken__1 $ hg qimport -r -3:: $ hg qap 1.diff__1 2.diff taken__2 check very long patch name $ hg qpop -qa patch queue now empty $ echo >> b $ hg commit -m 'abcdefghi pqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghi pqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghi pqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghi pqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghi pqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghi pqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' $ hg qimport -r . 
$ hg qap abcdefghi_pqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghi_pqrstuvwxyzabcdefg mercurial-3.7.3/tests/test-propertycache.py.out0000644000175000017500000000424512676531525021303 0ustar mpmmpm00000000000000 === property cache === calllog: [] cached value (unfiltered): NOCACHE = first access on unfiltered, should do a call access: 0 calllog: [0] cached value (unfiltered): 0 = second access on unfiltered, should not do call access 0 calllog: [0] cached value (unfiltered): 0 = first access on "visible" view, should do a call cached value ("visible" view): NOCACHE access: 7 calllog: [0, 7] cached value (unfiltered): 0 cached value ("visible" view): 7 = second access on "visible view", should not do call access: 7 calllog: [0, 7] cached value (unfiltered): 0 cached value ("visible" view): 7 = no effect on other view cached value ("immutable" view): NOCACHE access: 9 calllog: [0, 7, 9] cached value (unfiltered): 0 cached value ("visible" view): 7 cached value ("immutable" view): 9 === unfiltered property cache === unficalllog: [] cached value (unfiltered): NOCACHE cached value ("visible" view): NOCACHE cached value ("immutable" view): NOCACHE = first access on unfiltered, should do a call access (unfiltered): 100 unficalllog: [100] cached value (unfiltered): 100 = second access on unfiltered, should not do call access (unfiltered): 100 unficalllog: [100] cached value (unfiltered): 100 = access on view should use the unfiltered cache access (unfiltered): 100 access ("visible" view): 100 access ("immutable" view): 100 unficalllog: [100] cached value (unfiltered): 100 cached value ("visible" view): NOCACHE cached value ("immutable" view): NOCACHE = even if we clear the unfiltered cache cached value (unfiltered): NOCACHE cached value ("visible" view): NOCACHE cached value ("immutable" view): NOCACHE unficalllog: [100] access ("visible" view): 100 unficalllog: [100, 100] cached value (unfiltered): 100 cached value ("visible" view): NOCACHE cached value ("immutable" view): 
NOCACHE access ("immutable" view): 100 unficalllog: [100, 100] cached value (unfiltered): 100 cached value ("visible" view): NOCACHE cached value ("immutable" view): NOCACHE access (unfiltered): 100 unficalllog: [100, 100] cached value (unfiltered): 100 cached value ("visible" view): NOCACHE cached value ("immutable" view): NOCACHE mercurial-3.7.3/tests/test-import-merge.t0000644000175000017500000001044312676531525020044 0ustar mpmmpm00000000000000 $ echo "[extensions]" >> $HGRCPATH $ echo "mq=" >> $HGRCPATH $ tipparents() { > hg parents --template "{rev}:{node|short} {desc|firstline}\n" -r tip > } Test import and merge diffs $ hg init repo $ cd repo $ echo a > a $ hg ci -Am adda adding a $ echo a >> a $ hg ci -m changea $ echo c > c $ hg ci -Am addc adding c $ hg up 0 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo b > b $ hg ci -Am addb adding b created new head $ hg up 1 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge 3 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m merge $ hg export . > ../merge.diff $ grep -v '^merge$' ../merge.diff > ../merge.nomsg.diff $ cd .. 
$ hg clone -r2 repo repo2 adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd repo2 $ hg pull -r3 ../repo pulling from ../repo searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) Test without --exact and diff.p1 == workingdir.p1 $ hg up 1 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ cat > $TESTTMP/editor.sh < env | grep HGEDITFORM > echo merge > \$1 > EOF $ HGEDITOR="sh $TESTTMP/editor.sh" hg import --edit ../merge.nomsg.diff applying ../merge.nomsg.diff HGEDITFORM=import.normal.merge $ tipparents 1:540395c44225 changea 3:102a90ea7b4a addb $ hg strip --no-backup tip 0 files updated, 0 files merged, 1 files removed, 0 files unresolved Test without --exact and diff.p1 != workingdir.p1 $ hg up 2 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg import ../merge.diff applying ../merge.diff warning: import the patch as a normal revision (use --exact to import the patch as a merge) $ tipparents 2:890ecaa90481 addc $ hg strip --no-backup tip 0 files updated, 0 files merged, 1 files removed, 0 files unresolved Test with --exact $ hg import --exact ../merge.diff applying ../merge.diff 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ tipparents 1:540395c44225 changea 3:102a90ea7b4a addb $ hg strip --no-backup tip 0 files updated, 0 files merged, 1 files removed, 0 files unresolved Test with --bypass and diff.p1 == workingdir.p1 $ hg up 1 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg import --bypass ../merge.diff applying ../merge.diff $ tipparents 1:540395c44225 changea 3:102a90ea7b4a addb $ hg strip --no-backup tip Test with --bypass and diff.p1 != workingdir.p1 $ hg up 2 1 files updated, 0 files 
merged, 0 files removed, 0 files unresolved $ hg import --bypass ../merge.diff applying ../merge.diff warning: import the patch as a normal revision (use --exact to import the patch as a merge) $ tipparents 2:890ecaa90481 addc $ hg strip --no-backup tip Test with --bypass and --exact $ hg import --bypass --exact ../merge.diff applying ../merge.diff $ tipparents 1:540395c44225 changea 3:102a90ea7b4a addb $ hg strip --no-backup tip $ cd .. Test that --exact on a bad header doesn't corrupt the repo (issue3616) $ hg init repo3 $ cd repo3 $ echo a>a $ hg ci -Aqm0 $ echo a>>a $ hg ci -m1 $ echo a>>a $ hg ci -m2 $ echo a>a $ echo b>>a $ echo a>>a $ hg ci -m3 $ hg export 2 | head -7 > ../a.patch $ hg export tip > out >>> apatch = open("../a.patch", "ab") >>> apatch.write("".join(open("out").readlines()[7:])) $ cd .. $ hg clone -qr0 repo3 repo3-clone $ cd repo3-clone $ hg pull -qr1 ../repo3 $ hg import --exact ../a.patch applying ../a.patch 1 files updated, 0 files merged, 0 files removed, 0 files unresolved patching file a Hunk #1 succeeded at 1 with fuzz 1 (offset -1 lines). transaction abort! rollback completed abort: patch is damaged or loses information [255] $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 2 changesets, 2 total revisions mercurial-3.7.3/tests/test-newcgi.t0000644000175000017500000000323612676531525016713 0ustar mpmmpm00000000000000#require no-msys # MSYS will translate web paths as if they were file paths This tests if CGI files from after d0db3462d568 but before d74fc8dec2b4 still work. 
$ hg init test $ cat >hgweb.cgi < #!/usr/bin/env python > # > # An example CGI script to use hgweb, edit as necessary > > import cgitb > cgitb.enable() > > from mercurial import demandimport; demandimport.enable() > from mercurial.hgweb import hgweb > from mercurial.hgweb import wsgicgi > from mercurial.hgweb.request import wsgiapplication > > def make_web_app(): > return hgweb("test", "Empty test repository") > > wsgicgi.launch(wsgiapplication(make_web_app)) > HGWEB $ chmod 755 hgweb.cgi $ cat >hgweb.config < [paths] > test = test > HGWEBDIRCONF $ cat >hgwebdir.cgi < #!/usr/bin/env python > # > # An example CGI script to export multiple hgweb repos, edit as necessary > > import cgitb > cgitb.enable() > > from mercurial import demandimport; demandimport.enable() > from mercurial.hgweb import hgwebdir > from mercurial.hgweb import wsgicgi > from mercurial.hgweb.request import wsgiapplication > > def make_web_app(): > return hgwebdir("hgweb.config") > > wsgicgi.launch(wsgiapplication(make_web_app)) > HGWEBDIR $ chmod 755 hgwebdir.cgi $ . "$TESTDIR/cgienv" $ python hgweb.cgi > page1 $ python hgwebdir.cgi > page2 $ PATH_INFO="/test/" $ PATH_TRANSLATED="/var/something/test.cgi" $ REQUEST_URI="/test/test/" $ SCRIPT_URI="http://hg.omnifarious.org/test/test/" $ SCRIPT_URL="/test/test/" $ python hgwebdir.cgi > page3 $ grep -i error page1 page2 page3 [1] mercurial-3.7.3/tests/test-obsolete.t0000644000175000017500000010637712676531525017265 0ustar mpmmpm00000000000000 $ cat >> $HGRCPATH << EOF > [phases] > # public changeset are not obsolete > publish=false > [ui] > logtemplate="{rev}:{node|short} ({phase}) [{tags} {bookmarks}] {desc|firstline}\n" > [experimental] > # drop me once bundle2 is the default, > # added to get test change early. 
> bundle2-exp = True > EOF $ mkcommit() { > echo "$1" > "$1" > hg add "$1" > hg ci -m "add $1" > } $ getid() { > hg log -T "{node}\n" --hidden -r "desc('$1')" > } $ cat > debugkeys.py < def reposetup(ui, repo): > class debugkeysrepo(repo.__class__): > def listkeys(self, namespace): > ui.write('listkeys %s\n' % (namespace,)) > return super(debugkeysrepo, self).listkeys(namespace) > > if repo.local(): > repo.__class__ = debugkeysrepo > EOF $ hg init tmpa $ cd tmpa $ mkcommit kill_me Checking that the feature is properly disabled $ hg debugobsolete -d '0 0' `getid kill_me` -u babar abort: creating obsolete markers is not enabled on this repo [255] Enabling it $ cat >> $HGRCPATH << EOF > [experimental] > evolution=createmarkers,exchange > EOF Killing a single changeset without replacement $ hg debugobsolete 0 abort: changeset references must be full hexadecimal node identifiers [255] $ hg debugobsolete '00' abort: changeset references must be full hexadecimal node identifiers [255] $ hg debugobsolete -d '0 0' `getid kill_me` -u babar $ hg debugobsolete 97b7c2d76b1845ed3eb988cd612611e72406cef0 0 (Thu Jan 01 00:00:00 1970 +0000) {'user': 'babar'} (test that mercurial is not confused) $ hg up null --quiet # having 0 as parent prevents it to be hidden $ hg tip -1:000000000000 (public) [tip ] $ hg up --hidden tip --quiet Killing a single changeset with itself should fail (simple local safeguard) $ hg debugobsolete `getid kill_me` `getid kill_me` abort: bad obsmarker input: in-marker cycle with 97b7c2d76b1845ed3eb988cd612611e72406cef0 [255] $ cd .. 
Killing a single changeset with replacement (and testing the format option) $ hg init tmpb $ cd tmpb $ mkcommit a $ mkcommit b $ mkcommit original_c $ hg up "desc('b')" 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ mkcommit new_c created new head $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden $ hg debugobsolete --config format.obsstore-version=0 --flag 12 `getid original_c` `getid new_c` -d '121 120' $ hg log -r 'hidden()' --template '{rev}:{node|short} {desc}\n' --hidden 2:245bde4270cd add original_c $ hg debugrevlog -cd # rev p1rev p2rev start end deltastart base p1 p2 rawsize totalsize compression heads chainlen 0 -1 -1 0 59 0 0 0 0 58 58 0 1 0 1 0 -1 59 118 59 59 0 0 58 116 0 1 0 2 1 -1 118 193 118 118 59 0 76 192 0 1 0 3 1 -1 193 260 193 193 59 0 66 258 0 2 0 $ hg debugobsolete 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'} (check for version number of the obsstore) $ dd bs=1 count=1 if=.hg/store/obsstore 2>/dev/null \x00 (no-eol) (esc) do it again (it read the obsstore before adding new changeset) $ hg up '.^' 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ mkcommit new_2_c created new head $ hg debugobsolete -d '1337 0' `getid new_c` `getid new_2_c` $ hg debugobsolete 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'} cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'} Register two markers with a missing node $ hg up '.^' 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ mkcommit new_3_c created new head $ hg debugobsolete -d '1338 0' `getid new_2_c` 1337133713371337133713371337133713371337 $ hg debugobsolete -d '1339 0' 1337133713371337133713371337133713371337 `getid new_3_c` $ hg debugobsolete 
245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'} cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'} ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'} 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'} Refuse pathological nullid successors $ hg debugobsolete -d '9001 0' 1337133713371337133713371337133713371337 0000000000000000000000000000000000000000 transaction abort! rollback completed abort: bad obsolescence marker detected: invalid successors nullid [255] Check that graphlog detect that a changeset is obsolete: $ hg log -G @ 5:5601fb93a350 (draft) [tip ] add new_3_c | o 1:7c3bad9141dc (draft) [ ] add b | o 0:1f0dee641bb7 (draft) [ ] add a check that heads does not report them $ hg heads 5:5601fb93a350 (draft) [tip ] add new_3_c $ hg heads --hidden 5:5601fb93a350 (draft) [tip ] add new_3_c 4:ca819180edb9 (draft) [ ] add new_2_c 3:cdbce2fbb163 (draft) [ ] add new_c 2:245bde4270cd (draft) [ ] add original_c check that summary does not report them $ hg init ../sink $ echo '[paths]' >> .hg/hgrc $ echo 'default=../sink' >> .hg/hgrc $ hg summary --remote parent: 5:5601fb93a350 tip add new_3_c branch: default commit: (clean) update: (current) phases: 3 draft remote: 3 outgoing $ hg summary --remote --hidden parent: 5:5601fb93a350 tip add new_3_c branch: default commit: (clean) update: 3 new changesets, 4 branch heads (merge) phases: 6 draft remote: 3 outgoing check that various commands work well with filtering $ hg tip 5:5601fb93a350 (draft) [tip ] add new_3_c $ hg log -r 6 abort: unknown revision '6'! [255] $ hg log -r 4 abort: hidden revision '4'! 
(use --hidden to access hidden revisions) [255] $ hg debugrevspec 'rev(6)' $ hg debugrevspec 'rev(4)' $ hg debugrevspec 'null' -1 Check that public changeset are not accounted as obsolete: $ hg --hidden phase --public 2 $ hg log -G @ 5:5601fb93a350 (draft) [tip ] add new_3_c | | o 2:245bde4270cd (public) [ ] add original_c |/ o 1:7c3bad9141dc (public) [ ] add b | o 0:1f0dee641bb7 (public) [ ] add a And that bumped changeset are detected -------------------------------------- If we didn't filtered obsolete changesets out, 3 and 4 would show up too. Also note that the bumped changeset (5:5601fb93a350) is not a direct successor of the public changeset $ hg log --hidden -r 'bumped()' 5:5601fb93a350 (draft) [tip ] add new_3_c And that we can't push bumped changeset $ hg push ../tmpa -r 0 --force #(make repo related) pushing to ../tmpa searching for changes warning: repository is unrelated adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) $ hg push ../tmpa pushing to ../tmpa searching for changes abort: push includes bumped changeset: 5601fb93a350! [255] Fixing "bumped" situation We need to create a clone of 5 and add a special marker with a flag $ hg summary parent: 5:5601fb93a350 tip add new_3_c branch: default commit: (clean) update: 1 new changesets, 2 branch heads (merge) phases: 1 draft bumped: 1 changesets $ hg up '5^' 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg revert -ar 5 adding new_3_c $ hg ci -m 'add n3w_3_c' created new head $ hg debugobsolete -d '1338 0' --flags 1 `getid new_3_c` `getid n3w_3_c` $ hg log -r 'bumped()' $ hg log -G @ 6:6f9641995072 (draft) [tip ] add n3w_3_c | | o 2:245bde4270cd (public) [ ] add original_c |/ o 1:7c3bad9141dc (public) [ ] add b | o 0:1f0dee641bb7 (public) [ ] add a $ cd .. 
Revision 0 is hidden -------------------- $ hg init rev0hidden $ cd rev0hidden $ mkcommit kill0 $ hg up -q null $ hg debugobsolete `getid kill0` $ mkcommit a $ mkcommit b Should pick the first visible revision as "repo" node $ hg archive ../archive-null $ cat ../archive-null/.hg_archival.txt repo: 1f0dee641bb7258c56bd60e93edfa2405381c41e node: 7c3bad9141dcb46ff89abf5f61856facd56e476c branch: default latesttag: null latesttagdistance: 2 changessincelatesttag: 2 $ cd .. Exchange Test ============================ Destination repo does not have any data --------------------------------------- Simple incoming test $ hg init tmpc $ cd tmpc $ hg incoming ../tmpb comparing with ../tmpb 0:1f0dee641bb7 (public) [ ] add a 1:7c3bad9141dc (public) [ ] add b 2:245bde4270cd (public) [ ] add original_c 6:6f9641995072 (draft) [tip ] add n3w_3_c Try to pull markers (extinct changeset are excluded but marker are pushed) $ hg pull ../tmpb pulling from ../tmpb requesting all changes adding changesets adding manifests adding file changes added 4 changesets with 4 changes to 4 files (+1 heads) 5 new obsolescence markers (run 'hg heads' to see heads, 'hg merge' to merge) $ hg debugobsolete 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'} 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'} 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'} ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'} cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'} Rollback//Transaction support $ hg debugobsolete -d '1340 0' aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb $ 
hg debugobsolete 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'} 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'} 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'} ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'} cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'} aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb 0 (Thu Jan 01 00:22:20 1970 +0000) {'user': 'test'} $ hg rollback -n repository tip rolled back to revision 3 (undo debugobsolete) $ hg rollback repository tip rolled back to revision 3 (undo debugobsolete) $ hg debugobsolete 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'} 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'} 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'} ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'} cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'} $ cd .. 
Try to push markers $ hg init tmpd $ hg -R tmpb push tmpd pushing to tmpd searching for changes adding changesets adding manifests adding file changes added 4 changesets with 4 changes to 4 files (+1 heads) 5 new obsolescence markers $ hg -R tmpd debugobsolete | sort 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'} 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'} 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'} ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'} cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'} Check obsolete keys are exchanged only if source has an obsolete store $ hg init empty $ hg --config extensions.debugkeys=debugkeys.py -R empty push tmpd pushing to tmpd listkeys phases listkeys bookmarks no changes found listkeys phases [1] clone support (markers are copied and extinct changesets are included to allow hardlinks) $ hg clone tmpb clone-dest updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R clone-dest log -G --hidden @ 6:6f9641995072 (draft) [tip ] add n3w_3_c | | x 5:5601fb93a350 (draft) [ ] add new_3_c |/ | x 4:ca819180edb9 (draft) [ ] add new_2_c |/ | x 3:cdbce2fbb163 (draft) [ ] add new_c |/ | o 2:245bde4270cd (public) [ ] add original_c |/ o 1:7c3bad9141dc (public) [ ] add b | o 0:1f0dee641bb7 (public) [ ] add a $ hg -R clone-dest debugobsolete 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'} cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 
00:22:17 1970 +0000) {'user': 'test'} ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'} 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'} 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'} Destination repo have existing data --------------------------------------- On pull $ hg init tmpe $ cd tmpe $ hg debugobsolete -d '1339 0' 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 $ hg pull ../tmpb pulling from ../tmpb requesting all changes adding changesets adding manifests adding file changes added 4 changesets with 4 changes to 4 files (+1 heads) 5 new obsolescence markers (run 'hg heads' to see heads, 'hg merge' to merge) $ hg debugobsolete 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'} 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'} 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'} 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'} ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'} cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'} On push $ hg push ../tmpc pushing to ../tmpc searching for changes no changes found 1 new obsolescence markers [1] $ hg -R ../tmpc debugobsolete 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) 
{'user': 'test'} 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'} 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'} ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'} cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'} 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'} detect outgoing obsolete and unstable --------------------------------------- $ hg log -G o 3:6f9641995072 (draft) [tip ] add n3w_3_c | | o 2:245bde4270cd (public) [ ] add original_c |/ o 1:7c3bad9141dc (public) [ ] add b | o 0:1f0dee641bb7 (public) [ ] add a $ hg up 'desc("n3w_3_c")' 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ mkcommit original_d $ mkcommit original_e $ hg debugobsolete --record-parents `getid original_d` -d '0 0' $ hg debugobsolete | grep `getid original_d` 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} $ hg log -r 'obsolete()' 4:94b33453f93b (draft) [ ] add original_d $ hg summary parent: 5:cda648ca50f5 tip add original_e branch: default commit: (clean) update: 1 new changesets, 2 branch heads (merge) phases: 3 draft unstable: 1 changesets $ hg log -G -r '::unstable()' @ 5:cda648ca50f5 (draft) [tip ] add original_e | x 4:94b33453f93b (draft) [ ] add original_d | o 3:6f9641995072 (draft) [ ] add n3w_3_c | o 1:7c3bad9141dc (public) [ ] add b | o 0:1f0dee641bb7 (public) [ ] add a refuse to push obsolete changeset $ hg push ../tmpc/ -r 'desc("original_d")' pushing to ../tmpc/ searching for changes abort: push includes obsolete changeset: 94b33453f93b! 
[255] refuse to push unstable changeset $ hg push ../tmpc/ pushing to ../tmpc/ searching for changes abort: push includes unstable changeset: cda648ca50f5! [255] Test that extinct changeset are properly detected $ hg log -r 'extinct()' Don't try to push extinct changeset $ hg init ../tmpf $ hg out ../tmpf comparing with ../tmpf searching for changes 0:1f0dee641bb7 (public) [ ] add a 1:7c3bad9141dc (public) [ ] add b 2:245bde4270cd (public) [ ] add original_c 3:6f9641995072 (draft) [ ] add n3w_3_c 4:94b33453f93b (draft) [ ] add original_d 5:cda648ca50f5 (draft) [tip ] add original_e $ hg push ../tmpf -f # -f because be push unstable too pushing to ../tmpf searching for changes adding changesets adding manifests adding file changes added 6 changesets with 6 changes to 6 files (+1 heads) 7 new obsolescence markers no warning displayed $ hg push ../tmpf pushing to ../tmpf searching for changes no changes found [1] Do not warn about new head when the new head is a successors of a remote one $ hg log -G @ 5:cda648ca50f5 (draft) [tip ] add original_e | x 4:94b33453f93b (draft) [ ] add original_d | o 3:6f9641995072 (draft) [ ] add n3w_3_c | | o 2:245bde4270cd (public) [ ] add original_c |/ o 1:7c3bad9141dc (public) [ ] add b | o 0:1f0dee641bb7 (public) [ ] add a $ hg up -q 'desc(n3w_3_c)' $ mkcommit obsolete_e created new head $ hg debugobsolete `getid 'original_e'` `getid 'obsolete_e'` $ hg outgoing ../tmpf # parasite hg outgoing testin comparing with ../tmpf searching for changes 6:3de5eca88c00 (draft) [tip ] add obsolete_e $ hg push ../tmpf pushing to ../tmpf searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) 1 new obsolescence markers test relevance computation --------------------------------------- Checking simple case of "marker relevance". 
Reminder of the repo situation $ hg log --hidden --graph @ 6:3de5eca88c00 (draft) [tip ] add obsolete_e | | x 5:cda648ca50f5 (draft) [ ] add original_e | | | x 4:94b33453f93b (draft) [ ] add original_d |/ o 3:6f9641995072 (draft) [ ] add n3w_3_c | | o 2:245bde4270cd (public) [ ] add original_c |/ o 1:7c3bad9141dc (public) [ ] add b | o 0:1f0dee641bb7 (public) [ ] add a List of all markers $ hg debugobsolete 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'} 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'} 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'} 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'} ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'} cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'} 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob) List of changesets with no chain $ hg debugobsolete --hidden --rev ::2 List of changesets that are included on marker chain $ hg debugobsolete --hidden --rev 6 cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob) List of changesets with a longer chain, (including a pruned children) $ hg debugobsolete --hidden --rev 3 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'} 
1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'} 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'} 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'} 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'} cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'} List of both $ hg debugobsolete --hidden --rev 3::6 1337133713371337133713371337133713371337 5601fb93a350734d935195fee37f4054c529ff39 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'} 1339133913391339133913391339133913391339 ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:19 1970 +0000) {'user': 'test'} 245bde4270cd1072a27757984f9cda8ba26f08ca cdbce2fbb16313928851e97e0d85413f3f7eb77f C (Thu Jan 01 00:00:01 1970 -0002) {'user': 'test'} 5601fb93a350734d935195fee37f4054c529ff39 6f96419950729f3671185b847352890f074f7557 1 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'} 94b33453f93bdb8d457ef9b770851a618bf413e1 0 {6f96419950729f3671185b847352890f074f7557} (Thu Jan 01 00:00:00 1970 +0000) {'user': 'test'} ca819180edb99ed25ceafb3e9584ac287e240b00 1337133713371337133713371337133713371337 0 (Thu Jan 01 00:22:18 1970 +0000) {'user': 'test'} cda648ca50f50482b7055c0b0c4c117bba6733d9 3de5eca88c00aa039da7399a220f4a5221faa585 0 (*) {'user': 'test'} (glob) cdbce2fbb16313928851e97e0d85413f3f7eb77f ca819180edb99ed25ceafb3e9584ac287e240b00 0 (Thu Jan 01 00:22:17 1970 +0000) {'user': 'test'} #if serve Test the debug output for exchange ---------------------------------- $ hg pull 
../tmpb --config 'experimental.obsmarkers-exchange-debug=True' --config 'experimental.bundle2-exp=True' pulling from ../tmpb searching for changes no changes found obsmarker-exchange: 346 bytes received check hgweb does not explode ==================================== $ hg unbundle $TESTDIR/bundles/hgweb+obs.hg adding changesets adding manifests adding file changes added 62 changesets with 63 changes to 9 files (+60 heads) (run 'hg heads .' to see heads, 'hg merge' to merge) $ for node in `hg log -r 'desc(babar_)' --template '{node}\n'`; > do > hg debugobsolete $node > done $ hg up tip 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log $ cat hg.pid >> $DAEMON_PIDS check changelog view $ get-with-headers.py --headeronly localhost:$HGPORT 'shortlog/' 200 Script output follows check graph view $ get-with-headers.py --headeronly localhost:$HGPORT 'graph' 200 Script output follows check filelog view $ get-with-headers.py --headeronly localhost:$HGPORT 'log/'`hg log -r . -T "{node}"`/'babar' 200 Script output follows $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/68' 200 Script output follows $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/67' 404 Not Found [1] check that web.view config option: $ killdaemons.py hg.pid $ cat >> .hg/hgrc << EOF > [web] > view=all > EOF $ wait $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/67' 200 Script output follows $ killdaemons.py hg.pid Checking _enable=False warning if obsolete marker exists $ echo '[experimental]' >> $HGRCPATH $ echo "evolution=" >> $HGRCPATH $ hg log -r tip obsolete feature not enabled but 68 markers found! 
68:c15e9edfca13 (draft) [tip ] add celestine reenable for later test $ echo '[experimental]' >> $HGRCPATH $ echo "evolution=createmarkers,exchange" >> $HGRCPATH #endif Test incoming/outcoming with changesets obsoleted remotely, known locally =============================================================================== This test issue 3805 $ hg init repo-issue3805 $ cd repo-issue3805 $ echo "base" > base $ hg ci -Am "base" adding base $ echo "foo" > foo $ hg ci -Am "A" adding foo $ hg clone . ../other-issue3805 updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo "bar" >> foo $ hg ci --amend $ cd ../other-issue3805 $ hg log -G @ 1:29f0c6921ddd (draft) [tip ] A | o 0:d20a80d4def3 (draft) [ ] base $ hg log -G -R ../repo-issue3805 @ 3:323a9c3ddd91 (draft) [tip ] A | o 0:d20a80d4def3 (draft) [ ] base $ hg incoming comparing with $TESTTMP/tmpe/repo-issue3805 (glob) searching for changes 3:323a9c3ddd91 (draft) [tip ] A $ hg incoming --bundle ../issue3805.hg comparing with $TESTTMP/tmpe/repo-issue3805 (glob) searching for changes 3:323a9c3ddd91 (draft) [tip ] A $ hg outgoing comparing with $TESTTMP/tmpe/repo-issue3805 (glob) searching for changes 1:29f0c6921ddd (draft) [tip ] A #if serve $ hg serve -R ../repo-issue3805 -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log $ cat hg.pid >> $DAEMON_PIDS $ hg incoming http://localhost:$HGPORT comparing with http://localhost:$HGPORT/ searching for changes 2:323a9c3ddd91 (draft) [tip ] A $ hg outgoing http://localhost:$HGPORT comparing with http://localhost:$HGPORT/ searching for changes 1:29f0c6921ddd (draft) [tip ] A $ killdaemons.py #endif This test issue 3814 (nothing to push but locally hidden changeset) $ cd .. 
$ hg init repo-issue3814 $ cd repo-issue3805 $ hg push -r 323a9c3ddd91 ../repo-issue3814 pushing to ../repo-issue3814 searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files 2 new obsolescence markers $ hg out ../repo-issue3814 comparing with ../repo-issue3814 searching for changes no changes found [1] Test that a local tag blocks a changeset from being hidden $ hg tag -l visible -r 1 --hidden $ hg log -G @ 3:323a9c3ddd91 (draft) [tip ] A | | x 1:29f0c6921ddd (draft) [visible ] A |/ o 0:d20a80d4def3 (draft) [ ] base Test that removing a local tag does not cause some commands to fail $ hg tag -l -r tip tiptag $ hg tags tiptag 3:323a9c3ddd91 tip 3:323a9c3ddd91 visible 1:29f0c6921ddd $ hg --config extensions.strip= strip -r tip --no-backup 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg tags visible 1:29f0c6921ddd tip 1:29f0c6921ddd Test bundle overlay onto hidden revision $ cd .. $ hg init repo-bundleoverlay $ cd repo-bundleoverlay $ echo "A" > foo $ hg ci -Am "A" adding foo $ echo "B" >> foo $ hg ci -m "B" $ hg up 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo "C" >> foo $ hg ci -m "C" created new head $ hg log -G @ 2:c186d7714947 (draft) [tip ] C | | o 1:44526ebb0f98 (draft) [ ] B |/ o 0:4b34ecfb0d56 (draft) [ ] A $ hg clone -r1 . 
../other-bundleoverlay adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd ../other-bundleoverlay $ echo "B+" >> foo $ hg ci --amend -m "B+" $ hg log -G --hidden @ 3:b7d587542d40 (draft) [tip ] B+ | | x 2:eb95e9297e18 (draft) [ ] temporary amend commit for 44526ebb0f98 | | | x 1:44526ebb0f98 (draft) [ ] B |/ o 0:4b34ecfb0d56 (draft) [ ] A $ hg incoming ../repo-bundleoverlay --bundle ../bundleoverlay.hg comparing with ../repo-bundleoverlay searching for changes 1:44526ebb0f98 (draft) [ ] B 2:c186d7714947 (draft) [tip ] C $ hg log -G -R ../bundleoverlay.hg o 4:c186d7714947 (draft) [tip ] C | | @ 3:b7d587542d40 (draft) [ ] B+ |/ o 0:4b34ecfb0d56 (draft) [ ] A #if serve Test issue 4506 $ cd .. $ hg init repo-issue4506 $ cd repo-issue4506 $ echo "0" > foo $ hg add foo $ hg ci -m "content-0" $ hg up null 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo "1" > bar $ hg add bar $ hg ci -m "content-1" created new head $ hg up 0 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg graft 1 grafting 1:1c9eddb02162 "content-1" (tip) $ hg debugobsolete `hg log -r1 -T'{node}'` `hg log -r2 -T'{node}'` $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log $ cat hg.pid >> $DAEMON_PIDS $ get-with-headers.py --headeronly localhost:$HGPORT 'rev/1' 404 Not Found [1] $ get-with-headers.py --headeronly localhost:$HGPORT 'file/tip/bar' 200 Script output follows $ get-with-headers.py --headeronly localhost:$HGPORT 'annotate/tip/bar' 200 Script output follows $ killdaemons.py #endif Test heads computation on pending index changes with obsolescence markers $ cd .. 
$ cat >$TESTTMP/test_extension.py << EOF > from mercurial import cmdutil > from mercurial.i18n import _ > > cmdtable = {} > command = cmdutil.command(cmdtable) > @command("amendtransient",[], _('hg amendtransient [rev]')) > def amend(ui, repo, *pats, **opts): > def commitfunc(ui, repo, message, match, opts): > return repo.commit(message, repo['.'].user(), repo['.'].date(), match) > opts['message'] = 'Test' > opts['logfile'] = None > cmdutil.amend(ui, repo, commitfunc, repo['.'], {}, pats, opts) > print repo.changelog.headrevs() > EOF $ cat >> $HGRCPATH << EOF > [extensions] > testextension=$TESTTMP/test_extension.py > EOF $ hg init repo-issue-nativerevs-pending-changes $ cd repo-issue-nativerevs-pending-changes $ mkcommit a $ mkcommit b $ hg up ".^" 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo aa > a $ hg amendtransient [1, 3] Check that corrupted hidden cache does not crash $ printf "" > .hg/cache/hidden $ hg log -r . -T '{node}' --debug corrupted hidden cache 8fd96dfc63e51ed5a8af1bec18eb4b19dbf83812 (no-eol) $ hg log -r . -T '{node}' --debug 8fd96dfc63e51ed5a8af1bec18eb4b19dbf83812 (no-eol) Check that wrong hidden cache permission does not crash $ chmod 000 .hg/cache/hidden $ hg log -r . -T '{node}' --debug cannot read hidden cache error writing hidden changesets cache 8fd96dfc63e51ed5a8af1bec18eb4b19dbf83812 (no-eol) Test cache consistency for the visible filter 1) We want to make sure that the cached filtered revs are invalidated when bookmarks change $ cd .. 
$ cat >$TESTTMP/test_extension.py << EOF > from mercurial import cmdutil, extensions, bookmarks, repoview > def _bookmarkchanged(orig, bkmstoreinst, *args, **kwargs): > repo = bkmstoreinst._repo > ret = orig(bkmstoreinst, *args, **kwargs) > hidden1 = repoview.computehidden(repo) > hidden = repoview.filterrevs(repo, 'visible') > if sorted(hidden1) != sorted(hidden): > print "cache inconsistency" > return ret > def extsetup(ui): > extensions.wrapfunction(bookmarks.bmstore, 'write', _bookmarkchanged) > EOF $ hg init repo-cache-inconsistency $ cd repo-issue-nativerevs-pending-changes $ mkcommit a a already tracked! $ mkcommit b $ hg id 13bedc178fce tip $ echo "hello" > b $ hg commit --amend -m "message" $ hg book bookb -r 13bedc178fce --hidden $ hg log -r 13bedc178fce 5:13bedc178fce (draft) [ bookb] add b $ hg book -d bookb $ hg log -r 13bedc178fce abort: hidden revision '13bedc178fce'! (use --hidden to access hidden revisions) [255] Test ability to pull changeset with locally applying obsolescence markers (issue4945) $ cd .. 
$ hg init issue4845 $ cd issue4845 $ echo foo > f0 $ hg add f0 $ hg ci -m '0' $ echo foo > f1 $ hg add f1 $ hg ci -m '1' $ echo foo > f2 $ hg add f2 $ hg ci -m '2' $ echo bar > f2 $ hg commit --amend --config experimetnal.evolution=createmarkers $ hg log -G @ 4:b0551702f918 (draft) [tip ] 2 | o 1:e016b03fd86f (draft) [ ] 1 | o 0:a78f55e5508c (draft) [ ] 0 $ hg log -G --hidden @ 4:b0551702f918 (draft) [tip ] 2 | | x 3:f27abbcc1f77 (draft) [ ] temporary amend commit for e008cf283490 | | | x 2:e008cf283490 (draft) [ ] 2 |/ o 1:e016b03fd86f (draft) [ ] 1 | o 0:a78f55e5508c (draft) [ ] 0 $ hg strip -r 1 --config extensions.strip= 0 files updated, 0 files merged, 2 files removed, 0 files unresolved saved backup bundle to $TESTTMP/tmpe/issue4845/.hg/strip-backup/e016b03fd86f-c41c6bcc-backup.hg (glob) $ hg log -G @ 0:a78f55e5508c (draft) [tip ] 0 $ hg log -G --hidden @ 0:a78f55e5508c (draft) [tip ] 0 $ hg pull .hg/strip-backup/* pulling from .hg/strip-backup/e016b03fd86f-c41c6bcc-backup.hg searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files (run 'hg update' to get a working copy) $ hg log -G o 2:b0551702f918 (draft) [tip ] 2 | o 1:e016b03fd86f (draft) [ ] 1 | @ 0:a78f55e5508c (draft) [ ] 0 $ hg log -G --hidden o 2:b0551702f918 (draft) [tip ] 2 | o 1:e016b03fd86f (draft) [ ] 1 | @ 0:a78f55e5508c (draft) [ ] 0 mercurial-3.7.3/tests/test-url.py0000644000175000017500000001667312676531525016437 0ustar mpmmpm00000000000000import os def check(a, b): if a != b: print (a, b) def cert(cn): return {'subject': ((('commonName', cn),),)} from mercurial.sslutil import _verifycert # Test non-wildcard certificates check(_verifycert(cert('example.com'), 'example.com'), None) check(_verifycert(cert('example.com'), 'www.example.com'), 'certificate is for example.com') check(_verifycert(cert('www.example.com'), 'example.com'), 'certificate is for www.example.com') # Test wildcard certificates 
check(_verifycert(cert('*.example.com'), 'www.example.com'), None) check(_verifycert(cert('*.example.com'), 'example.com'), 'certificate is for *.example.com') check(_verifycert(cert('*.example.com'), 'w.w.example.com'), 'certificate is for *.example.com') # Test subjectAltName san_cert = {'subject': ((('commonName', 'example.com'),),), 'subjectAltName': (('DNS', '*.example.net'), ('DNS', 'example.net'))} check(_verifycert(san_cert, 'example.net'), None) check(_verifycert(san_cert, 'foo.example.net'), None) # no fallback to subject commonName when subjectAltName has DNS check(_verifycert(san_cert, 'example.com'), 'certificate is for *.example.net, example.net') # fallback to subject commonName when no DNS in subjectAltName san_cert = {'subject': ((('commonName', 'example.com'),),), 'subjectAltName': (('IP Address', '8.8.8.8'),)} check(_verifycert(san_cert, 'example.com'), None) # Avoid some pitfalls check(_verifycert(cert('*.foo'), 'foo'), 'certificate is for *.foo') check(_verifycert(cert('*o'), 'foo'), 'certificate is for *o') check(_verifycert({'subject': ()}, 'example.com'), 'no commonName or subjectAltName found in certificate') check(_verifycert(None, 'example.com'), 'no certificate received') # Unicode (IDN) certname isn't supported check(_verifycert(cert(u'\u4f8b.jp'), 'example.jp'), 'IDN in certificate not supported') import doctest def test_url(): """ >>> from mercurial.util import url This tests for edge cases in url.URL's parsing algorithm. Most of these aren't useful for documentation purposes, so they aren't part of the class's doc tests. 
Query strings and fragments: >>> url('http://host/a?b#c') >>> url('http://host/a?') >>> url('http://host/a#b#c') >>> url('http://host/a#b?c') >>> url('http://host/?a#b') >>> url('http://host/?a#b', parsequery=False) >>> url('http://host/?a#b', parsefragment=False) >>> url('http://host/?a#b', parsequery=False, parsefragment=False) IPv6 addresses: >>> url('ldap://[2001:db8::7]/c=GB?objectClass?one') >>> url('ldap://joe:xxx@[2001:db8::7]:80/c=GB?objectClass?one') Missing scheme, host, etc.: >>> url('://192.0.2.16:80/') >>> url('https://mercurial-scm.org') >>> url('/foo') >>> url('bundle:/foo') >>> url('a?b#c') >>> url('http://x.com?arg=/foo') >>> url('http://joe:xxx@/foo') Just a scheme and a path: >>> url('mailto:John.Doe@example.com') >>> url('a:b:c:d') >>> url('aa:bb:cc:dd') SSH examples: >>> url('ssh://joe@host//home/joe') >>> url('ssh://joe:xxx@host/src') >>> url('ssh://joe:xxx@host') >>> url('ssh://joe@host') >>> url('ssh://host') >>> url('ssh://') >>> url('ssh:') Non-numeric port: >>> url('http://example.com:dd') >>> url('ssh://joe:xxx@host:ssh/foo') Bad authentication credentials: >>> url('http://joe@joeville:123@4:@host/a?b#c') >>> url('http://!*#?/@!*#?/:@host/a?b#c') >>> url('http://!*#?@!*#?:@host/a?b#c') >>> url('http://!*@:!*@@host/a?b#c') File paths: >>> url('a/b/c/d.g.f') >>> url('/x///z/y/') >>> url('/foo:bar') >>> url('\\\\foo:bar') >>> url('./foo:bar') Non-localhost file URL: >>> u = url('file://mercurial-scm.org/foo') Traceback (most recent call last): File "", line 1, in ? 
Abort: file:// URLs can only refer to localhost Empty URL: >>> u = url('') >>> u >>> str(u) '' Empty path with query string: >>> str(url('http://foo/?bar')) 'http://foo/?bar' Invalid path: >>> u = url('http://foo/bar') >>> u.path = 'bar' >>> str(u) 'http://foo/bar' >>> u = url('file:/foo/bar/baz') >>> u >>> str(u) 'file:///foo/bar/baz' >>> u.localpath() '/foo/bar/baz' >>> u = url('file:///foo/bar/baz') >>> u >>> str(u) 'file:///foo/bar/baz' >>> u.localpath() '/foo/bar/baz' >>> u = url('file:///f:oo/bar/baz') >>> u >>> str(u) 'file:///f:oo/bar/baz' >>> u.localpath() 'f:oo/bar/baz' >>> u = url('file://localhost/f:oo/bar/baz') >>> u >>> str(u) 'file://localhost/f:oo/bar/baz' >>> u.localpath() 'f:oo/bar/baz' >>> u = url('file:foo/bar/baz') >>> u >>> str(u) 'file:foo/bar/baz' >>> u.localpath() 'foo/bar/baz' """ if 'TERM' in os.environ: del os.environ['TERM'] doctest.testmod(optionflags=doctest.NORMALIZE_WHITESPACE) mercurial-3.7.3/tests/test-oldcgi.t0000644000175000017500000000402012676531525016670 0ustar mpmmpm00000000000000#require no-msys # MSYS will translate web paths as if they were file paths This tests if CGI files from before d0db3462d568 still work. $ hg init test $ cat >hgweb.cgi < #!/usr/bin/env python > # > # An example CGI script to use hgweb, edit as necessary > > import cgitb, os, sys > cgitb.enable() > > # sys.path.insert(0, "/path/to/python/lib") # if not a system-wide install > from mercurial import hgweb > > h = hgweb.hgweb("test", "Empty test repository") > h.run() > HGWEB $ chmod 755 hgweb.cgi $ cat >hgweb.config < [paths] > test = test > HGWEBDIRCONF $ cat >hgwebdir.cgi < #!/usr/bin/env python > # > # An example CGI script to export multiple hgweb repos, edit as necessary > > import cgitb, sys > cgitb.enable() > > # sys.path.insert(0, "/path/to/python/lib") # if not a system-wide install > from mercurial import hgweb > > # The config file looks like this. 
You can have paths to individual > # repos, collections of repos in a directory tree, or both. > # > # [paths] > # virtual/path = /real/path > # virtual/path = /real/path > # > # [collections] > # /prefix/to/strip/off = /root/of/tree/full/of/repos > # > # collections example: say directory tree /foo contains repos /foo/bar, > # /foo/quux/baz. Give this config section: > # [collections] > # /foo = /foo > # Then repos will list as bar and quux/baz. > > # Alternatively you can pass a list of ('virtual/path', '/real/path') tuples > # or use a dictionary with entries like 'virtual/path': '/real/path' > > h = hgweb.hgwebdir("hgweb.config") > h.run() > HGWEBDIR $ chmod 755 hgwebdir.cgi $ . "$TESTDIR/cgienv" $ python hgweb.cgi > page1 $ python hgwebdir.cgi > page2 $ PATH_INFO="/test/" $ PATH_TRANSLATED="/var/something/test.cgi" $ REQUEST_URI="/test/test/" $ SCRIPT_URI="http://hg.omnifarious.org/test/test/" $ SCRIPT_URL="/test/test/" $ python hgwebdir.cgi > page3 $ grep -i error page1 page2 page3 [1] mercurial-3.7.3/tests/test-subrepo-git.t0000644000175000017500000006343312676531525017704 0ustar mpmmpm00000000000000#require git make git commits repeatable $ echo "[core]" >> $HOME/.gitconfig $ echo "autocrlf = false" >> $HOME/.gitconfig $ GIT_AUTHOR_NAME='test'; export GIT_AUTHOR_NAME $ GIT_AUTHOR_EMAIL='test@example.org'; export GIT_AUTHOR_EMAIL $ GIT_AUTHOR_DATE='1234567891 +0000'; export GIT_AUTHOR_DATE $ GIT_COMMITTER_NAME="$GIT_AUTHOR_NAME"; export GIT_COMMITTER_NAME $ GIT_COMMITTER_EMAIL="$GIT_AUTHOR_EMAIL"; export GIT_COMMITTER_EMAIL $ GIT_COMMITTER_DATE="$GIT_AUTHOR_DATE"; export GIT_COMMITTER_DATE $ GIT_CONFIG_NOSYSTEM=1; export GIT_CONFIG_NOSYSTEM root hg repo $ hg init t $ cd t $ echo a > a $ hg add a $ hg commit -m a $ cd .. 
new external git repo $ mkdir gitroot $ cd gitroot $ git init -q $ echo g > g $ git add g $ git commit -q -m g add subrepo clone $ cd ../t $ echo 's = [git]../gitroot' > .hgsub $ git clone -q ../gitroot s $ hg add .hgsub $ hg commit -m 'new git subrepo' $ hg debugsub path s source ../gitroot revision da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7 record a new commit from upstream from a different branch $ cd ../gitroot $ git checkout -q -b testing $ echo gg >> g $ git commit -q -a -m gg $ cd ../t/s $ git pull -q >/dev/null 2>/dev/null $ git checkout -q -b testing origin/testing >/dev/null $ cd .. $ hg status --subrepos M s/g $ hg commit -m 'update git subrepo' $ hg debugsub path s source ../gitroot revision 126f2a14290cd5ce061fdedc430170e8d39e1c5a make $GITROOT pushable, by replacing it with a clone with nothing checked out $ cd .. $ git clone gitroot gitrootbare --bare -q $ rm -rf gitroot $ mv gitrootbare gitroot clone root $ cd t $ hg clone . ../tc 2> /dev/null updating to branch default cloning subrepo s from $TESTTMP/gitroot 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd ../tc $ hg debugsub path s source ../gitroot revision 126f2a14290cd5ce061fdedc430170e8d39e1c5a update to previous substate $ hg update 1 -q $ cat s/g g $ hg debugsub path s source ../gitroot revision da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7 clone root, make local change $ cd ../t $ hg clone . ../ta 2> /dev/null updating to branch default cloning subrepo s from $TESTTMP/gitroot 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd ../ta $ echo ggg >> s/g $ hg status --subrepos M s/g $ hg diff --subrepos diff --git a/s/g b/s/g index 089258f..85341ee 100644 --- a/s/g +++ b/s/g @@ -1,2 +1,3 @@ g gg +ggg $ hg commit --subrepos -m ggg committing subrepository s $ hg debugsub path s source ../gitroot revision 79695940086840c99328513acbe35f90fcd55e57 clone root separately, make different local change $ cd ../t $ hg clone . 
../tb 2> /dev/null updating to branch default cloning subrepo s from $TESTTMP/gitroot 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd ../tb/s $ hg status --subrepos $ echo f > f $ hg status --subrepos ? s/f $ hg add . adding f $ git add f $ cd .. $ hg status --subrepos A s/f $ hg commit --subrepos -m f committing subrepository s $ hg debugsub path s source ../gitroot revision aa84837ccfbdfedcdcdeeedc309d73e6eb069edc user b push changes $ hg push 2>/dev/null pushing to $TESTTMP/t (glob) pushing branch testing of subrepo s searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files user a pulls, merges, commits $ cd ../ta $ hg pull pulling from $TESTTMP/t (glob) searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hg merge 2>/dev/null subrepository s diverged (local revision: 7969594, remote revision: aa84837) (M)erge, keep (l)ocal or keep (r)emote? m pulling subrepo s from $TESTTMP/gitroot 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg st --subrepos s A s/f $ cat s/f f $ cat s/g g gg ggg $ hg commit --subrepos -m 'merge' committing subrepository s $ hg status --subrepos --rev 1:5 M .hgsubstate M s/g A s/f $ hg debugsub path s source ../gitroot revision f47b465e1bce645dbf37232a00574aa1546ca8d3 $ hg push 2>/dev/null pushing to $TESTTMP/t (glob) pushing branch testing of subrepo s searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files make upstream git changes $ cd .. $ git clone -q gitroot gitclone $ cd gitclone $ echo ff >> f $ git commit -q -a -m ff $ echo fff >> f $ git commit -q -a -m fff $ git push origin testing 2>/dev/null make and push changes to hg without updating the subrepo $ cd ../t $ hg clone . 
../td 2>&1 | egrep -v '^Cloning into|^done\.' updating to branch default cloning subrepo s from $TESTTMP/gitroot checking out detached HEAD in subrepo s check out a git branch if you intend to make changes 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd ../td $ echo aa >> a $ hg commit -m aa $ hg push pushing to $TESTTMP/t (glob) searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files sync to upstream git, distribute changes $ cd ../ta $ hg pull -u -q $ cd s $ git pull -q >/dev/null 2>/dev/null $ cd .. $ hg commit -m 'git upstream sync' $ hg debugsub path s source ../gitroot revision 32a343883b74769118bb1d3b4b1fbf9156f4dddc $ hg push -q $ cd ../tb $ hg pull -q $ hg update 2>/dev/null pulling subrepo s from $TESTTMP/gitroot 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg debugsub path s source ../gitroot revision 32a343883b74769118bb1d3b4b1fbf9156f4dddc create a new git branch $ cd s $ git checkout -b b2 Switched to a new branch 'b2' $ echo a>a $ git add a $ git commit -qm 'add a' $ cd .. $ hg commit -m 'add branch in s' pulling new git branch should not create tracking branch named 'origin/b2' (issue3870) $ cd ../td/s $ git remote set-url origin $TESTTMP/tb/s $ git branch --no-track oldtesting $ cd .. $ hg pull -q ../tb $ hg up From $TESTTMP/tb/s * [new branch] b2 -> origin/b2 Previous HEAD position was f47b465... merge Switched to a new branch 'b2' pulling subrepo s from $TESTTMP/tb/s 1 files updated, 0 files merged, 0 files removed, 0 files unresolved update to a revision without the subrepo, keeping the local git repository $ cd ../t $ hg up 0 0 files updated, 0 files merged, 2 files removed, 0 files unresolved $ ls -a s . .. .git $ hg up 2 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ ls -a s . .. 
.git g archive subrepos $ cd ../tc $ hg pull -q $ hg archive --subrepos -r 5 ../archive 2>/dev/null pulling subrepo s from $TESTTMP/gitroot $ cd ../archive $ cat s/f f $ cat s/g g gg ggg $ hg -R ../tc archive --subrepo -r 5 -X ../tc/**f ../archive_x 2>/dev/null $ find ../archive_x | sort | grep -v pax_global_header ../archive_x ../archive_x/.hg_archival.txt ../archive_x/.hgsub ../archive_x/.hgsubstate ../archive_x/a ../archive_x/s ../archive_x/s/g $ hg -R ../tc archive -S ../archive.tgz --prefix '.' 2>/dev/null $ tar -tzf ../archive.tgz | sort | grep -v pax_global_header .hg_archival.txt .hgsub .hgsubstate a s/g create nested repo $ cd .. $ hg init outer $ cd outer $ echo b>b $ hg add b $ hg commit -m b $ hg clone ../t inner 2> /dev/null updating to branch default cloning subrepo s from $TESTTMP/gitroot 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo inner = inner > .hgsub $ hg add .hgsub $ hg commit -m 'nested sub' nested commit $ echo ffff >> inner/s/f $ hg status --subrepos M inner/s/f $ hg commit --subrepos -m nested committing subrepository inner committing subrepository inner/s (glob) nested archive $ hg archive --subrepos ../narchive $ ls ../narchive/inner/s | grep -v pax_global_header f g relative source expansion $ cd .. 
$ mkdir d $ hg clone t d/t 2> /dev/null updating to branch default cloning subrepo s from $TESTTMP/gitroot 3 files updated, 0 files merged, 0 files removed, 0 files unresolved Don't crash if the subrepo is missing $ hg clone t missing -q $ cd missing $ rm -rf s $ hg status -S $ hg sum | grep commit commit: 1 subrepos $ hg push -q abort: subrepo s is missing (in subrepo s) [255] $ hg commit --subrepos -qm missing abort: subrepo s is missing (in subrepo s) [255] #if symlink Don't crash if subrepo is a broken symlink $ ln -s broken s $ hg status -S $ hg push -q abort: subrepo s is missing (in subrepo s) [255] $ hg commit --subrepos -qm missing abort: subrepo s is missing (in subrepo s) [255] $ rm s #endif $ hg update -C 2> /dev/null cloning subrepo s from $TESTTMP/gitroot 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg sum | grep commit commit: (clean) Don't crash if the .hgsubstate entry is missing $ hg update 1 -q $ hg rm .hgsubstate $ hg commit .hgsubstate -m 'no substate' nothing changed [1] $ hg tag -l nosubstate $ hg manifest .hgsub .hgsubstate a $ hg status -S R .hgsubstate $ hg sum | grep commit commit: 1 removed, 1 subrepos (new branch head) $ hg commit -m 'restore substate' nothing changed [1] $ hg manifest .hgsub .hgsubstate a $ hg sum | grep commit commit: 1 removed, 1 subrepos (new branch head) $ hg update -qC nosubstate $ ls s g issue3109: false positives in git diff-index $ hg update -q $ touch -t 200001010000 s/g $ hg status --subrepos $ touch -t 200001010000 s/g $ hg sum | grep commit commit: (clean) Check hg update --clean $ cd $TESTTMP/ta $ echo > s/g $ cd s $ echo c1 > f1 $ echo c1 > f2 $ git add f1 $ cd .. $ hg status -S M s/g A s/f1 ? s/f2 $ ls s f f1 f2 g $ hg update --clean 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg status -S ? s/f1 ? 
s/f2 $ ls s f f1 f2 g Sticky subrepositories, no changes $ cd $TESTTMP/ta $ hg id -n 7 $ cd s $ git rev-parse HEAD 32a343883b74769118bb1d3b4b1fbf9156f4dddc $ cd .. $ hg update 1 > /dev/null 2>&1 $ hg id -n 1 $ cd s $ git rev-parse HEAD da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7 $ cd .. Sticky subrepositories, file changes $ touch s/f1 $ cd s $ git add f1 $ cd .. $ hg id -n 1+ $ cd s $ git rev-parse HEAD da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7 $ cd .. $ hg update 4 subrepository s diverged (local revision: da5f5b1, remote revision: aa84837) (M)erge, keep (l)ocal or keep (r)emote? m subrepository sources for s differ use (l)ocal source (da5f5b1) or (r)emote source (aa84837)? l 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg id -n 4+ $ cd s $ git rev-parse HEAD da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7 $ cd .. $ hg update --clean tip > /dev/null 2>&1 Sticky subrepository, revision updates $ hg id -n 7 $ cd s $ git rev-parse HEAD 32a343883b74769118bb1d3b4b1fbf9156f4dddc $ cd .. $ cd s $ git checkout aa84837ccfbdfedcdcdeeedc309d73e6eb069edc Previous HEAD position was 32a3438... fff HEAD is now at aa84837... f $ cd .. $ hg update 1 subrepository s diverged (local revision: 32a3438, remote revision: da5f5b1) (M)erge, keep (l)ocal or keep (r)emote? m subrepository sources for s differ (in checked out version) use (l)ocal source (32a3438) or (r)emote source (da5f5b1)? l 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg id -n 1+ $ cd s $ git rev-parse HEAD aa84837ccfbdfedcdcdeeedc309d73e6eb069edc $ cd .. Sticky subrepository, file changes and revision updates $ touch s/f1 $ cd s $ git add f1 $ git rev-parse HEAD aa84837ccfbdfedcdcdeeedc309d73e6eb069edc $ cd .. $ hg id -n 1+ $ hg update 7 subrepository s diverged (local revision: 32a3438, remote revision: 32a3438) (M)erge, keep (l)ocal or keep (r)emote? m subrepository sources for s differ use (l)ocal source (32a3438) or (r)emote source (32a3438)? 
l 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg id -n 7+ $ cd s $ git rev-parse HEAD aa84837ccfbdfedcdcdeeedc309d73e6eb069edc $ cd .. Sticky repository, update --clean $ hg update --clean tip 2>/dev/null 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg id -n 7 $ cd s $ git rev-parse HEAD 32a343883b74769118bb1d3b4b1fbf9156f4dddc $ cd .. Test subrepo already at intended revision: $ cd s $ git checkout 32a343883b74769118bb1d3b4b1fbf9156f4dddc HEAD is now at 32a3438... fff $ cd .. $ hg update 1 Previous HEAD position was 32a3438... fff HEAD is now at da5f5b1... g 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg id -n 1 $ cd s $ git rev-parse HEAD da5f5b1d8ffcf62fb8327bcd3c89a4367a6018e7 $ cd .. Test forgetting files, not implemented in git subrepo, used to traceback #if no-windows $ hg forget 'notafile*' notafile*: No such file or directory [1] #else $ hg forget 'notafile' notafile: * (glob) [1] #endif $ cd .. Test sanitizing ".hg/hgrc" in subrepo $ cd t $ hg tip -q 7:af6d2edbb0d3 $ hg update -q -C af6d2edbb0d3 $ cd s $ git checkout -q -b sanitize-test $ mkdir .hg $ echo '.hg/hgrc in git repo' > .hg/hgrc $ mkdir -p sub/.hg $ echo 'sub/.hg/hgrc in git repo' > sub/.hg/hgrc $ git add .hg sub $ git commit -qm 'add .hg/hgrc to be sanitized at hg update' $ git push -q origin sanitize-test $ cd .. $ grep ' s$' .hgsubstate 32a343883b74769118bb1d3b4b1fbf9156f4dddc s $ hg commit -qm 'commit with git revision including .hg/hgrc' $ hg parents -q 8:3473d20bddcf $ grep ' s$' .hgsubstate c4069473b459cf27fd4d7c2f50c4346b4e936599 s $ cd .. 
$ hg -R tc pull -q $ hg -R tc update -q -C 3473d20bddcf 2>&1 | sort warning: removing potentially hostile 'hgrc' in '$TESTTMP/tc/s/.hg' (glob) warning: removing potentially hostile 'hgrc' in '$TESTTMP/tc/s/sub/.hg' (glob) $ cd tc $ hg parents -q 8:3473d20bddcf $ grep ' s$' .hgsubstate c4069473b459cf27fd4d7c2f50c4346b4e936599 s $ test -f s/.hg/hgrc [1] $ test -f s/sub/.hg/hgrc [1] $ cd .. additional test for "git merge --ff" route: $ cd t $ hg tip -q 8:3473d20bddcf $ hg update -q -C af6d2edbb0d3 $ cd s $ git checkout -q testing $ mkdir .hg $ echo '.hg/hgrc in git repo' > .hg/hgrc $ mkdir -p sub/.hg $ echo 'sub/.hg/hgrc in git repo' > sub/.hg/hgrc $ git add .hg sub $ git commit -qm 'add .hg/hgrc to be sanitized at hg update (git merge --ff)' $ git push -q origin testing $ cd .. $ grep ' s$' .hgsubstate 32a343883b74769118bb1d3b4b1fbf9156f4dddc s $ hg commit -qm 'commit with git revision including .hg/hgrc' $ hg parents -q 9:ed23f7fe024e $ grep ' s$' .hgsubstate f262643c1077219fbd3858d54e78ef050ef84fbf s $ cd .. $ cd tc $ hg update -q -C af6d2edbb0d3 $ test -f s/.hg/hgrc [1] $ test -f s/sub/.hg/hgrc [1] $ cd .. $ hg -R tc pull -q $ hg -R tc update -q -C ed23f7fe024e 2>&1 | sort warning: removing potentially hostile 'hgrc' in '$TESTTMP/tc/s/.hg' (glob) warning: removing potentially hostile 'hgrc' in '$TESTTMP/tc/s/sub/.hg' (glob) $ cd tc $ hg parents -q 9:ed23f7fe024e $ grep ' s$' .hgsubstate f262643c1077219fbd3858d54e78ef050ef84fbf s $ test -f s/.hg/hgrc [1] $ test -f s/sub/.hg/hgrc [1] Test that sanitizing is omitted in meta data area: $ mkdir s/.git/.hg $ echo '.hg/hgrc in git metadata area' > s/.git/.hg/hgrc $ hg update -q -C af6d2edbb0d3 checking out detached HEAD in subrepo s check out a git branch if you intend to make changes check differences made by most recent change $ cd s $ cat > foobar << EOF > woopwoop > > foo > bar > EOF $ git add foobar $ cd .. 
$ hg diff --subrepos diff --git a/s/foobar b/s/foobar new file mode 100644 index 0000000..8a5a5e2 --- /dev/null +++ b/s/foobar @@ -0,0 +1,4 @@ +woopwoop + +foo +bar $ hg commit --subrepos -m "Added foobar" committing subrepository s created new head $ hg diff -c . --subrepos --nodates diff -r af6d2edbb0d3 -r 255ee8cf690e .hgsubstate --- a/.hgsubstate +++ b/.hgsubstate @@ -1,1 +1,1 @@ -32a343883b74769118bb1d3b4b1fbf9156f4dddc s +fd4dbf828a5b2fcd36b2bcf21ea773820970d129 s diff --git a/s/foobar b/s/foobar new file mode 100644 index 0000000..8a5a5e2 --- /dev/null +++ b/s/foobar @@ -0,0 +1,4 @@ +woopwoop + +foo +bar check output when only diffing the subrepository $ hg diff -c . --subrepos s diff --git a/s/foobar b/s/foobar new file mode 100644 index 0000000..8a5a5e2 --- /dev/null +++ b/s/foobar @@ -0,0 +1,4 @@ +woopwoop + +foo +bar check output when diffing something else $ hg diff -c . --subrepos .hgsubstate --nodates diff -r af6d2edbb0d3 -r 255ee8cf690e .hgsubstate --- a/.hgsubstate +++ b/.hgsubstate @@ -1,1 +1,1 @@ -32a343883b74769118bb1d3b4b1fbf9156f4dddc s +fd4dbf828a5b2fcd36b2bcf21ea773820970d129 s add new changes, including whitespace $ cd s $ cat > foobar << EOF > woop woop > > foo > bar > EOF $ echo foo > barfoo $ git add barfoo $ cd .. 
$ hg diff --subrepos --ignore-all-space diff --git a/s/barfoo b/s/barfoo new file mode 100644 index 0000000..257cc56 --- /dev/null +++ b/s/barfoo @@ -0,0 +1 @@ +foo $ hg diff --subrepos s/foobar diff --git a/s/foobar b/s/foobar index 8a5a5e2..bd5812a 100644 --- a/s/foobar +++ b/s/foobar @@ -1,4 +1,4 @@ -woopwoop +woop woop foo bar execute a diffstat the output contains a regex, because git 1.7.10 and 1.7.11 change the amount of whitespace $ hg diff --subrepos --stat \s*barfoo |\s*1 + (re) \s*foobar |\s*2 +- (re) 2 files changed, 2 insertions\(\+\), 1 deletions?\(-\) (re) adding an include should ignore the other elements $ hg diff --subrepos -I s/foobar diff --git a/s/foobar b/s/foobar index 8a5a5e2..bd5812a 100644 --- a/s/foobar +++ b/s/foobar @@ -1,4 +1,4 @@ -woopwoop +woop woop foo bar adding an exclude should ignore this element $ hg diff --subrepos -X s/foobar diff --git a/s/barfoo b/s/barfoo new file mode 100644 index 0000000..257cc56 --- /dev/null +++ b/s/barfoo @@ -0,0 +1 @@ +foo moving a file should show a removal and an add $ hg revert --all reverting subrepo ../gitroot $ cd s $ git mv foobar woop $ cd .. $ hg diff --subrepos diff --git a/s/foobar b/s/foobar deleted file mode 100644 index 8a5a5e2..0000000 --- a/s/foobar +++ /dev/null @@ -1,4 +0,0 @@ -woopwoop - -foo -bar diff --git a/s/woop b/s/woop new file mode 100644 index 0000000..8a5a5e2 --- /dev/null +++ b/s/woop @@ -0,0 +1,4 @@ +woopwoop + +foo +bar $ rm s/woop revert the subrepository $ hg revert --all reverting subrepo ../gitroot $ hg status --subrepos ? s/barfoo ? s/foobar.orig $ mv s/foobar.orig s/foobar $ hg revert --no-backup s reverting subrepo ../gitroot $ hg status --subrepos ? 
s/barfoo revert moves orig files to the right place $ echo 'bloop' > s/foobar $ hg revert --all --verbose --config 'ui.origbackuppath=.hg/origbackups' reverting subrepo ../gitroot creating directory: $TESTTMP/tc/.hg/origbackups (glob) saving current version of foobar as $TESTTMP/tc/.hg/origbackups/foobar.orig (glob) $ ls .hg/origbackups foobar.orig $ rm -rf .hg/origbackups show file at specific revision $ cat > s/foobar << EOF > woop woop > fooo bar > EOF $ hg commit --subrepos -m "updated foobar" committing subrepository s $ cat > s/foobar << EOF > current foobar > (should not be visible using hg cat) > EOF $ hg cat -r . s/foobar woop woop fooo bar (no-eol) $ hg cat -r "parents(.)" s/foobar > catparents $ mkdir -p tmp/s $ hg cat -r "parents(.)" --output tmp/%% s/foobar $ diff tmp/% catparents $ hg cat -r "parents(.)" --output tmp/%s s/foobar $ diff tmp/foobar catparents $ hg cat -r "parents(.)" --output tmp/%d/otherfoobar s/foobar $ diff tmp/s/otherfoobar catparents $ hg cat -r "parents(.)" --output tmp/%p s/foobar $ diff tmp/s/foobar catparents $ hg cat -r "parents(.)" --output tmp/%H s/foobar $ diff tmp/255ee8cf690ec86e99b1e80147ea93ece117cd9d catparents $ hg cat -r "parents(.)" --output tmp/%R s/foobar $ diff tmp/10 catparents $ hg cat -r "parents(.)" --output tmp/%h s/foobar $ diff tmp/255ee8cf690e catparents $ rm tmp/10 $ hg cat -r "parents(.)" --output tmp/%r s/foobar $ diff tmp/10 catparents $ mkdir tmp/tc $ hg cat -r "parents(.)" --output tmp/%b/foobar s/foobar $ diff tmp/tc/foobar catparents cleanup $ rm -r tmp $ rm catparents add git files, using either files or patterns $ echo "hsss! hsssssssh!" > s/snake.python $ echo "ccc" > s/c.c $ echo "cpp" > s/cpp.cpp $ hg add s/snake.python s/c.c s/cpp.cpp $ hg st --subrepos s M s/foobar A s/c.c A s/cpp.cpp A s/snake.python ? s/barfoo $ hg revert s reverting subrepo ../gitroot $ hg add --subrepos "glob:**.python" adding s/snake.python (glob) $ hg st --subrepos s A s/snake.python ? s/barfoo ? s/c.c ? s/cpp.cpp ? 
s/foobar.orig $ hg revert s reverting subrepo ../gitroot $ hg add --subrepos s adding s/barfoo (glob) adding s/c.c (glob) adding s/cpp.cpp (glob) adding s/foobar.orig (glob) adding s/snake.python (glob) $ hg st --subrepos s A s/barfoo A s/c.c A s/cpp.cpp A s/foobar.orig A s/snake.python $ hg revert s reverting subrepo ../gitroot make sure everything is reverted correctly $ hg st --subrepos s ? s/barfoo ? s/c.c ? s/cpp.cpp ? s/foobar.orig ? s/snake.python $ hg add --subrepos --exclude "path:s/c.c" adding s/barfoo (glob) adding s/cpp.cpp (glob) adding s/foobar.orig (glob) adding s/snake.python (glob) $ hg st --subrepos s A s/barfoo A s/cpp.cpp A s/foobar.orig A s/snake.python ? s/c.c $ hg revert --all -q .hgignore should not have influence in subrepos $ cat > .hgignore << EOF > syntax: glob > *.python > EOF $ hg add .hgignore $ hg add --subrepos "glob:**.python" s/barfoo adding s/snake.python (glob) $ hg st --subrepos s A s/barfoo A s/snake.python ? s/c.c ? s/cpp.cpp ? s/foobar.orig $ hg revert --all -q .gitignore should have influence, except for explicitly added files (no patterns) $ cat > s/.gitignore << EOF > *.python > EOF $ hg add s/.gitignore $ hg st --subrepos s A s/.gitignore ? s/barfoo ? s/c.c ? s/cpp.cpp ? s/foobar.orig $ hg st --subrepos s --all A s/.gitignore ? s/barfoo ? s/c.c ? s/cpp.cpp ? s/foobar.orig I s/snake.python C s/f C s/foobar C s/g $ hg add --subrepos "glob:**.python" $ hg st --subrepos s A s/.gitignore ? s/barfoo ? s/c.c ? s/cpp.cpp ? s/foobar.orig $ hg add --subrepos s/snake.python $ hg st --subrepos s A s/.gitignore A s/snake.python ? s/barfoo ? s/c.c ? s/cpp.cpp ? s/foobar.orig correctly do a dry run $ hg add --subrepos s --dry-run adding s/barfoo (glob) adding s/c.c (glob) adding s/cpp.cpp (glob) adding s/foobar.orig (glob) $ hg st --subrepos s A s/.gitignore A s/snake.python ? s/barfoo ? s/c.c ? s/cpp.cpp ? s/foobar.orig error given when adding an already tracked file $ hg add s/.gitignore s/.gitignore already tracked! 
[1] $ hg add s/g s/g already tracked! [1] removed files can be re-added removing files using 'rm' or 'git rm' has the same effect, since we ignore the staging area $ hg ci --subrepos -m 'snake' committing subrepository s $ cd s $ rm snake.python (remove leftover .hg so Mercurial doesn't look for a root here) $ rm -rf .hg $ hg status --subrepos --all . R snake.python ? barfoo ? c.c ? cpp.cpp ? foobar.orig C .gitignore C f C foobar C g $ git rm snake.python rm 'snake.python' $ hg status --subrepos --all . R snake.python ? barfoo ? c.c ? cpp.cpp ? foobar.orig C .gitignore C f C foobar C g $ touch snake.python $ cd .. $ hg add s/snake.python $ hg status -S M s/snake.python ? .hgignore ? s/barfoo ? s/c.c ? s/cpp.cpp ? s/foobar.orig $ hg revert --all -q make sure we show changed files, rather than changed subtrees $ mkdir s/foo $ touch s/foo/bwuh $ hg add s/foo/bwuh $ hg commit -S -m "add bwuh" committing subrepository s $ hg status -S --change . M .hgsubstate A s/foo/bwuh ? s/barfoo ? s/c.c ? s/cpp.cpp ? s/foobar.orig ? s/snake.python.orig test for Git CVE-2016-3068 $ hg init malicious-subrepository $ cd malicious-subrepository $ echo "s = [git]ext::sh -c echo% pwned% >&2" > .hgsub $ git init s Initialized empty Git repository in $TESTTMP/tc/malicious-subrepository/s/.git/ $ cd s $ git commit --allow-empty -m 'empty' [master (root-commit) 153f934] empty $ cd .. $ hg add .hgsub $ hg commit -m "add subrepo" $ cd .. $ env -u GIT_ALLOW_PROTOCOL hg clone malicious-subrepository malicious-subrepository-protected Cloning into '$TESTTMP/tc/malicious-subrepository-protected/s'... 
fatal: transport 'ext' not allowed updating to branch default cloning subrepo s from ext::sh -c echo% pwned% >&2 abort: git clone error 128 in s (in subrepo s) [255] whitelisting of ext should be respected (that's the git submodule behaviour) $ env GIT_ALLOW_PROTOCOL=ext hg clone malicious-subrepository malicious-subrepository-clone-allowed Cloning into '$TESTTMP/tc/malicious-subrepository-clone-allowed/s'... pwned fatal: Could not read from remote repository. Please make sure you have the correct access rights and the repository exists. updating to branch default cloning subrepo s from ext::sh -c echo% pwned% >&2 abort: git clone error 128 in s (in subrepo s) [255] mercurial-3.7.3/tests/test-empty-file.t0000644000175000017500000000212412676531525017505 0ustar mpmmpm00000000000000 $ hg init a $ cd a $ touch empty1 $ hg add empty1 $ hg commit -m 'add empty1' $ touch empty2 $ hg add empty2 $ hg commit -m 'add empty2' $ hg up -C 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ touch empty3 $ hg add empty3 $ hg commit -m 'add empty3' created new head $ hg heads changeset: 2:a1cb177e0d44 tag: tip parent: 0:1e1d9c4e5b64 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add empty3 changeset: 1:097d2b0e17f6 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: add empty2 $ hg merge 1 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) Before changeset 05257fd28591, we didn't notice the empty file that came from rev 1: $ hg status M empty2 $ hg commit -m merge $ hg manifest --debug tip b80de5d138758541c5f05265ad144ab9fa86d1db 644 empty1 b80de5d138758541c5f05265ad144ab9fa86d1db 644 empty2 b80de5d138758541c5f05265ad144ab9fa86d1db 644 empty3 $ cd .. 
mercurial-3.7.3/tests/test-import-unknown.t0000644000175000017500000000260612676531525020446 0ustar mpmmpm00000000000000 $ cat <> $HGRCPATH > [extensions] > purge = > EOF $ hg init test $ cd test $ echo a > changed $ echo a > removed $ echo a > source $ hg ci -Am addfiles adding changed adding removed adding source $ echo a >> changed $ echo a > added $ hg add added $ hg rm removed $ hg cp source copied $ hg diff --git > ../unknown.diff Test adding on top of an unknown file $ hg up -qC 0 $ hg purge $ echo a > added $ hg import --no-commit ../unknown.diff applying ../unknown.diff file added already exists 1 out of 1 hunks FAILED -- saving rejects to file added.rej abort: patch failed to apply [255] Test modifying an unknown file $ hg revert -aq $ hg purge $ hg rm changed $ hg ci -m removechanged $ echo a > changed $ hg import --no-commit ../unknown.diff applying ../unknown.diff abort: cannot patch changed: file is not tracked [255] Test removing an unknown file $ hg up -qC 0 $ hg purge $ hg rm removed $ hg ci -m removeremoved created new head $ echo a > removed $ hg import --no-commit ../unknown.diff applying ../unknown.diff abort: cannot patch removed: file is not tracked [255] Test copying onto an unknown file $ hg up -qC 0 $ hg purge $ echo a > copied $ hg import --no-commit ../unknown.diff applying ../unknown.diff abort: cannot create copied: destination already exists [255] $ cd .. mercurial-3.7.3/tests/test-rebase-cache.t0000644000175000017500000002165612676531525017747 0ustar mpmmpm00000000000000 $ cat >> $HGRCPATH < [extensions] > rebase= > mq= > > [phases] > publish=False > > [alias] > tglog = log -G --template "{rev}: '{desc}' {branches}\n" > theads = heads --template "{rev}: '{desc}' {branches}\n" > EOF $ hg init a $ cd a $ echo a > a $ hg ci -Am A adding a $ hg branch branch1 marked working directory as branch branch1 (branches are permanent and global, did you want a bookmark?) 
$ hg ci -m 'branch1' $ echo b > b $ hg ci -Am B adding b $ hg up -q 0 $ hg branch branch2 marked working directory as branch branch2 $ hg ci -m 'branch2' $ echo c > C $ hg ci -Am C adding C $ hg up -q 2 $ hg branch -f branch2 marked working directory as branch branch2 $ echo d > d $ hg ci -Am D adding d created new head $ echo e > e $ hg ci -Am E adding e $ hg update default 0 files updated, 0 files merged, 3 files removed, 0 files unresolved $ hg branch branch3 marked working directory as branch branch3 $ hg ci -m 'branch3' $ echo f > f $ hg ci -Am F adding f $ cd .. Rebase part of branch2 (5-6) onto branch3 (8): $ hg clone -q -u . a a1 $ cd a1 $ hg tglog @ 8: 'F' branch3 | o 7: 'branch3' branch3 | | o 6: 'E' branch2 | | | o 5: 'D' branch2 | | | | o 4: 'C' branch2 | | | +---o 3: 'branch2' branch2 | | | o 2: 'B' branch1 | | | o 1: 'branch1' branch1 |/ o 0: 'A' $ hg branches branch3 8:4666b71e8e32 branch2 6:5097051d331d branch1 2:0a03079c47fd (inactive) default 0:1994f17a630e (inactive) $ hg theads 8: 'F' branch3 6: 'E' branch2 4: 'C' branch2 2: 'B' branch1 0: 'A' $ hg rebase -s 5 -d 8 rebasing 5:635859577d0b "D" rebasing 6:5097051d331d "E" saved backup bundle to $TESTTMP/a1/.hg/strip-backup/635859577d0b-89160bff-backup.hg (glob) $ hg branches branch3 8:466cdfb14b62 branch2 4:e4fdb121d036 branch1 2:0a03079c47fd default 0:1994f17a630e (inactive) $ hg theads 8: 'E' branch3 4: 'C' branch2 2: 'B' branch1 0: 'A' $ hg tglog o 8: 'E' branch3 | o 7: 'D' branch3 | @ 6: 'F' branch3 | o 5: 'branch3' branch3 | | o 4: 'C' branch2 | | | o 3: 'branch2' branch2 |/ | o 2: 'B' branch1 | | | o 1: 'branch1' branch1 |/ o 0: 'A' $ cd .. Rebase head of branch3 (8) onto branch2 (6): $ hg clone -q -u . 
a a2 $ cd a2 $ hg tglog @ 8: 'F' branch3 | o 7: 'branch3' branch3 | | o 6: 'E' branch2 | | | o 5: 'D' branch2 | | | | o 4: 'C' branch2 | | | +---o 3: 'branch2' branch2 | | | o 2: 'B' branch1 | | | o 1: 'branch1' branch1 |/ o 0: 'A' $ hg rebase -s 8 -d 6 rebasing 8:4666b71e8e32 "F" (tip) saved backup bundle to $TESTTMP/a2/.hg/strip-backup/4666b71e8e32-fc1c4e96-backup.hg (glob) $ hg branches branch2 8:6b4bdc1b5ac0 branch3 7:653b9feb4616 branch1 2:0a03079c47fd (inactive) default 0:1994f17a630e (inactive) $ hg theads 8: 'F' branch2 7: 'branch3' branch3 4: 'C' branch2 2: 'B' branch1 0: 'A' $ hg tglog @ 8: 'F' branch2 | | o 7: 'branch3' branch3 | | o | 6: 'E' branch2 | | o | 5: 'D' branch2 | | | | o 4: 'C' branch2 | | | | | o 3: 'branch2' branch2 | |/ o | 2: 'B' branch1 | | o | 1: 'branch1' branch1 |/ o 0: 'A' $ hg verify -q $ cd .. Rebase entire branch3 (7-8) onto branch2 (6): $ hg clone -q -u . a a3 $ cd a3 $ hg tglog @ 8: 'F' branch3 | o 7: 'branch3' branch3 | | o 6: 'E' branch2 | | | o 5: 'D' branch2 | | | | o 4: 'C' branch2 | | | +---o 3: 'branch2' branch2 | | | o 2: 'B' branch1 | | | o 1: 'branch1' branch1 |/ o 0: 'A' $ hg rebase -s 7 -d 6 rebasing 7:653b9feb4616 "branch3" note: rebase of 7:653b9feb4616 created no changes to commit rebasing 8:4666b71e8e32 "F" (tip) saved backup bundle to $TESTTMP/a3/.hg/strip-backup/653b9feb4616-3c88de16-backup.hg (glob) $ hg branches branch2 7:6b4bdc1b5ac0 branch1 2:0a03079c47fd (inactive) default 0:1994f17a630e (inactive) $ hg theads 7: 'F' branch2 4: 'C' branch2 2: 'B' branch1 0: 'A' $ hg tglog @ 7: 'F' branch2 | o 6: 'E' branch2 | o 5: 'D' branch2 | | o 4: 'C' branch2 | | | o 3: 'branch2' branch2 | | o | 2: 'B' branch1 | | o | 1: 'branch1' branch1 |/ o 0: 'A' $ hg verify -q Stripping multiple branches in one go bypasses the fast-case code to update the branch cache. 
$ hg strip 2 0 files updated, 0 files merged, 4 files removed, 0 files unresolved saved backup bundle to $TESTTMP/a3/.hg/strip-backup/0a03079c47fd-11b7c407-backup.hg (glob) $ hg tglog o 3: 'C' branch2 | o 2: 'branch2' branch2 | | @ 1: 'branch1' branch1 |/ o 0: 'A' $ hg branches branch2 3:e4fdb121d036 branch1 1:63379ac49655 default 0:1994f17a630e (inactive) $ hg theads 3: 'C' branch2 1: 'branch1' branch1 0: 'A' Fast path branchcache code should not be invoked if branches stripped is not the same as branches remaining. $ hg init b $ cd b $ hg branch branch1 marked working directory as branch branch1 (branches are permanent and global, did you want a bookmark?) $ hg ci -m 'branch1' $ hg branch branch2 marked working directory as branch branch2 $ hg ci -m 'branch2' $ hg branch -f branch1 marked working directory as branch branch1 $ echo a > A $ hg ci -Am A adding A created new head $ hg tglog @ 2: 'A' branch1 | o 1: 'branch2' branch2 | o 0: 'branch1' branch1 $ hg theads 2: 'A' branch1 1: 'branch2' branch2 $ hg strip 2 0 files updated, 0 files merged, 1 files removed, 0 files unresolved saved backup bundle to $TESTTMP/a3/b/.hg/strip-backup/a5b4b27ed7b4-a3b6984e-backup.hg (glob) $ hg theads 1: 'branch2' branch2 0: 'branch1' branch1 Make sure requesting to strip a revision already stripped does not confuse things. Try both orders. $ cd .. 
$ hg init c $ cd c $ echo a > a $ hg ci -Am A adding a $ echo b > b $ hg ci -Am B adding b $ echo c > c $ hg ci -Am C adding c $ echo d > d $ hg ci -Am D adding d $ echo e > e $ hg ci -Am E adding e $ hg tglog @ 4: 'E' | o 3: 'D' | o 2: 'C' | o 1: 'B' | o 0: 'A' $ hg strip 3 4 0 files updated, 0 files merged, 2 files removed, 0 files unresolved saved backup bundle to $TESTTMP/a3/c/.hg/strip-backup/67a385d4e6f2-b9243789-backup.hg (glob) $ hg theads 2: 'C' $ hg strip 2 1 0 files updated, 0 files merged, 2 files removed, 0 files unresolved saved backup bundle to $TESTTMP/a3/c/.hg/strip-backup/6c81ed0049f8-a687065f-backup.hg (glob) $ hg theads 0: 'A' Make sure rebase does not break for phase/filter related reason ---------------------------------------------------------------- (issue3858) $ cd .. $ cat >> $HGRCPATH << EOF > [ui] > logtemplate={rev} {desc} {phase}\n > EOF $ hg init c4 $ cd c4 $ echo a > a $ hg ci -Am A adding a $ echo b > b $ hg ci -Am B adding b $ hg up 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo c > c $ hg ci -Am C adding c created new head $ hg up 1 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m d $ hg up 2 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo e > e $ hg ci -Am E adding e created new head $ hg merge 3 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m F $ hg up 3 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo g > g $ hg ci -Am G adding g created new head $ echo h > h $ hg ci -Am H adding h $ hg up 5 1 files updated, 0 files merged, 2 files removed, 0 files unresolved $ echo i > i $ hg ci -Am I adding i Turn most changeset public $ hg ph -p 7 $ hg heads 8 I draft 7 H public $ hg log -G @ 8 I draft | | o 7 H public | | | o 6 G public | 
| o | 5 F draft |\| o | 4 E draft | | | o 3 d public |/| o | 2 C public | | | o 1 B public |/ o 0 A public $ cat > $TESTTMP/checkeditform.sh < env | grep HGEDITFORM > true > EOF $ HGEDITOR="sh $TESTTMP/checkeditform.sh" hg rebase --dest 7 --source 5 -e rebasing 5:361a99976cc9 "F" HGEDITFORM=rebase.merge rebasing 8:326cfedc031c "I" (tip) HGEDITFORM=rebase.normal saved backup bundle to $TESTTMP/a3/c4/.hg/strip-backup/361a99976cc9-35e980d0-backup.hg (glob) mercurial-3.7.3/tests/hghave0000755000175000017500000000335712676531525015471 0ustar mpmmpm00000000000000#!/usr/bin/env python """Test the running system for features availability. Exit with zero if all features are there, non-zero otherwise. If a feature name is prefixed with "no-", the absence of feature is tested. """ import optparse import os, sys import hghave checks = hghave.checks def list_features(): for name, feature in sorted(checks.iteritems()): desc = feature[1] print name + ':', desc def test_features(): failed = 0 for name, feature in checks.iteritems(): check, _ = feature try: check() except Exception, e: print "feature %s failed: %s" % (name, e) failed += 1 return failed parser = optparse.OptionParser("%prog [options] [features]") parser.add_option("--test-features", action="store_true", help="test available features") parser.add_option("--list-features", action="store_true", help="list available features") def _loadaddon(): if 'TESTDIR' in os.environ: # loading from '.' isn't needed, because `hghave` should be # running at TESTTMP in this case path = os.environ['TESTDIR'] else: path = '.' 
if not os.path.exists(os.path.join(path, 'hghaveaddon.py')): return sys.path.insert(0, path) try: import hghaveaddon except BaseException, inst: sys.stderr.write('failed to import hghaveaddon.py from %r: %s\n' % (path, inst)) sys.exit(2) sys.path.pop(0) if __name__ == '__main__': options, args = parser.parse_args() _loadaddon() if options.list_features: list_features() sys.exit(0) if options.test_features: sys.exit(test_features()) hghave.require(args) mercurial-3.7.3/tests/dummyssh0000755000175000017500000000100412676531525016063 0ustar mpmmpm00000000000000#!/usr/bin/env python import sys import os os.chdir(os.getenv('TESTTMP')) if sys.argv[1] != "user@dummy": sys.exit(-1) os.environ["SSH_CLIENT"] = "127.0.0.1 1 2" log = open("dummylog", "ab") log.write("Got arguments") for i, arg in enumerate(sys.argv[1:]): log.write(" %d:%s" % (i + 1, arg)) log.write("\n") log.close() hgcmd = sys.argv[2] if os.name == 'nt': # hack to make simple unix single quote quoting work on windows hgcmd = hgcmd.replace("'", '"') r = os.system(hgcmd) sys.exit(bool(r)) mercurial-3.7.3/tests/test-demandimport.py0000644000175000017500000000235312676531525020306 0ustar mpmmpm00000000000000from mercurial import demandimport demandimport.enable() import os if os.name != 'nt': try: import distutils.msvc9compiler print ('distutils.msvc9compiler needs to be an immediate ' 'importerror on non-windows platforms') distutils.msvc9compiler except ImportError: pass import re rsub = re.sub def f(obj): l = repr(obj) l = rsub("0x[0-9a-fA-F]+", "0x?", l) l = rsub("from '.*'", "from '?'", l) l = rsub("'<[a-z]*>'", "''", l) return l import os print "os =", f(os) print "os.system =", f(os.system) print "os =", f(os) from mercurial import util print "util =", f(util) print "util.system =", f(util.system) print "util =", f(util) print "util.system =", f(util.system) from mercurial import hgweb print "hgweb =", f(hgweb) print "hgweb_mod =", f(hgweb.hgweb_mod) print "hgweb =", f(hgweb) import re as fred print "fred 
=", f(fred) import sys as re print "re =", f(re) print "fred =", f(fred) print "fred.sub =", f(fred.sub) print "fred =", f(fred) print "re =", f(re) print "re.stderr =", f(re.stderr) print "re =", f(re) demandimport.disable() os.environ['HGDEMANDIMPORT'] = 'disable' demandimport.enable() from mercurial import node print "node =", f(node) mercurial-3.7.3/tests/test-merge-subrepos.t0000644000175000017500000000066612676531525020402 0ustar mpmmpm00000000000000 $ hg init $ echo a > a $ hg ci -qAm 'add a' $ hg init subrepo $ echo 'subrepo = http://example.net/libfoo' > .hgsub $ hg ci -qAm 'added subrepo' $ hg up -qC 0 $ echo ax > a $ hg ci -m 'changed a' created new head $ hg up -qC 1 $ cd subrepo $ echo b > b $ hg add b $ cd .. Should fail, since there are added files to subrepo: $ hg merge abort: uncommitted changes in subrepository 'subrepo' [255] mercurial-3.7.3/tests/test-convert-p4-filetypes.t0000644000175000017500000005217312676531525021446 0ustar mpmmpm00000000000000#require p4 execbit symlink $ echo "[extensions]" >> $HGRCPATH $ echo "convert = " >> $HGRCPATH create p4 depot $ P4ROOT=`pwd`/depot; export P4ROOT $ P4AUDIT=$P4ROOT/audit; export P4AUDIT $ P4JOURNAL=$P4ROOT/journal; export P4JOURNAL $ P4LOG=$P4ROOT/log; export P4LOG $ P4PORT=localhost:$HGPORT; export P4PORT $ P4DEBUG=1; export P4DEBUG $ P4CHARSET=utf8; export P4CHARSET start the p4 server $ [ ! -d $P4ROOT ] && mkdir $P4ROOT $ p4d -f -J off -xi >$P4ROOT/stdout 2>$P4ROOT/stderr $ p4d -f -J off >$P4ROOT/stdout 2>$P4ROOT/stderr & $ echo $! >> $DAEMON_PIDS $ trap "echo stopping the p4 server ; p4 admin stop" EXIT wait for the server to initialize $ while ! p4 ; do > sleep 1 > done >/dev/null 2>/dev/null create a client spec $ P4CLIENT=hg-p4-import; export P4CLIENT $ DEPOTPATH=//depot/test-mercurial-import/... $ p4 client -o | sed '/^View:/,$ d' >p4client $ echo View: >>p4client $ echo " $DEPOTPATH //$P4CLIENT/..." 
>>p4client $ p4 client -i T2=`echo $T | tr [:upper:] [:lower:]` > case $T in > apple) > ;; > symlink*) > echo "this is target $T" >target_$T2 > ln -s target_$T file_$T2 > p4 add target_$T2 > p4 add -t $T file_$T2 > ;; > binary*) > $PYTHON -c "file('file_$T2', 'wb').write('this is $T')" > p4 add -t $T file_$T2 > ;; > *) > echo "this is $T" >file_$T2 > p4 add -t $T file_$T2 > ;; > esac > done //depot/test-mercurial-import/file_text#1 - opened for add //depot/test-mercurial-import/file_binary#1 - opened for add //depot/test-mercurial-import/target_symlink#1 - opened for add //depot/test-mercurial-import/file_symlink#1 - opened for add //depot/test-mercurial-import/file_text+m#1 - opened for add //depot/test-mercurial-import/file_text+w#1 - opened for add //depot/test-mercurial-import/file_text+x#1 - opened for add //depot/test-mercurial-import/file_text+k#1 - opened for add //depot/test-mercurial-import/file_text+kx#1 - opened for add //depot/test-mercurial-import/file_text+ko#1 - opened for add //depot/test-mercurial-import/file_text+l#1 - opened for add //depot/test-mercurial-import/file_text+c#1 - opened for add //depot/test-mercurial-import/file_text+d#1 - opened for add //depot/test-mercurial-import/file_text+f#1 - opened for add //depot/test-mercurial-import/file_text+s#1 - opened for add //depot/test-mercurial-import/file_text+s2#1 - opened for add //depot/test-mercurial-import/file_binary+k#1 - opened for add //depot/test-mercurial-import/file_binary+x#1 - opened for add //depot/test-mercurial-import/file_binary+kx#1 - opened for add //depot/test-mercurial-import/target_symlink+k#1 - opened for add //depot/test-mercurial-import/file_symlink+k#1 - opened for add //depot/test-mercurial-import/file_ctext#1 - opened for add //depot/test-mercurial-import/file_cxtext#1 - opened for add //depot/test-mercurial-import/file_ktext#1 - opened for add //depot/test-mercurial-import/file_kxtext#1 - opened for add //depot/test-mercurial-import/file_ltext#1 - opened for add 
//depot/test-mercurial-import/file_tempobj#1 - opened for add //depot/test-mercurial-import/file_ubinary#1 - opened for add //depot/test-mercurial-import/file_uxbinary#1 - opened for add //depot/test-mercurial-import/file_xbinary#1 - opened for add //depot/test-mercurial-import/file_xltext#1 - opened for add //depot/test-mercurial-import/file_xtempobj#1 - opened for add //depot/test-mercurial-import/file_xtext#1 - opened for add $ p4 submit -d initial Submitting change 1. Locking 33 files ... add //depot/test-mercurial-import/file_binary#1 add //depot/test-mercurial-import/file_binary+k#1 add //depot/test-mercurial-import/file_binary+kx#1 add //depot/test-mercurial-import/file_binary+x#1 add //depot/test-mercurial-import/file_ctext#1 add //depot/test-mercurial-import/file_cxtext#1 add //depot/test-mercurial-import/file_ktext#1 add //depot/test-mercurial-import/file_kxtext#1 add //depot/test-mercurial-import/file_ltext#1 add //depot/test-mercurial-import/file_symlink#1 add //depot/test-mercurial-import/file_symlink+k#1 add //depot/test-mercurial-import/file_tempobj#1 add //depot/test-mercurial-import/file_text#1 add //depot/test-mercurial-import/file_text+c#1 add //depot/test-mercurial-import/file_text+d#1 add //depot/test-mercurial-import/file_text+f#1 add //depot/test-mercurial-import/file_text+k#1 add //depot/test-mercurial-import/file_text+ko#1 add //depot/test-mercurial-import/file_text+kx#1 add //depot/test-mercurial-import/file_text+l#1 add //depot/test-mercurial-import/file_text+m#1 add //depot/test-mercurial-import/file_text+s#1 add //depot/test-mercurial-import/file_text+s2#1 add //depot/test-mercurial-import/file_text+w#1 add //depot/test-mercurial-import/file_text+x#1 add //depot/test-mercurial-import/file_ubinary#1 add //depot/test-mercurial-import/file_uxbinary#1 add //depot/test-mercurial-import/file_xbinary#1 add //depot/test-mercurial-import/file_xltext#1 add //depot/test-mercurial-import/file_xtempobj#1 add 
//depot/test-mercurial-import/file_xtext#1 add //depot/test-mercurial-import/target_symlink#1 add //depot/test-mercurial-import/target_symlink+k#1 Change 1 submitted. //depot/test-mercurial-import/file_binary+k#1 - refreshing //depot/test-mercurial-import/file_binary+kx#1 - refreshing //depot/test-mercurial-import/file_ktext#1 - refreshing //depot/test-mercurial-import/file_kxtext#1 - refreshing //depot/test-mercurial-import/file_symlink+k#1 - refreshing //depot/test-mercurial-import/file_text+k#1 - refreshing //depot/test-mercurial-import/file_text+ko#1 - refreshing //depot/test-mercurial-import/file_text+kx#1 - refreshing test keyword expansion $ p4 edit file_* target_* //depot/test-mercurial-import/file_binary#1 - opened for edit //depot/test-mercurial-import/file_binary+k#1 - opened for edit //depot/test-mercurial-import/file_binary+kx#1 - opened for edit //depot/test-mercurial-import/file_binary+x#1 - opened for edit //depot/test-mercurial-import/file_ctext#1 - opened for edit //depot/test-mercurial-import/file_cxtext#1 - opened for edit //depot/test-mercurial-import/file_ktext#1 - opened for edit //depot/test-mercurial-import/file_kxtext#1 - opened for edit //depot/test-mercurial-import/file_ltext#1 - opened for edit //depot/test-mercurial-import/file_symlink#1 - opened for edit //depot/test-mercurial-import/file_symlink+k#1 - opened for edit //depot/test-mercurial-import/file_tempobj#1 - opened for edit //depot/test-mercurial-import/file_text#1 - opened for edit //depot/test-mercurial-import/file_text+c#1 - opened for edit //depot/test-mercurial-import/file_text+d#1 - opened for edit //depot/test-mercurial-import/file_text+f#1 - opened for edit //depot/test-mercurial-import/file_text+k#1 - opened for edit //depot/test-mercurial-import/file_text+ko#1 - opened for edit //depot/test-mercurial-import/file_text+kx#1 - opened for edit //depot/test-mercurial-import/file_text+l#1 - opened for edit //depot/test-mercurial-import/file_text+m#1 - opened for edit 
//depot/test-mercurial-import/file_text+s#1 - opened for edit //depot/test-mercurial-import/file_text+s2#1 - opened for edit //depot/test-mercurial-import/file_text+w#1 - opened for edit //depot/test-mercurial-import/file_text+x#1 - opened for edit //depot/test-mercurial-import/file_ubinary#1 - opened for edit //depot/test-mercurial-import/file_uxbinary#1 - opened for edit //depot/test-mercurial-import/file_xbinary#1 - opened for edit //depot/test-mercurial-import/file_xltext#1 - opened for edit //depot/test-mercurial-import/file_xtempobj#1 - opened for edit //depot/test-mercurial-import/file_xtext#1 - opened for edit //depot/test-mercurial-import/target_symlink#1 - opened for edit //depot/test-mercurial-import/target_symlink+k#1 - opened for edit $ for T in $TYPES ; do > T2=`echo $T | tr [:upper:] [:lower:]` > echo '$Id$' >>file_$T2 > echo '$Header$' >>file_$T2 > echo '$Date$' >>file_$T2 > echo '$DateTime$' >>file_$T2 > echo '$Change$' >>file_$T2 > echo '$File$' >>file_$T2 > echo '$Revision$' >>file_$T2 > echo '$Header$$Header$Header$' >>file_$T2 > done $ ln -s 'target_$Header$' crazy_symlink+k $ p4 add -t symlink+k crazy_symlink+k //depot/test-mercurial-import/crazy_symlink+k#1 - opened for add $ p4 submit -d keywords Submitting change 2. Locking 34 files ... 
add //depot/test-mercurial-import/crazy_symlink+k#1 edit //depot/test-mercurial-import/file_binary#2 edit //depot/test-mercurial-import/file_binary+k#2 edit //depot/test-mercurial-import/file_binary+kx#2 edit //depot/test-mercurial-import/file_binary+x#2 edit //depot/test-mercurial-import/file_ctext#2 edit //depot/test-mercurial-import/file_cxtext#2 edit //depot/test-mercurial-import/file_ktext#2 edit //depot/test-mercurial-import/file_kxtext#2 edit //depot/test-mercurial-import/file_ltext#2 edit //depot/test-mercurial-import/file_symlink#2 edit //depot/test-mercurial-import/file_symlink+k#2 edit //depot/test-mercurial-import/file_tempobj#2 edit //depot/test-mercurial-import/file_text#2 edit //depot/test-mercurial-import/file_text+c#2 edit //depot/test-mercurial-import/file_text+d#2 edit //depot/test-mercurial-import/file_text+f#2 edit //depot/test-mercurial-import/file_text+k#2 edit //depot/test-mercurial-import/file_text+ko#2 edit //depot/test-mercurial-import/file_text+kx#2 edit //depot/test-mercurial-import/file_text+l#2 edit //depot/test-mercurial-import/file_text+m#2 edit //depot/test-mercurial-import/file_text+s#2 edit //depot/test-mercurial-import/file_text+s2#2 edit //depot/test-mercurial-import/file_text+w#2 edit //depot/test-mercurial-import/file_text+x#2 edit //depot/test-mercurial-import/file_ubinary#2 edit //depot/test-mercurial-import/file_uxbinary#2 edit //depot/test-mercurial-import/file_xbinary#2 edit //depot/test-mercurial-import/file_xltext#2 edit //depot/test-mercurial-import/file_xtempobj#2 edit //depot/test-mercurial-import/file_xtext#2 edit //depot/test-mercurial-import/target_symlink#2 edit //depot/test-mercurial-import/target_symlink+k#2 Change 2 submitted. 
//depot/test-mercurial-import/crazy_symlink+k#1 - refreshing //depot/test-mercurial-import/file_binary+k#2 - refreshing //depot/test-mercurial-import/file_binary+kx#2 - refreshing //depot/test-mercurial-import/file_ktext#2 - refreshing //depot/test-mercurial-import/file_kxtext#2 - refreshing //depot/test-mercurial-import/file_symlink+k#2 - refreshing //depot/test-mercurial-import/file_text+k#2 - refreshing //depot/test-mercurial-import/file_text+ko#2 - refreshing //depot/test-mercurial-import/file_text+kx#2 - refreshing check keywords in p4 $ grep -H Header file_* file_binary:$Header$ file_binary:$Header$$Header$Header$ file_binary+k:$Header: //depot/test-mercurial-import/file_binary+k#2 $ file_binary+k:$Header: //depot/test-mercurial-import/file_binary+k#2 $$Header: //depot/test-mercurial-import/file_binary+k#2 $Header$ file_binary+kx:$Header: //depot/test-mercurial-import/file_binary+kx#2 $ file_binary+kx:$Header: //depot/test-mercurial-import/file_binary+kx#2 $$Header: //depot/test-mercurial-import/file_binary+kx#2 $Header$ file_binary+x:$Header$ file_binary+x:$Header$$Header$Header$ file_ctext:$Header$ file_ctext:$Header$$Header$Header$ file_cxtext:$Header$ file_cxtext:$Header$$Header$Header$ file_ktext:$Header: //depot/test-mercurial-import/file_ktext#2 $ file_ktext:$Header: //depot/test-mercurial-import/file_ktext#2 $$Header: //depot/test-mercurial-import/file_ktext#2 $Header$ file_kxtext:$Header: //depot/test-mercurial-import/file_kxtext#2 $ file_kxtext:$Header: //depot/test-mercurial-import/file_kxtext#2 $$Header: //depot/test-mercurial-import/file_kxtext#2 $Header$ file_ltext:$Header$ file_ltext:$Header$$Header$Header$ file_symlink:$Header$ file_symlink:$Header$$Header$Header$ file_symlink+k:$Header$ file_symlink+k:$Header$$Header$Header$ file_tempobj:$Header$ file_tempobj:$Header$$Header$Header$ file_text:$Header$ file_text:$Header$$Header$Header$ file_text+c:$Header$ file_text+c:$Header$$Header$Header$ file_text+d:$Header$ 
file_text+d:$Header$$Header$Header$ file_text+f:$Header$ file_text+f:$Header$$Header$Header$ file_text+k:$Header: //depot/test-mercurial-import/file_text+k#2 $ file_text+k:$Header: //depot/test-mercurial-import/file_text+k#2 $$Header: //depot/test-mercurial-import/file_text+k#2 $Header$ file_text+ko:$Header: //depot/test-mercurial-import/file_text+ko#2 $ file_text+ko:$Header: //depot/test-mercurial-import/file_text+ko#2 $$Header: //depot/test-mercurial-import/file_text+ko#2 $Header$ file_text+kx:$Header: //depot/test-mercurial-import/file_text+kx#2 $ file_text+kx:$Header: //depot/test-mercurial-import/file_text+kx#2 $$Header: //depot/test-mercurial-import/file_text+kx#2 $Header$ file_text+l:$Header$ file_text+l:$Header$$Header$Header$ file_text+m:$Header$ file_text+m:$Header$$Header$Header$ file_text+s:$Header$ file_text+s:$Header$$Header$Header$ file_text+s2:$Header$ file_text+s2:$Header$$Header$Header$ file_text+w:$Header$ file_text+w:$Header$$Header$Header$ file_text+x:$Header$ file_text+x:$Header$$Header$Header$ file_ubinary:$Header$ file_ubinary:$Header$$Header$Header$ file_uxbinary:$Header$ file_uxbinary:$Header$$Header$Header$ file_xbinary:$Header$ file_xbinary:$Header$$Header$Header$ file_xltext:$Header$ file_xltext:$Header$$Header$Header$ file_xtempobj:$Header$ file_xtempobj:$Header$$Header$Header$ file_xtext:$Header$ file_xtext:$Header$$Header$Header$ convert $ hg convert -s p4 $DEPOTPATH dst initializing destination dst repository reading p4 views collecting p4 changelists 1 initial 2 keywords scanning source... sorting... converting... 
1 initial 0 keywords $ hg -R dst log --template 'rev={rev} desc="{desc}" tags="{tags}" files="{files}"\n' rev=1 desc="keywords" tags="tip" files="crazy_symlink+k file_binary file_binary+k file_binary+kx file_binary+x file_ctext file_cxtext file_ktext file_kxtext file_ltext file_text file_text+c file_text+d file_text+f file_text+k file_text+ko file_text+kx file_text+l file_text+m file_text+s file_text+s2 file_text+w file_text+x file_ubinary file_uxbinary file_xbinary file_xltext file_xtext target_symlink target_symlink+k" rev=0 desc="initial" tags="" files="file_binary file_binary+k file_binary+kx file_binary+x file_ctext file_cxtext file_ktext file_kxtext file_ltext file_symlink file_symlink+k file_text file_text+c file_text+d file_text+f file_text+k file_text+ko file_text+kx file_text+l file_text+m file_text+s2 file_text+w file_text+x file_ubinary file_uxbinary file_xbinary file_xltext file_xtext target_symlink target_symlink+k" revision 0 $ hg -R dst update 0 30 files updated, 0 files merged, 0 files removed, 0 files unresolved $ head dst/file_* | cat -v ==> dst/file_binary <== this is binary ==> dst/file_binary+k <== this is binary+k ==> dst/file_binary+kx <== this is binary+kx ==> dst/file_binary+x <== this is binary+x ==> dst/file_ctext <== this is ctext ==> dst/file_cxtext <== this is cxtext ==> dst/file_ktext <== this is ktext ==> dst/file_kxtext <== this is kxtext ==> dst/file_ltext <== this is ltext ==> dst/file_symlink <== this is target symlink ==> dst/file_symlink+k <== this is target symlink+k ==> dst/file_text <== this is text ==> dst/file_text+c <== this is text+C ==> dst/file_text+d <== this is text+D ==> dst/file_text+f <== this is text+F ==> dst/file_text+k <== this is text+k ==> dst/file_text+ko <== this is text+ko ==> dst/file_text+kx <== this is text+kx ==> dst/file_text+l <== this is text+l ==> dst/file_text+m <== this is text+m ==> dst/file_text+s2 <== this is text+S2 ==> dst/file_text+w <== this is text+w ==> dst/file_text+x <== this is 
text+x ==> dst/file_ubinary <== this is ubinary ==> dst/file_uxbinary <== this is uxbinary ==> dst/file_xbinary <== this is xbinary ==> dst/file_xltext <== this is xltext ==> dst/file_xtext <== this is xtext revision 1 $ hg -R dst update 1 30 files updated, 0 files merged, 0 files removed, 0 files unresolved $ head dst/file_* | cat -v ==> dst/file_binary <== this is binary$Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_binary+k <== this is binary+k$Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_binary+kx <== this is binary+kx$Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_binary+x <== this is binary+x$Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_ctext <== this is ctext $Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_cxtext <== this is cxtext $Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_ktext <== this is ktext $Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_kxtext <== this is kxtext $Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_ltext <== this is ltext $Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_symlink <== this is target symlink $Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_symlink+k <== this is target symlink+k $Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_text <== this is text $Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_text+c <== this is text+C $Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_text+d <== this is text+D $Id$ $Header$ 
$Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_text+f <== this is text+F $Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_text+k <== this is text+k $Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_text+ko <== this is text+ko $Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_text+kx <== this is text+kx $Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_text+l <== this is text+l $Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_text+m <== this is text+m $Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_text+s <== this is text+S $Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_text+s2 <== this is text+S2 $Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_text+w <== this is text+w $Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_text+x <== this is text+x $Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_ubinary <== this is ubinary $Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_uxbinary <== this is uxbinary $Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_xbinary <== this is xbinary $Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_xltext <== this is xltext $Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ ==> dst/file_xtext <== this is xtext $Id$ $Header$ $Date$ $DateTime$ $Change$ $File$ $Revision$ $Header$$Header$Header$ crazy_symlink $ readlink crazy_symlink+k target_$Header: 
//depot/test-mercurial-import/crazy_symlink+k#1 $ $ readlink dst/crazy_symlink+k target_$Header$ exit trap: stopping the p4 server mercurial-3.7.3/tests/hgterm.ti0000644000175000017500000000241012676531525016112 0ustar mpmmpm00000000000000hgterm, am, km, mir, msgr, xenl, colors#8, cols#80, it#8, lines#24, pairs#64, acsc=``aaffggiijjkkllmmnnooppqqrrssttuuvvwwxxyyzz{{||}}~~, bel=^G, bold=\E[1m, clear=\E[H\E[2J, cr=\r, csr=\E[%i%p1%d;%p2%dr, cub=\E[%p1%dD, cub1=\b, cud=\E[%p1%dB, cud1=\n, cuf=\E[%p1%dC, cuf1=\E[C, cup=\E[%i%p1%d;%p2%dH, cuu=\E[%p1%dA, cuu1=\E[A, dch=\E[%p1%dP, dch1=\E[P, dl=\E[%p1%dM, dl1=\E[M, ed=\E[J, el=\E[K, enacs=\E)0, home=\E[H, ht=\t, hts=\EH, il=\E[%p1%dL, il1=\E[L, ind=\n, is2=\E[m\E[?7h\E[4l\E>\E7\E[r\E[?1;3;4;6l\E8, kbs=\b, kcub1=\EOD, kcud1=\EOB, kcuf1=\EOC, kcuu1=\EOA, kdch1=\E[3~, kf1=\E[11~, kf10=\E[21~, kf11=\E[23~, kf12=\E[24~, kf13=\E[25~, kf14=\E[26~, kf15=\E[28~, kf16=\E[29~, kf17=\E[31~, kf18=\E[32~, kf19=\E[33~, kf2=\E[12~, kf20=\E[34~, kf3=\E[13~, kf4=\E[14~, kf5=\E[15~, kf6=\E[17~, kf7=\E[18~, kf8=\E[19~, kf9=\E[20~, kfnd=\E[1~, kich1=\E[2~, kmous=\E[M, knp=\E[6~, kpp=\E[5~, kslt=\E[4~, op=\E[m, rc=\E8, rev=\E[7m, ri=\EM, rmacs=^O, rmcup=\E[2J\E[?47l\E8, rmir=\E[4l, rmkx=\E[?1l\E>, rmso=\E[m, rmul=\E[m, rs2=\E[m\E[?7h\E[4l\E>\E7\E[r\E[?1;3;4;6l\E8, sc=\E7, setab=\E[4%p1%dm, setaf=\E[3%p1%dm, sgr0=\E[m, smacs=^N, smcup=\E7\E[?47h, smir=\E[4h, smkx=\E[?1h\E=, smso=\E[7m, smul=\E[4m, tbc=\E[3g, u6=\E[%i%d;%dR, u7=\E[6n, u8=\E[?1;2c, u9=\E[c, mercurial-3.7.3/tests/hgweberror.py0000644000175000017500000000112412676531525017007 0ustar mpmmpm00000000000000# A dummy extension that installs an hgweb command that throws an Exception. from __future__ import absolute_import from mercurial.hgweb import ( webcommands, ) def raiseerror(web, req, tmpl): '''Dummy web command that raises an uncaught Exception.''' # Simulate an error after partial response. 
if 'partialresponse' in req.form: req.respond(200, 'text/plain') req.write('partial content\n') raise AttributeError('I am an uncaught error!') def extsetup(ui): setattr(webcommands, 'raiseerror', raiseerror) webcommands.__all__.append('raiseerror') mercurial-3.7.3/tests/heredoctest.py0000644000175000017500000000077612676531525017166 0ustar mpmmpm00000000000000from __future__ import absolute_import import sys globalvars = {} lines = sys.stdin.readlines() while lines: l = lines.pop(0) if l.startswith('SALT'): print(l[:-1]) elif l.startswith('>>> '): snippet = l[4:] while lines and lines[0].startswith('... '): l = lines.pop(0) snippet += l[4:] c = compile(snippet, '', 'single') try: exec(c, globalvars) except Exception as inst: print(repr(inst)) mercurial-3.7.3/tests/test-run-tests.t0000644000175000017500000004362612676531525017412 0ustar mpmmpm00000000000000This file tests the behavior of run-tests.py itself. Avoid interference from actual test env: $ unset HGTEST_JOBS $ unset HGTEST_TIMEOUT $ unset HGTEST_PORT $ unset HGTEST_SHELL Smoke test with install ============ $ run-tests.py $HGTEST_RUN_TESTS_PURE -l # Ran 0 tests, 0 skipped, 0 warned, 0 failed. Define a helper to avoid the install step ============= $ rt() > { > run-tests.py --with-hg=`which hg` "$@" > } a succesful test ======================= $ cat > test-success.t << EOF > $ echo babar > babar > $ echo xyzzy > never happens (?) > xyzzy > nor this (?) > EOF $ rt . # Ran 1 tests, 0 skipped, 0 warned, 0 failed. failing test ================== $ cat > test-failure.t << EOF > $ echo babar > rataxes > This is a noop statement so that > this test is still more bytes than success. 
> EOF >>> fh = open('test-failure-unicode.t', 'wb') >>> fh.write(u' $ echo babar\u03b1\n'.encode('utf-8')) and None >>> fh.write(u' l\u03b5\u03b5t\n'.encode('utf-8')) and None $ rt --- $TESTTMP/test-failure.t +++ $TESTTMP/test-failure.t.err @@ -1,4 +1,4 @@ $ echo babar - rataxes + babar This is a noop statement so that this test is still more bytes than success. ERROR: test-failure.t output changed !. --- $TESTTMP/test-failure-unicode.t +++ $TESTTMP/test-failure-unicode.t.err @@ -1,2 +1,2 @@ $ echo babar\xce\xb1 (esc) - l\xce\xb5\xce\xb5t (esc) + babar\xce\xb1 (esc) ERROR: test-failure-unicode.t output changed ! Failed test-failure.t: output changed Failed test-failure-unicode.t: output changed # Ran 3 tests, 0 skipped, 0 warned, 2 failed. python hash seed: * (glob) [1] test --xunit support $ rt --xunit=xunit.xml --- $TESTTMP/test-failure.t +++ $TESTTMP/test-failure.t.err @@ -1,4 +1,4 @@ $ echo babar - rataxes + babar This is a noop statement so that this test is still more bytes than success. ERROR: test-failure.t output changed !. --- $TESTTMP/test-failure-unicode.t +++ $TESTTMP/test-failure-unicode.t.err @@ -1,2 +1,2 @@ $ echo babar\xce\xb1 (esc) - l\xce\xb5\xce\xb5t (esc) + babar\xce\xb1 (esc) ERROR: test-failure-unicode.t output changed ! Failed test-failure.t: output changed Failed test-failure-unicode.t: output changed # Ran 3 tests, 0 skipped, 0 warned, 2 failed. python hash seed: * (glob) [1] $ cat xunit.xml (glob) (glob) (glob) $ rm test-failure-unicode.t test for --retest ==================== $ rt --retest --- $TESTTMP/test-failure.t +++ $TESTTMP/test-failure.t.err @@ -1,4 +1,4 @@ $ echo babar - rataxes + babar This is a noop statement so that this test is still more bytes than success. ERROR: test-failure.t output changed ! Failed test-failure.t: output changed # Ran 2 tests, 1 skipped, 0 warned, 1 failed. python hash seed: * (glob) [1] Selecting Tests To Run ====================== successful $ rt test-success.t . 
# Ran 1 tests, 0 skipped, 0 warned, 0 failed. success w/ keyword $ rt -k xyzzy . # Ran 2 tests, 1 skipped, 0 warned, 0 failed. failed $ rt test-failure.t --- $TESTTMP/test-failure.t +++ $TESTTMP/test-failure.t.err @@ -1,4 +1,4 @@ $ echo babar - rataxes + babar This is a noop statement so that this test is still more bytes than success. ERROR: test-failure.t output changed ! Failed test-failure.t: output changed # Ran 1 tests, 0 skipped, 0 warned, 1 failed. python hash seed: * (glob) [1] failure w/ keyword $ rt -k rataxes --- $TESTTMP/test-failure.t +++ $TESTTMP/test-failure.t.err @@ -1,4 +1,4 @@ $ echo babar - rataxes + babar This is a noop statement so that this test is still more bytes than success. ERROR: test-failure.t output changed ! Failed test-failure.t: output changed # Ran 2 tests, 1 skipped, 0 warned, 1 failed. python hash seed: * (glob) [1] Verify that when a process fails to start we show a useful message ================================================================== $ cat > test-serve-fail.t < $ echo 'abort: child process failed to start blah' > EOF $ rt test-serve-fail.t ERROR: test-serve-fail.t output changed ! Failed test-serve-fail.t: server failed to start (HGPORT=*) (glob) # Ran 1 tests, 0 skipped, 0 warned, 1 failed. python hash seed: * (glob) [1] $ rm test-serve-fail.t Verify that we can try other ports =================================== $ hg init inuse $ hg serve -R inuse -p $HGPORT -d --pid-file=blocks.pid $ cat blocks.pid >> $DAEMON_PIDS $ cat > test-serve-inuse.t < $ hg serve -R `pwd`/inuse -p \$HGPORT -d --pid-file=hg.pid > $ cat hg.pid >> \$DAEMON_PIDS > EOF $ rt test-serve-inuse.t . # Ran 1 tests, 0 skipped, 0 warned, 0 failed. 
$ rm test-serve-inuse.t Running In Debug Mode ====================== $ rt --debug 2>&1 | grep -v pwd + echo *SALT* 0 0 (glob) *SALT* 0 0 (glob) + echo babar babar + echo *SALT* 4 0 (glob) *SALT* 4 0 (glob) *+ echo *SALT* 0 0 (glob) *SALT* 0 0 (glob) + echo babar babar + echo *SALT* 2 0 (glob) *SALT* 2 0 (glob) + echo xyzzy xyzzy + echo *SALT* 6 0 (glob) *SALT* 6 0 (glob) . # Ran 2 tests, 0 skipped, 0 warned, 0 failed. Parallel runs ============== (duplicate the failing test to get predictable output) $ cp test-failure.t test-failure-copy.t $ rt --jobs 2 test-failure*.t -n !! Failed test-failure*.t: output changed (glob) Failed test-failure*.t: output changed (glob) # Ran 2 tests, 0 skipped, 0 warned, 2 failed. python hash seed: * (glob) [1] failures in parallel with --first should only print one failure >>> f = open('test-nothing.t', 'w') >>> f.write('foo\n' * 1024) and None >>> f.write(' $ sleep 1') and None $ rt --jobs 2 --first --- $TESTTMP/test-failure*.t (glob) +++ $TESTTMP/test-failure*.t.err (glob) @@ -1,4 +1,4 @@ $ echo babar - rataxes + babar This is a noop statement so that this test is still more bytes than success. Failed test-failure*.t: output changed (glob) Failed test-nothing.t: output changed # Ran 2 tests, 0 skipped, 0 warned, 2 failed. python hash seed: * (glob) [1] (delete the duplicated test file) $ rm test-failure-copy.t test-nothing.t Interactive run =============== (backup the failing test) $ cp test-failure.t backup Refuse the fix $ echo 'n' | rt -i --- $TESTTMP/test-failure.t +++ $TESTTMP/test-failure.t.err @@ -1,4 +1,4 @@ $ echo babar - rataxes + babar This is a noop statement so that this test is still more bytes than success. Accept this change? [n] ERROR: test-failure.t output changed !. Failed test-failure.t: output changed # Ran 2 tests, 0 skipped, 0 warned, 1 failed. python hash seed: * (glob) [1] $ cat test-failure.t $ echo babar rataxes This is a noop statement so that this test is still more bytes than success. 
Interactive with custom view $ echo 'n' | rt -i --view echo $TESTTMP/test-failure.t $TESTTMP/test-failure.t.err (glob) Accept this change? [n]* (glob) ERROR: test-failure.t output changed !. Failed test-failure.t: output changed # Ran 2 tests, 0 skipped, 0 warned, 1 failed. python hash seed: * (glob) [1] View the fix $ echo 'y' | rt --view echo $TESTTMP/test-failure.t $TESTTMP/test-failure.t.err (glob) ERROR: test-failure.t output changed !. Failed test-failure.t: output changed # Ran 2 tests, 0 skipped, 0 warned, 1 failed. python hash seed: * (glob) [1] Accept the fix $ echo " $ echo 'saved backup bundle to \$TESTTMP/foo.hg'" >> test-failure.t $ echo " saved backup bundle to \$TESTTMP/foo.hg" >> test-failure.t $ echo " $ echo 'saved backup bundle to \$TESTTMP/foo.hg'" >> test-failure.t $ echo " saved backup bundle to \$TESTTMP/foo.hg (glob)" >> test-failure.t $ echo " $ echo 'saved backup bundle to \$TESTTMP/foo.hg'" >> test-failure.t $ echo " saved backup bundle to \$TESTTMP/*.hg (glob)" >> test-failure.t $ echo 'y' | rt -i 2>&1 --- $TESTTMP/test-failure.t +++ $TESTTMP/test-failure.t.err @@ -1,9 +1,9 @@ $ echo babar - rataxes + babar This is a noop statement so that this test is still more bytes than success. $ echo 'saved backup bundle to $TESTTMP/foo.hg' - saved backup bundle to $TESTTMP/foo.hg + saved backup bundle to $TESTTMP/foo.hg* (glob) $ echo 'saved backup bundle to $TESTTMP/foo.hg' saved backup bundle to $TESTTMP/foo.hg* (glob) $ echo 'saved backup bundle to $TESTTMP/foo.hg' Accept this change? [n] .. # Ran 2 tests, 0 skipped, 0 warned, 0 failed. $ sed -e 's,(glob)$,&<,g' test-failure.t $ echo babar babar This is a noop statement so that this test is still more bytes than success. 
$ echo 'saved backup bundle to $TESTTMP/foo.hg' saved backup bundle to $TESTTMP/foo.hg (glob)< $ echo 'saved backup bundle to $TESTTMP/foo.hg' saved backup bundle to $TESTTMP/foo.hg (glob)< $ echo 'saved backup bundle to $TESTTMP/foo.hg' saved backup bundle to $TESTTMP/*.hg (glob)< (reinstall) $ mv backup test-failure.t No Diff =============== $ rt --nodiff !. Failed test-failure.t: output changed # Ran 2 tests, 0 skipped, 0 warned, 1 failed. python hash seed: * (glob) [1] test --tmpdir support $ rt --tmpdir=$TESTTMP/keep test-success.t Keeping testtmp dir: $TESTTMP/keep/child1/test-success.t (glob) Keeping threadtmp dir: $TESTTMP/keep/child1 (glob) . # Ran 1 tests, 0 skipped, 0 warned, 0 failed. timeouts ======== $ cat > test-timeout.t < $ sleep 2 > $ echo pass > pass > EOF > echo '#require slow' > test-slow-timeout.t > cat test-timeout.t >> test-slow-timeout.t $ rt --timeout=1 --slowtimeout=3 test-timeout.t test-slow-timeout.t st Skipped test-slow-timeout.t: missing feature: allow slow tests Failed test-timeout.t: timed out # Ran 1 tests, 1 skipped, 0 warned, 1 failed. python hash seed: * (glob) [1] $ rt --timeout=1 --slowtimeout=3 \ > test-timeout.t test-slow-timeout.t --allow-slow-tests .t Failed test-timeout.t: timed out # Ran 2 tests, 0 skipped, 0 warned, 1 failed. python hash seed: * (glob) [1] $ rm test-timeout.t test-slow-timeout.t test for --time ================== $ rt test-success.t --time . # Ran 1 tests, 0 skipped, 0 warned, 0 failed. # Producing time report start end cuser csys real Test \s*[\d\.]{5} \s*[\d\.]{5} \s*[\d\.]{5} \s*[\d\.]{5} \s*[\d\.]{5} test-success.t (re) test for --time with --job enabled ==================================== $ rt test-success.t --time --jobs 2 . # Ran 1 tests, 0 skipped, 0 warned, 0 failed. 
# Producing time report start end cuser csys real Test \s*[\d\.]{5} \s*[\d\.]{5} \s*[\d\.]{5} \s*[\d\.]{5} \s*[\d\.]{5} test-success.t (re) Skips ================ $ cat > test-skip.t < $ echo xyzzy > #require false > EOF $ rt --nodiff !.s Skipped test-skip.t: missing feature: nail clipper Failed test-failure.t: output changed # Ran 2 tests, 1 skipped, 0 warned, 1 failed. python hash seed: * (glob) [1] $ rt --keyword xyzzy .s Skipped test-skip.t: missing feature: nail clipper # Ran 2 tests, 2 skipped, 0 warned, 0 failed. Skips with xml $ rt --keyword xyzzy \ > --xunit=xunit.xml .s Skipped test-skip.t: missing feature: nail clipper # Ran 2 tests, 2 skipped, 0 warned, 0 failed. $ cat xunit.xml (glob) Missing skips or blacklisted skips don't count as executed: $ echo test-failure.t > blacklist $ rt --blacklist=blacklist --json\ > test-failure.t test-bogus.t ss Skipped test-bogus.t: Doesn't exist Skipped test-failure.t: blacklisted # Ran 0 tests, 2 skipped, 0 warned, 0 failed. $ cat report.json testreport ={ "test-bogus.t": { "result": "skip" }, "test-failure.t": { "result": "skip" } } (no-eol) #if json test for --json ================== $ rt --json --- $TESTTMP/test-failure.t +++ $TESTTMP/test-failure.t.err @@ -1,4 +1,4 @@ $ echo babar - rataxes + babar This is a noop statement so that this test is still more bytes than success. ERROR: test-failure.t output changed !.s Skipped test-skip.t: missing feature: nail clipper Failed test-failure.t: output changed # Ran 2 tests, 1 skipped, 0 warned, 1 failed. python hash seed: * (glob) [1] $ cat report.json testreport ={ "test-failure.t": [\{] (re) "csys": "\s*[\d\.]{4,5}", ? (re) "cuser": "\s*[\d\.]{4,5}", ? (re) "diff": "---.+\+\+\+.+", ? (re) "end": "\s*[\d\.]{4,5}", ? (re) "result": "failure", ? (re) "start": "\s*[\d\.]{4,5}", ? (re) "time": "\s*[\d\.]{4,5}" (re) }, ? (re) "test-skip.t": { "csys": "\s*[\d\.]{4,5}", ? (re) "cuser": "\s*[\d\.]{4,5}", ? (re) "diff": "", ? (re) "end": "\s*[\d\.]{4,5}", ? 
(re) "result": "skip", ? (re) "start": "\s*[\d\.]{4,5}", ? (re) "time": "\s*[\d\.]{4,5}" (re) }, ? (re) "test-success.t": [\{] (re) "csys": "\s*[\d\.]{4,5}", ? (re) "cuser": "\s*[\d\.]{4,5}", ? (re) "diff": "", ? (re) "end": "\s*[\d\.]{4,5}", ? (re) "result": "success", ? (re) "start": "\s*[\d\.]{4,5}", ? (re) "time": "\s*[\d\.]{4,5}" (re) } } (no-eol) Test that failed test accepted through interactive are properly reported: $ cp test-failure.t backup $ echo y | rt --json -i --- $TESTTMP/test-failure.t +++ $TESTTMP/test-failure.t.err @@ -1,4 +1,4 @@ $ echo babar - rataxes + babar This is a noop statement so that this test is still more bytes than success. Accept this change? [n] ..s Skipped test-skip.t: missing feature: nail clipper # Ran 2 tests, 1 skipped, 0 warned, 0 failed. $ cat report.json testreport ={ "test-failure.t": [\{] (re) "csys": "\s*[\d\.]{4,5}", ? (re) "cuser": "\s*[\d\.]{4,5}", ? (re) "diff": "", ? (re) "end": "\s*[\d\.]{4,5}", ? (re) "result": "success", ? (re) "start": "\s*[\d\.]{4,5}", ? (re) "time": "\s*[\d\.]{4,5}" (re) }, ? (re) "test-skip.t": { "csys": "\s*[\d\.]{4,5}", ? (re) "cuser": "\s*[\d\.]{4,5}", ? (re) "diff": "", ? (re) "end": "\s*[\d\.]{4,5}", ? (re) "result": "skip", ? (re) "start": "\s*[\d\.]{4,5}", ? (re) "time": "\s*[\d\.]{4,5}" (re) }, ? (re) "test-success.t": [\{] (re) "csys": "\s*[\d\.]{4,5}", ? (re) "cuser": "\s*[\d\.]{4,5}", ? (re) "diff": "", ? (re) "end": "\s*[\d\.]{4,5}", ? (re) "result": "success", ? (re) "start": "\s*[\d\.]{4,5}", ? (re) "time": "\s*[\d\.]{4,5}" (re) } } (no-eol) $ mv backup test-failure.t #endif backslash on end of line with glob matching is handled properly $ cat > test-glob-backslash.t << EOF > $ echo 'foo bar \\' > foo * \ (glob) > EOF $ rt test-glob-backslash.t . # Ran 1 tests, 0 skipped, 0 warned, 0 failed. 
$ rm -f test-glob-backslash.t Test reusability for third party tools ====================================== $ mkdir "$TESTTMP"/anothertests $ cd "$TESTTMP"/anothertests test that `run-tests.py` can execute hghave, even if it runs not in Mercurial source tree. $ cat > test-hghave.t < #require true > $ echo foo > foo > EOF $ rt $HGTEST_RUN_TESTS_PURE test-hghave.t . # Ran 1 tests, 0 skipped, 0 warned, 0 failed. test that RUNTESTDIR refers the directory, in which `run-tests.py` now running is placed. $ cat > test-runtestdir.t < - $TESTDIR, in which test-run-tests.t is placed > - \$TESTDIR, in which test-runtestdir.t is placed (expanded at runtime) > - \$RUNTESTDIR, in which run-tests.py is placed (expanded at runtime) > > #if windows > $ test "\$TESTDIR" = "$TESTTMP\anothertests" > #else > $ test "\$TESTDIR" = "$TESTTMP"/anothertests > #endif > $ test "\$RUNTESTDIR" = "$TESTDIR" > $ head -n 3 "\$RUNTESTDIR"/../contrib/check-code.py > #!/usr/bin/env python > # > # check-code - a style and portability checker for Mercurial > EOF $ rt $HGTEST_RUN_TESTS_PURE test-runtestdir.t . # Ran 1 tests, 0 skipped, 0 warned, 0 failed. #if execbit test that TESTDIR is referred in PATH $ cat > custom-command.sh < #!/bin/sh > echo "hello world" > EOF $ chmod +x custom-command.sh $ cat > test-testdir-path.t < $ custom-command.sh > hello world > EOF $ rt $HGTEST_RUN_TESTS_PURE test-testdir-path.t . # Ran 1 tests, 0 skipped, 0 warned, 0 failed. #endif test support for --allow-slow-tests $ cat > test-very-slow-test.t < #require slow > $ echo pass > pass > EOF $ rt $HGTEST_RUN_TESTS_PURE test-very-slow-test.t s Skipped test-very-slow-test.t: missing feature: allow slow tests # Ran 0 tests, 1 skipped, 0 warned, 0 failed. $ rt $HGTEST_RUN_TESTS_PURE --allow-slow-tests test-very-slow-test.t . # Ran 1 tests, 0 skipped, 0 warned, 0 failed. 
mercurial-3.7.3/tests/test-issue3084.t0000644000175000017500000002360112676531525017104 0ustar mpmmpm00000000000000 $ echo "[extensions]" >> $HGRCPATH $ echo "largefiles =" >> $HGRCPATH Create the repository outside $HOME since largefiles write to $HOME/.cache/largefiles. $ hg init test $ cd test $ echo "root" > root $ hg add root $ hg commit -m "Root commit" --config extensions.largefiles=! Ensure that .hg/largefiles isn't created before largefiles are added #if unix-permissions $ chmod 555 .hg #endif $ hg status #if unix-permissions $ chmod 755 .hg #endif $ test -f .hg/largefiles [1] $ echo "large" > foo $ hg add --large foo $ hg commit -m "Add foo as a largefile" $ hg update -r 0 getting changed largefiles 0 largefiles updated, 1 removed 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo "normal" > foo $ hg add foo $ hg commit -m "Add foo as normal file" created new head Normal file in the working copy, keeping the normal version: $ echo "n" | hg merge --config ui.interactive=Yes remote turned local normal file foo into a largefile use (l)argefile or keep (n)ormal file? n 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg status $ cat foo normal Normal file in the working copy, keeping the largefile version: $ hg update -q -C $ echo "l" | hg merge --config ui.interactive=Yes remote turned local normal file foo into a largefile use (l)argefile or keep (n)ormal file? 
l getting changed largefiles 1 largefiles updated, 0 removed 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg status M foo $ hg diff --nodates diff -r fa129ab6b5a7 .hglf/foo --- /dev/null +++ b/.hglf/foo @@ -0,0 +1,1 @@ +7f7097b041ccf68cc5561e9600da4655d21c6d18 diff -r fa129ab6b5a7 foo --- a/foo +++ /dev/null @@ -1,1 +0,0 @@ -normal $ cat foo large Largefile in the working copy, keeping the normal version: $ hg update -q -C -r 1 $ echo "n" | hg merge --config ui.interactive=Yes remote turned local largefile foo into a normal file keep (l)argefile or use (n)ormal file? n getting changed largefiles 0 largefiles updated, 0 removed 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg status M foo $ hg diff --nodates diff -r ff521236428a .hglf/foo --- a/.hglf/foo +++ /dev/null @@ -1,1 +0,0 @@ -7f7097b041ccf68cc5561e9600da4655d21c6d18 diff -r ff521236428a foo --- /dev/null +++ b/foo @@ -0,0 +1,1 @@ +normal $ cat foo normal Largefile in the working copy, keeping the largefile version: $ hg update -q -C -r 1 $ echo "l" | hg merge --config ui.interactive=Yes remote turned local largefile foo into a normal file keep (l)argefile or use (n)ormal file? l 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg status $ cat foo large Whatever ... 
commit something so we can invoke merge when updating $ hg commit -m '3: Merge' Updating from largefile to normal - no reason to prompt $ hg up -r 2 getting changed largefiles 0 largefiles updated, 0 removed 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ cat foo normal (the update above used to leave the working dir in a very weird state - clean it $ hg up -qr null $ hg up -qr 2 ) Updating from normal to largefile - no reason to prompt $ hg up -r 3 getting changed largefiles 1 largefiles updated, 0 removed 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ cat foo large $ cd .. Systematic testing of merges involving largefiles: Ancestor: normal Parent: normal-id Parent: large result: large Ancestor: normal Parent: normal2 Parent: large result: ? Ancestor: large Parent: large-id Parent: normal result: normal Ancestor: large Parent: large2 Parent: normal result: ? All cases should try merging both ways. Prepare test repo: $ hg init merges $ cd merges prepare cases with "normal" ancestor: $ hg up -qr null $ echo normal > f $ hg ci -Aqm "normal-ancestor" $ hg tag -l "normal-ancestor" $ touch f2 $ hg ci -Aqm "normal-id" $ hg tag -l "normal-id" $ echo normal2 > f $ hg ci -m "normal2" $ hg tag -l "normal2" $ echo normal > f $ hg ci -Aqm "normal-same" $ hg tag -l "normal-same" $ hg up -qr "normal-ancestor" $ hg rm f $ echo large > f $ hg add --large f $ hg ci -qm "large" $ hg tag -l "large" prepare cases with "large" ancestor: $ hg up -qr null $ echo large > f $ hg add --large f $ hg ci -qm "large-ancestor" $ hg tag -l "large-ancestor" $ touch f2 $ hg ci -Aqm "large-id" $ hg tag -l "large-id" $ echo large2 > f $ hg ci -m "large2" $ hg tag -l "large2" $ echo large > f $ hg ci -Aqm "large-same" $ hg tag -l "large-same" $ hg up -qr "large-ancestor" $ hg rm f $ echo normal > f $ hg ci -qAm "normal" $ hg tag -l "normal" $ hg log -GT '{tags}' @ normal tip | | o large-same | | | o large2 | | | o large-id |/ o large-ancestor o large | | 
o normal-same | | | o normal2 | | | o normal-id |/ o normal-ancestor Ancestor: normal Parent: normal-id Parent: large result: large $ hg up -Cqr normal-id $ hg merge -r large getting changed largefiles 1 largefiles updated, 0 removed 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat f large swap $ hg up -Cqr large $ hg merge -r normal-id 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat f large Ancestor: normal Parent: normal-same Parent: large result: large $ hg up -Cqr normal-same $ hg merge -r large getting changed largefiles 1 largefiles updated, 0 removed 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat f large swap $ hg up -Cqr large $ hg merge -r normal-same 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat f large Ancestor: normal Parent: normal2 Parent: large result: ? (annoying extra prompt ... but it do not do any serious harm) $ hg up -Cqr normal2 $ hg merge -r large remote turned local normal file f into a largefile use (l)argefile or keep (n)ormal file? l getting changed largefiles 1 largefiles updated, 0 removed 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat f large $ hg up -Cqr normal2 $ echo n | hg merge -r large --config ui.interactive=Yes remote turned local normal file f into a largefile use (l)argefile or keep (n)ormal file? n 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat f normal2 swap $ hg up -Cqr large $ hg merge -r normal2 remote turned local largefile f into a normal file keep (l)argefile or use (n)ormal file? 
l 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat f large $ hg up -Cqr large $ echo n | hg merge -r normal2 --config ui.interactive=Yes remote turned local largefile f into a normal file keep (l)argefile or use (n)ormal file? n getting changed largefiles 0 largefiles updated, 0 removed 2 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat f normal2 Ancestor: large Parent: large-id Parent: normal result: normal $ hg up -Cqr large-id $ hg merge -r normal getting changed largefiles 0 largefiles updated, 0 removed 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat f normal swap $ hg up -Cqr normal $ hg merge -r large-id 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat f normal Ancestor: large Parent: large-same Parent: normal result: normal $ hg up -Cqr large-same $ hg merge -r normal getting changed largefiles 0 largefiles updated, 0 removed 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat f normal swap $ hg up -Cqr normal $ hg merge -r large-same 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat f normal Ancestor: large Parent: large2 Parent: normal result: ? (annoying extra prompt ... but it do not do any serious harm) $ hg up -Cqr large2 $ hg merge -r normal remote turned local largefile f into a normal file keep (l)argefile or use (n)ormal file? l 0 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat f large2 $ hg up -Cqr large2 $ echo n | hg merge -r normal --config ui.interactive=Yes remote turned local largefile f into a normal file keep (l)argefile or use (n)ormal file? 
n getting changed largefiles 0 largefiles updated, 0 removed 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat f normal swap $ hg up -Cqr normal $ hg merge -r large2 remote turned local normal file f into a largefile use (l)argefile or keep (n)ormal file? l getting changed largefiles 1 largefiles updated, 0 removed 2 files updated, 0 files merged, 1 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat f large2 $ hg up -Cqr normal $ echo n | hg merge -r large2 --config ui.interactive=Yes remote turned local normal file f into a largefile use (l)argefile or keep (n)ormal file? n 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat f normal $ cd .. mercurial-3.7.3/tests/test-pull-branch.t0000644000175000017500000001216112676531525017643 0ustar mpmmpm00000000000000 $ hg init t $ cd t $ echo 1 > foo $ hg ci -Am1 # 0 adding foo $ hg branch branchA marked working directory as branch branchA (branches are permanent and global, did you want a bookmark?) $ echo a1 > foo $ hg ci -ma1 # 1 $ cd .. 
$ hg init tt $ cd tt $ hg pull ../t pulling from ../t requesting all changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files (run 'hg update' to get a working copy) $ hg up branchA 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd ../t $ echo a2 > foo $ hg ci -ma2 # 2 Create branch B: $ hg up 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg branch branchB marked working directory as branch branchB $ echo b1 > foo $ hg ci -mb1 # 3 $ cd ../tt A new branch is there $ hg pull -u ../t pulling from ../t searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files (+1 heads) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved Develop both branches: $ cd ../t $ hg up branchA 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo a3 > foo $ hg ci -ma3 # 4 $ hg up branchB 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo b2 > foo $ hg ci -mb2 # 5 $ cd ../tt Should succeed, no new heads: $ hg pull -u ../t pulling from ../t searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files 1 files updated, 0 files merged, 0 files removed, 0 files unresolved Add a head on other branch: $ cd ../t $ hg up branchA 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo a4 > foo $ hg ci -ma4 # 6 $ hg up branchB 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo b3.1 > foo $ hg ci -m b3.1 # 7 $ hg up 5 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo b3.2 > foo $ hg ci -m b3.2 # 8 created new head $ cd ../tt Should succeed because there is only one head on our branch: $ hg pull -u ../t pulling from ../t searching for changes adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 1 files (+1 heads) 
1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd ../t $ hg up -C branchA 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo a5.1 > foo $ hg ci -ma5.1 # 9 $ hg up 6 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo a5.2 > foo $ hg ci -ma5.2 # 10 created new head $ hg up 7 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo b4.1 > foo $ hg ci -m b4.1 # 11 $ hg up -C 8 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo b4.2 > foo $ hg ci -m b4.2 # 12 $ cd ../tt $ hg pull -u ../t pulling from ../t searching for changes adding changesets adding manifests adding file changes added 4 changesets with 4 changes to 1 files (+1 heads) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved Make changes on new branch on tt $ hg up 6 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg branch branchC marked working directory as branch branchC $ echo b1 > bar $ hg ci -Am "commit on branchC on tt" adding bar Make changes on default branch on t $ cd ../t $ hg up -C default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo a1 > bar $ hg ci -Am "commit on default on t" adding bar Pull branchC from tt $ hg pull ../tt pulling from ../tt searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) (run 'hg heads' to see heads) Make changes on default and branchC on tt $ cd ../tt $ hg pull ../t pulling from ../t searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) (run 'hg heads' to see heads) $ hg up -C default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo a1 > bar1 $ hg ci -Am "commit on default on tt" adding bar1 $ hg up branchC 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo a1 > bar2 $ hg ci 
-Am "commit on branchC on tt" adding bar2 Make changes on default and branchC on t $ cd ../t $ hg up default 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo a1 > bar3 $ hg ci -Am "commit on default on t" adding bar3 $ hg up branchC 2 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo a1 > bar4 $ hg ci -Am "commit on branchC on tt" adding bar4 Pull from tt $ hg pull ../tt pulling from ../tt searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files (+2 heads) (run 'hg heads .' to see heads, 'hg merge' to merge) $ cd .. mercurial-3.7.3/tests/test-merge-tools.t0000644000175000017500000006241412676531525017677 0ustar mpmmpm00000000000000test merge-tools configuration - mostly exercising filemerge.py $ unset HGMERGE # make sure HGMERGE doesn't interfere with the test $ hg init revision 0 $ echo "revision 0" > f $ echo "space" >> f $ hg commit -Am "revision 0" adding f revision 1 $ echo "revision 1" > f $ echo "space" >> f $ hg commit -Am "revision 1" $ hg update 0 > /dev/null revision 2 $ echo "revision 2" > f $ echo "space" >> f $ hg commit -Am "revision 2" created new head $ hg update 0 > /dev/null revision 3 - simple to merge $ echo "revision 3" >> f $ hg commit -Am "revision 3" created new head revision 4 - hard to merge $ hg update 0 > /dev/null $ echo "revision 4" > f $ hg commit -Am "revision 4" created new head $ echo "[merge-tools]" > .hg/hgrc $ beforemerge() { > cat .hg/hgrc > echo "# hg update -C 1" > hg update -C 1 > /dev/null > } $ aftermerge() { > echo "# cat f" > cat f > echo "# hg stat" > hg stat > echo "# hg resolve --list" > hg resolve --list > rm -f f.orig > } Tool selection default is internal merge: $ beforemerge [merge-tools] # hg update -C 1 hg merge -r 2 override $PATH to ensure hgmerge not visible; use $PYTHON in case we're running from a devel copy, not a temp installation $ PATH="$BINDIR:/usr/sbin" $PYTHON "$BINDIR"/hg merge -r 2 
merging f warning: conflicts while merging f! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ aftermerge # cat f <<<<<<< local: ef83787e2614 - test: revision 1 revision 1 ======= revision 2 >>>>>>> other: 0185f4e0cf02 - test: revision 2 space # hg stat M f ? f.orig # hg resolve --list U f simplest hgrc using false for merge: $ echo "false.whatever=" >> .hg/hgrc $ beforemerge [merge-tools] false.whatever= # hg update -C 1 $ hg merge -r 2 merging f merging f failed! 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ aftermerge # cat f revision 1 space # hg stat M f ? f.orig # hg resolve --list U f #if unix-permissions unexecutable file in $PATH shouldn't be found: $ echo "echo fail" > false $ hg up -qC 1 $ PATH="`pwd`:$BINDIR:/usr/sbin" $PYTHON "$BINDIR"/hg merge -r 2 merging f warning: conflicts while merging f! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ rm false #endif executable directory in $PATH shouldn't be found: $ mkdir false $ hg up -qC 1 $ PATH="`pwd`:$BINDIR:/usr/sbin" $PYTHON "$BINDIR"/hg merge -r 2 merging f warning: conflicts while merging f! (edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' 
to abandon [1] $ rmdir false true with higher .priority gets precedence: $ echo "true.priority=1" >> .hg/hgrc $ beforemerge [merge-tools] false.whatever= true.priority=1 # hg update -C 1 $ hg merge -r 2 merging f 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ aftermerge # cat f revision 1 space # hg stat M f # hg resolve --list R f unless lowered on command line: $ beforemerge [merge-tools] false.whatever= true.priority=1 # hg update -C 1 $ hg merge -r 2 --config merge-tools.true.priority=-7 merging f merging f failed! 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ aftermerge # cat f revision 1 space # hg stat M f ? f.orig # hg resolve --list U f or false set higher on command line: $ beforemerge [merge-tools] false.whatever= true.priority=1 # hg update -C 1 $ hg merge -r 2 --config merge-tools.false.priority=117 merging f merging f failed! 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ aftermerge # cat f revision 1 space # hg stat M f ? f.orig # hg resolve --list U f or true set to disabled: $ beforemerge [merge-tools] false.whatever= true.priority=1 # hg update -C 1 $ hg merge -r 2 --config merge-tools.true.disabled=yes merging f merging f failed! 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ aftermerge # cat f revision 1 space # hg stat M f ? f.orig # hg resolve --list U f or true.executable not found in PATH: $ beforemerge [merge-tools] false.whatever= true.priority=1 # hg update -C 1 $ hg merge -r 2 --config merge-tools.true.executable=nonexistentmergetool merging f merging f failed! 
0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ aftermerge # cat f revision 1 space # hg stat M f ? f.orig # hg resolve --list U f or true.executable with bogus path: $ beforemerge [merge-tools] false.whatever= true.priority=1 # hg update -C 1 $ hg merge -r 2 --config merge-tools.true.executable=/nonexistent/mergetool merging f merging f failed! 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ aftermerge # cat f revision 1 space # hg stat M f ? f.orig # hg resolve --list U f but true.executable set to cat found in PATH works: $ echo "true.executable=cat" >> .hg/hgrc $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -r 2 merging f revision 1 space revision 0 space revision 2 space 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ aftermerge # cat f revision 1 space # hg stat M f # hg resolve --list R f and true.executable set to cat with path works: $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -r 2 --config merge-tools.true.executable=cat merging f revision 1 space revision 0 space revision 2 space 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ aftermerge # cat f revision 1 space # hg stat M f # hg resolve --list R f #if unix-permissions environment variables in true.executable are handled: $ echo 'echo "custom merge tool"' > .hg/merge.sh $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg --config merge-tools.true.executable='sh' \ > --config merge-tools.true.args=.hg/merge.sh \ > merge -r 2 merging f custom merge tool 0 files updated, 1 files merged, 0 files removed, 
0 files unresolved (branch merge, don't forget to commit) $ aftermerge # cat f revision 1 space # hg stat M f # hg resolve --list R f #endif Tool selection and merge-patterns merge-patterns specifies new tool false: $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -r 2 --config merge-patterns.f=false merging f merging f failed! 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ aftermerge # cat f revision 1 space # hg stat M f ? f.orig # hg resolve --list U f merge-patterns specifies executable not found in PATH and gets warning: $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -r 2 --config merge-patterns.f=true --config merge-tools.true.executable=nonexistentmergetool couldn't find merge tool true specified for f merging f couldn't find merge tool true specified for f merging f failed! 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ aftermerge # cat f revision 1 space # hg stat M f ? f.orig # hg resolve --list U f merge-patterns specifies executable with bogus path and gets warning: $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -r 2 --config merge-patterns.f=true --config merge-tools.true.executable=/nonexistent/mergetool couldn't find merge tool true specified for f merging f couldn't find merge tool true specified for f merging f failed! 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ aftermerge # cat f revision 1 space # hg stat M f ? 
f.orig # hg resolve --list U f ui.merge overrules priority ui.merge specifies false: $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -r 2 --config ui.merge=false merging f merging f failed! 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ aftermerge # cat f revision 1 space # hg stat M f ? f.orig # hg resolve --list U f ui.merge specifies internal:fail: $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -r 2 --config ui.merge=internal:fail 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ aftermerge # cat f revision 1 space # hg stat M f # hg resolve --list U f ui.merge specifies :local (without internal prefix): $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -r 2 --config ui.merge=:local 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ aftermerge # cat f revision 1 space # hg stat M f # hg resolve --list R f ui.merge specifies internal:other: $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -r 2 --config ui.merge=internal:other 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ aftermerge # cat f revision 2 space # hg stat M f # hg resolve --list R f ui.merge specifies internal:prompt: $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -r 2 --config ui.merge=internal:prompt no tool found to merge f keep (l)ocal, take (o)ther, or leave (u)nresolved? 
u 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ aftermerge # cat f revision 1 space # hg stat M f # hg resolve --list U f ui.merge specifies :prompt, with 'leave unresolved' chosen $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -r 2 --config ui.merge=:prompt --config ui.interactive=True << EOF > u > EOF no tool found to merge f keep (l)ocal, take (o)ther, or leave (u)nresolved? u 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ aftermerge # cat f revision 1 space # hg stat M f # hg resolve --list U f prompt with EOF $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -r 2 --config ui.merge=internal:prompt --config ui.interactive=true no tool found to merge f keep (l)ocal, take (o)ther, or leave (u)nresolved? 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ aftermerge # cat f revision 1 space # hg stat M f # hg resolve --list U f $ hg resolve --all --config ui.merge=internal:prompt --config ui.interactive=true no tool found to merge f keep (l)ocal, take (o)ther, or leave (u)nresolved? [1] $ aftermerge # cat f revision 1 space # hg stat M f ? f.orig # hg resolve --list U f $ rm f $ hg resolve --all --config ui.merge=internal:prompt --config ui.interactive=true no tool found to merge f keep (l)ocal, take (o)ther, or leave (u)nresolved? [1] $ aftermerge # cat f revision 1 space # hg stat M f # hg resolve --list U f $ hg resolve --all --config ui.merge=internal:prompt no tool found to merge f keep (l)ocal, take (o)ther, or leave (u)nresolved? u [1] $ aftermerge # cat f revision 1 space # hg stat M f ? 
f.orig # hg resolve --list U f ui.merge specifies internal:dump: $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -r 2 --config ui.merge=internal:dump merging f 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ aftermerge # cat f revision 1 space # hg stat M f ? f.base ? f.local ? f.orig ? f.other # hg resolve --list U f f.base: $ cat f.base revision 0 space f.local: $ cat f.local revision 1 space f.other: $ cat f.other revision 2 space $ rm f.base f.local f.other ui.merge specifies internal:other but is overruled by pattern for false: $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -r 2 --config ui.merge=internal:other --config merge-patterns.f=false merging f merging f failed! 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ aftermerge # cat f revision 1 space # hg stat M f ? f.orig # hg resolve --list U f Premerge ui.merge specifies internal:other but is overruled by --tool=false $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -r 2 --config ui.merge=internal:other --tool=false merging f merging f failed! 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ aftermerge # cat f revision 1 space # hg stat M f ? f.orig # hg resolve --list U f HGMERGE specifies internal:other but is overruled by --tool=false $ HGMERGE=internal:other ; export HGMERGE $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -r 2 --tool=false merging f merging f failed! 
0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ aftermerge # cat f revision 1 space # hg stat M f ? f.orig # hg resolve --list U f $ unset HGMERGE # make sure HGMERGE doesn't interfere with remaining tests update is a merge ... (this also tests that files reverted with '--rev REV' are treated as "modified", even if none of mode, size and timestamp of them isn't changed on the filesystem (see also issue4583)) $ cat >> $HGRCPATH < [fakedirstatewritetime] > # emulate invoking dirstate.write() via repo.status() > # at 2000-01-01 00:00 > fakenow = 200001010000 > EOF $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg update -q 0 $ f -s f f: size=17 $ touch -t 200001010000 f $ hg debugrebuildstate $ cat >> $HGRCPATH < [extensions] > fakedirstatewritetime = $TESTDIR/fakedirstatewritetime.py > EOF $ hg revert -q -r 1 . $ cat >> $HGRCPATH < [extensions] > fakedirstatewritetime = ! > EOF $ f -s f f: size=17 $ touch -t 200001010000 f $ hg status f M f $ hg update -r 2 merging f revision 1 space revision 0 space revision 2 space 0 files updated, 1 files merged, 0 files removed, 0 files unresolved $ aftermerge # cat f revision 1 space # hg stat M f # hg resolve --list R f update should also have --tool $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg update -q 0 $ f -s f f: size=17 $ touch -t 200001010000 f $ hg debugrebuildstate $ cat >> $HGRCPATH < [extensions] > fakedirstatewritetime = $TESTDIR/fakedirstatewritetime.py > EOF $ hg revert -q -r 1 . $ cat >> $HGRCPATH < [extensions] > fakedirstatewritetime = ! > EOF $ f -s f f: size=17 $ touch -t 200001010000 f $ hg status f M f $ hg update -r 2 --tool false merging f merging f failed! 
0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges [1] $ aftermerge # cat f revision 1 space # hg stat M f ? f.orig # hg resolve --list U f Default is silent simplemerge: $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -r 3 merging f 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ aftermerge # cat f revision 1 space revision 3 # hg stat M f # hg resolve --list R f .premerge=True is same: $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -r 3 --config merge-tools.true.premerge=True merging f 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ aftermerge # cat f revision 1 space revision 3 # hg stat M f # hg resolve --list R f .premerge=False executes merge-tool: $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -r 3 --config merge-tools.true.premerge=False merging f revision 1 space revision 0 space revision 0 space revision 3 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ aftermerge # cat f revision 1 space # hg stat M f # hg resolve --list R f premerge=keep keeps conflict markers in: $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -r 4 --config merge-tools.true.premerge=keep merging f <<<<<<< local: ef83787e2614 - test: revision 1 revision 1 space ======= revision 4 >>>>>>> other: 81448d39c9a0 - test: revision 4 revision 0 space revision 4 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ aftermerge # cat f <<<<<<< local: ef83787e2614 - test: revision 1 revision 1 space ======= revision 4 >>>>>>> other: 81448d39c9a0 - test: 
revision 4 # hg stat M f # hg resolve --list R f premerge=keep-merge3 keeps conflict markers with base content: $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -r 4 --config merge-tools.true.premerge=keep-merge3 merging f <<<<<<< local: ef83787e2614 - test: revision 1 revision 1 space ||||||| base revision 0 space ======= revision 4 >>>>>>> other: 81448d39c9a0 - test: revision 4 revision 0 space revision 4 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ aftermerge # cat f <<<<<<< local: ef83787e2614 - test: revision 1 revision 1 space ||||||| base revision 0 space ======= revision 4 >>>>>>> other: 81448d39c9a0 - test: revision 4 # hg stat M f # hg resolve --list R f Tool execution set tools.args explicit to include $base $local $other $output: $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -r 2 --config merge-tools.true.executable=head --config merge-tools.true.args='$base $local $other $output' \ > | sed 's,==> .* <==,==> ... <==,g' merging f ==> ... <== revision 0 space ==> ... <== revision 1 space ==> ... <== revision 2 space ==> ... 
<== revision 1 space 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ aftermerge # cat f revision 1 space # hg stat M f # hg resolve --list R f Merge with "echo mergeresult > $local": $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -r 2 --config merge-tools.true.executable=echo --config merge-tools.true.args='mergeresult > $local' merging f 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ aftermerge # cat f mergeresult # hg stat M f # hg resolve --list R f - and $local is the file f: $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -r 2 --config merge-tools.true.executable=echo --config merge-tools.true.args='mergeresult > f' merging f 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ aftermerge # cat f mergeresult # hg stat M f # hg resolve --list R f Merge with "echo mergeresult > $output" - the variable is a bit magic: $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -r 2 --config merge-tools.true.executable=echo --config merge-tools.true.args='mergeresult > $output' merging f 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ aftermerge # cat f mergeresult # hg stat M f # hg resolve --list R f Merge using tool with a path that must be quoted: $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ cat < 'my merge tool' > cat "\$1" "\$2" "\$3" > "\$4" > EOF $ hg --config merge-tools.true.executable='sh' \ > --config merge-tools.true.args='"./my merge tool" $base $local $other $output' \ > merge -r 2 merging f 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to 
commit) $ rm -f 'my merge tool' $ aftermerge # cat f revision 0 space revision 1 space revision 2 space # hg stat M f # hg resolve --list R f Issue3581: Merging a filename that needs to be quoted (This test doesn't work on Windows filesystems even on Linux, so check for Unix-like permission) #if unix-permissions $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ echo "revision 5" > '"; exit 1; echo "' $ hg commit -Am "revision 5" adding "; exit 1; echo " warning: filename contains '"', which is reserved on Windows: '"; exit 1; echo "' $ hg update -C 1 > /dev/null $ echo "revision 6" > '"; exit 1; echo "' $ hg commit -Am "revision 6" adding "; exit 1; echo " warning: filename contains '"', which is reserved on Windows: '"; exit 1; echo "' created new head $ hg merge --config merge-tools.true.executable="true" -r 5 merging "; exit 1; echo " 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg update -C 1 > /dev/null #endif Merge post-processing cat is a bad merge-tool and doesn't change: $ beforemerge [merge-tools] false.whatever= true.priority=1 true.executable=cat # hg update -C 1 $ hg merge -y -r 2 --config merge-tools.true.checkchanged=1 merging f revision 1 space revision 0 space revision 2 space output file f appears unchanged was merge successful (yn)? n merging f failed! 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] $ aftermerge # cat f revision 1 space # hg stat M f ? f.orig # hg resolve --list U f #if symlink internal merge cannot handle symlinks and shouldn't try: $ hg update -q -C 1 $ rm f $ ln -s symlink f $ hg commit -qm 'f is symlink' $ hg merge -r 2 --tool internal:merge merging f warning: internal :merge cannot merge symlinks for f warning: conflicts while merging f! 
(edit, then use 'hg resolve --mark') 0 files updated, 0 files merged, 0 files removed, 1 files unresolved use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon [1] #endif mercurial-3.7.3/tests/test-filecache.py0000644000175000017500000001031512676531525017523 0ustar mpmmpm00000000000000import sys, os, subprocess if subprocess.call(['python', '%s/hghave' % os.environ['TESTDIR'], 'cacheable']): sys.exit(80) from mercurial import util, scmutil, extensions, hg, ui filecache = scmutil.filecache class fakerepo(object): def __init__(self): self._filecache = {} def join(self, p): return p def sjoin(self, p): return p @filecache('x', 'y') def cached(self): print 'creating' return 'string from function' def invalidate(self): for k in self._filecache: try: delattr(self, k) except AttributeError: pass def basic(repo): print "* neither file exists" # calls function repo.cached repo.invalidate() print "* neither file still exists" # uses cache repo.cached # create empty file f = open('x', 'w') f.close() repo.invalidate() print "* empty file x created" # should recreate the object repo.cached f = open('x', 'w') f.write('a') f.close() repo.invalidate() print "* file x changed size" # should recreate the object repo.cached repo.invalidate() print "* nothing changed with either file" # stats file again, reuses object repo.cached # atomic replace file, size doesn't change # hopefully st_mtime doesn't change as well so this doesn't use the cache # because of inode change f = scmutil.opener('.')('x', 'w', atomictemp=True) f.write('b') f.close() repo.invalidate() print "* file x changed inode" repo.cached # create empty file y f = open('y', 'w') f.close() repo.invalidate() print "* empty file y created" # should recreate the object repo.cached f = open('y', 'w') f.write('A') f.close() repo.invalidate() print "* file y changed size" # should recreate the object repo.cached f = scmutil.opener('.')('y', 'w', atomictemp=True) f.write('B') f.close() 
repo.invalidate() print "* file y changed inode" repo.cached f = scmutil.opener('.')('x', 'w', atomictemp=True) f.write('c') f.close() f = scmutil.opener('.')('y', 'w', atomictemp=True) f.write('C') f.close() repo.invalidate() print "* both files changed inode" repo.cached def fakeuncacheable(): def wrapcacheable(orig, *args, **kwargs): return False def wrapinit(orig, *args, **kwargs): pass originit = extensions.wrapfunction(util.cachestat, '__init__', wrapinit) origcacheable = extensions.wrapfunction(util.cachestat, 'cacheable', wrapcacheable) for fn in ['x', 'y']: try: os.remove(fn) except OSError: pass basic(fakerepo()) util.cachestat.cacheable = origcacheable util.cachestat.__init__ = originit def test_filecache_synced(): # test old behavior that caused filecached properties to go out of sync os.system('hg init && echo a >> a && hg ci -qAm.') repo = hg.repository(ui.ui()) # first rollback clears the filecache, but changelog to stays in __dict__ repo.rollback() repo.commit('.') # second rollback comes along and touches the changelog externally # (file is moved) repo.rollback() # but since changelog isn't under the filecache control anymore, we don't # see that it changed, and return the old changelog without reconstructing # it repo.commit('.') def setbeforeget(repo): os.remove('x') os.remove('y') repo.cached = 'string set externally' repo.invalidate() print "* neither file exists" print repo.cached repo.invalidate() f = open('x', 'w') f.write('a') f.close() print "* file x created" print repo.cached repo.cached = 'string 2 set externally' repo.invalidate() print "* string set externally again" print repo.cached repo.invalidate() f = open('y', 'w') f.write('b') f.close() print "* file y created" print repo.cached print 'basic:' print basic(fakerepo()) print print 'fakeuncacheable:' print fakeuncacheable() test_filecache_synced() print print 'setbeforeget:' print setbeforeget(fakerepo()) 
mercurial-3.7.3/tests/test-paths.t0000644000175000017500000000712712676531525016561 0ustar mpmmpm00000000000000 $ hg init a $ hg clone a b updating to branch default 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd a with no paths: $ hg paths $ hg paths unknown not found! [1] $ hg paths -Tjson [ ] with paths: $ echo '[paths]' >> .hg/hgrc $ echo 'dupe = ../b#tip' >> .hg/hgrc $ echo 'expand = $SOMETHING/bar' >> .hg/hgrc $ hg in dupe comparing with $TESTTMP/b (glob) no changes found [1] $ cd .. $ hg -R a in dupe comparing with $TESTTMP/b (glob) no changes found [1] $ cd a $ hg paths dupe = $TESTTMP/b#tip (glob) expand = $TESTTMP/a/$SOMETHING/bar (glob) $ SOMETHING=foo hg paths dupe = $TESTTMP/b#tip (glob) expand = $TESTTMP/a/foo/bar (glob) #if msys $ SOMETHING=//foo hg paths dupe = $TESTTMP/b#tip (glob) expand = /foo/bar #else $ SOMETHING=/foo hg paths dupe = $TESTTMP/b#tip (glob) expand = /foo/bar #endif $ hg paths -q dupe expand $ hg paths dupe $TESTTMP/b#tip (glob) $ hg paths -q dupe $ hg paths unknown not found! 
[1] $ hg paths -q unknown [1] formatter output with paths: $ echo 'dupe:pushurl = https://example.com/dupe' >> .hg/hgrc $ hg paths -Tjson [ { "name": "dupe", "pushurl": "https://example.com/dupe", "url": "$TESTTMP/b#tip" }, { "name": "expand", "url": "$TESTTMP/a/$SOMETHING/bar" } ] $ hg paths -Tjson dupe [ { "name": "dupe", "pushurl": "https://example.com/dupe", "url": "$TESTTMP/b#tip" } ] $ hg paths -Tjson -q unknown [ ] [1] password should be masked in plain output, but not in machine-readable output: $ echo 'insecure = http://foo:insecure@example.com/' >> .hg/hgrc $ hg paths insecure http://foo:***@example.com/ $ hg paths -Tjson insecure [ { "name": "insecure", "url": "http://foo:insecure@example.com/" } ] zeroconf wraps ui.configitems(), which shouldn't crash at least: $ hg paths --config extensions.zeroconf= dupe = $TESTTMP/b#tip (glob) dupe:pushurl = https://example.com/dupe expand = $TESTTMP/a/$SOMETHING/bar (glob) insecure = http://foo:***@example.com/ $ cd .. sub-options for an undeclared path are ignored $ hg init suboptions $ cd suboptions $ cat > .hg/hgrc << EOF > [paths] > path0 = https://example.com/path0 > path1:pushurl = https://example.com/path1 > EOF $ hg paths path0 = https://example.com/path0 unknown sub-options aren't displayed $ cat > .hg/hgrc << EOF > [paths] > path0 = https://example.com/path0 > path0:foo = https://example.com/path1 > EOF $ hg paths path0 = https://example.com/path0 :pushurl must be a URL $ cat > .hg/hgrc << EOF > [paths] > default = /path/to/nothing > default:pushurl = /not/a/url > EOF $ hg paths (paths.default:pushurl not a URL; ignoring) default = /path/to/nothing #fragment is not allowed in :pushurl $ cat > .hg/hgrc << EOF > [paths] > default = https://example.com/repo > invalid = https://example.com/repo > invalid:pushurl = https://example.com/repo#branch > EOF $ hg paths ("#fragment" in paths.invalid:pushurl not supported; ignoring) default = https://example.com/repo invalid = https://example.com/repo invalid:pushurl = 
https://example.com/repo $ cd .. 'file:' disables [paths] entries for clone destination $ cat >> $HGRCPATH < [paths] > gpath1 = http://hg.example.com > EOF $ hg clone a gpath1 abort: cannot create new http repository [255] $ hg clone a file:gpath1 updating to branch default 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd gpath1 $ hg -q id 000000000000 $ cd .. mercurial-3.7.3/tests/test-bheads.t0000644000175000017500000002012412676531525016660 0ustar mpmmpm00000000000000 $ heads() > { > hg heads --template '{rev}: {desc|firstline|strip} ({branches})\n' "$@" > } $ hg init a $ cd a $ echo 'root' >root $ hg add root $ hg commit -m "Adding root node" $ heads 0: Adding root node () ------- $ heads . 0: Adding root node () ======= $ echo 'a' >a $ hg add a $ hg branch a marked working directory as branch a (branches are permanent and global, did you want a bookmark?) $ hg commit -m "Adding a branch" $ heads 1: Adding a branch (a) 0: Adding root node () ------- $ heads . 1: Adding a branch (a) ======= $ hg update -C 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo 'b' >b $ hg add b $ hg branch b marked working directory as branch b $ hg commit -m "Adding b branch" $ heads 2: Adding b branch (b) 1: Adding a branch (a) 0: Adding root node () ------- $ heads . 2: Adding b branch (b) ======= $ echo 'bh1' >bh1 $ hg add bh1 $ hg commit -m "Adding b branch head 1" $ heads 3: Adding b branch head 1 (b) 1: Adding a branch (a) 0: Adding root node () ------- $ heads . 3: Adding b branch head 1 (b) ======= $ hg update -C 2 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo 'bh2' >bh2 $ hg add bh2 $ hg commit -m "Adding b branch head 2" created new head $ heads 4: Adding b branch head 2 (b) 3: Adding b branch head 1 (b) 1: Adding a branch (a) 0: Adding root node () $ heads . 
4: Adding b branch head 2 (b) 3: Adding b branch head 1 (b) ======= $ hg update -C 2 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo 'bh3' >bh3 $ hg add bh3 $ hg commit -m "Adding b branch head 3" created new head $ heads 5: Adding b branch head 3 (b) 4: Adding b branch head 2 (b) 3: Adding b branch head 1 (b) 1: Adding a branch (a) 0: Adding root node () ------- $ heads . 5: Adding b branch head 3 (b) 4: Adding b branch head 2 (b) 3: Adding b branch head 1 (b) ======= $ hg merge 4 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg commit -m "Merging b branch head 2 and b branch head 3" $ heads 6: Merging b branch head 2 and b branch head 3 (b) 3: Adding b branch head 1 (b) 1: Adding a branch (a) 0: Adding root node () ------- $ heads . 6: Merging b branch head 2 and b branch head 3 (b) 3: Adding b branch head 1 (b) ======= $ echo 'c' >c $ hg add c $ hg branch c marked working directory as branch c $ hg commit -m "Adding c branch" $ heads 7: Adding c branch (c) 6: Merging b branch head 2 and b branch head 3 (b) 3: Adding b branch head 1 (b) 1: Adding a branch (a) 0: Adding root node () ------- $ heads . 7: Adding c branch (c) ======= $ heads -r 3 . no open branch heads found on branches c (started at 3) [1] $ heads -r 2 . 7: Adding c branch (c) ------- $ hg update -C 4 0 files updated, 0 files merged, 2 files removed, 0 files unresolved ------- $ heads -r 3 . 3: Adding b branch head 1 (b) ------- $ heads -r 2 . 6: Merging b branch head 2 and b branch head 3 (b) 3: Adding b branch head 1 (b) ------- $ heads -r 7 . no open branch heads found on branches b (started at 7) [1] ======= $ for i in 0 1 2 3 4 5 6 7; do > hg update -C "$i" > heads > echo '-------' > heads . 
> echo '-------' > done 0 files updated, 0 files merged, 2 files removed, 0 files unresolved 7: Adding c branch (c) 6: Merging b branch head 2 and b branch head 3 (b) 3: Adding b branch head 1 (b) 1: Adding a branch (a) 0: Adding root node () ------- 0: Adding root node () ------- 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 7: Adding c branch (c) 6: Merging b branch head 2 and b branch head 3 (b) 3: Adding b branch head 1 (b) 1: Adding a branch (a) 0: Adding root node () ------- 1: Adding a branch (a) ------- 1 files updated, 0 files merged, 1 files removed, 0 files unresolved 7: Adding c branch (c) 6: Merging b branch head 2 and b branch head 3 (b) 3: Adding b branch head 1 (b) 1: Adding a branch (a) 0: Adding root node () ------- 6: Merging b branch head 2 and b branch head 3 (b) 3: Adding b branch head 1 (b) ------- 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 7: Adding c branch (c) 6: Merging b branch head 2 and b branch head 3 (b) 3: Adding b branch head 1 (b) 1: Adding a branch (a) 0: Adding root node () ------- 6: Merging b branch head 2 and b branch head 3 (b) 3: Adding b branch head 1 (b) ------- 1 files updated, 0 files merged, 1 files removed, 0 files unresolved 7: Adding c branch (c) 6: Merging b branch head 2 and b branch head 3 (b) 3: Adding b branch head 1 (b) 1: Adding a branch (a) 0: Adding root node () ------- 6: Merging b branch head 2 and b branch head 3 (b) 3: Adding b branch head 1 (b) ------- 1 files updated, 0 files merged, 1 files removed, 0 files unresolved 7: Adding c branch (c) 6: Merging b branch head 2 and b branch head 3 (b) 3: Adding b branch head 1 (b) 1: Adding a branch (a) 0: Adding root node () ------- 6: Merging b branch head 2 and b branch head 3 (b) 3: Adding b branch head 1 (b) ------- 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 7: Adding c branch (c) 6: Merging b branch head 2 and b branch head 3 (b) 3: Adding b branch head 1 (b) 1: Adding a branch (a) 
0: Adding root node () ------- 6: Merging b branch head 2 and b branch head 3 (b) 3: Adding b branch head 1 (b) ------- 1 files updated, 0 files merged, 0 files removed, 0 files unresolved 7: Adding c branch (c) 6: Merging b branch head 2 and b branch head 3 (b) 3: Adding b branch head 1 (b) 1: Adding a branch (a) 0: Adding root node () ------- 7: Adding c branch (c) ------- ======= $ for i in a b c z; do > heads "$i" > echo '-------' > done 1: Adding a branch (a) ------- 6: Merging b branch head 2 and b branch head 3 (b) 3: Adding b branch head 1 (b) ------- 7: Adding c branch (c) ------- abort: unknown revision 'z'! ------- ======= $ heads 0 1 2 3 4 5 6 7 7: Adding c branch (c) 6: Merging b branch head 2 and b branch head 3 (b) 3: Adding b branch head 1 (b) 1: Adding a branch (a) 0: Adding root node () Topological heads: $ heads -t 7: Adding c branch (c) 3: Adding b branch head 1 (b) 1: Adding a branch (a) $ cd .. ______________ "created new head" message tests $ hg init newheadmsg $ cd newheadmsg Init: no msg $ echo 1 > a $ hg ci -Am "a0: Initial root" adding a $ echo 2 >> a $ hg ci -m "a1 (HN)" $ hg branch b marked working directory as branch b (branches are permanent and global, did you want a bookmark?) 
$ echo 1 > b $ hg ci -Am "b2: Initial root for branch b" adding b $ echo 2 >> b $ hg ci -m "b3 (HN)" Case NN: msg $ hg up -q null $ hg branch -f b marked working directory as branch b $ echo 1 > bb $ hg ci -Am "b4 (NN): new topo root for branch b" adding bb created new head Case HN: no msg $ echo 2 >> bb $ hg ci -m "b5 (HN)" Case BN: msg $ hg branch -f default marked working directory as branch default $ echo 1 > aa $ hg ci -Am "a6 (BN): new branch root" adding aa created new head Case CN: msg $ hg up -q 4 $ echo 3 >> bbb $ hg ci -Am "b7 (CN): regular new head" adding bbb created new head Case BB: msg $ hg up -q 4 $ hg merge -q 3 $ hg branch -f default marked working directory as branch default $ hg ci -m "a8 (BB): weird new branch root" created new head Case CB: msg $ hg up -q 4 $ hg merge -q 1 $ hg ci -m "b9 (CB): new head from branch merge" created new head Case HB: no msg $ hg up -q 7 $ hg merge -q 6 $ hg ci -m "b10 (HB): continuing head from branch merge" Case CC: msg $ hg up -q 4 $ hg merge -q 2 $ hg ci -m "b11 (CC): new head from merge" created new head Case CH: no msg $ hg up -q 2 $ hg merge -q 10 $ hg ci -m "b12 (CH): continuing head from merge" Case HH: no msg $ hg merge -q 3 $ hg ci -m "b12 (HH): merging two heads" $ cd .. 
mercurial-3.7.3/tests/test-issue1877.t0000644000175000017500000000214412676531525017113 0ustar mpmmpm00000000000000https://bz.mercurial-scm.org/1877 $ hg init a $ cd a $ echo a > a $ hg add a $ hg ci -m 'a' $ echo b > a $ hg ci -m'b' $ hg up 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg book main $ hg book * main 0:cb9a9f314b8b $ echo c > c $ hg add c $ hg ci -m'c' created new head $ hg book * main 2:d36c0562f908 $ hg heads changeset: 2:d36c0562f908 bookmark: main tag: tip parent: 0:cb9a9f314b8b user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: c changeset: 1:1e6c11564562 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: b $ hg up 1e6c11564562 1 files updated, 0 files merged, 1 files removed, 0 files unresolved (leaving bookmark main) $ hg merge main 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg book main 2:d36c0562f908 $ hg ci -m'merge' $ hg book main 2:d36c0562f908 $ cd .. mercurial-3.7.3/tests/test-convert-svn-tags.t0000644000175000017500000000205512676531525020655 0ustar mpmmpm00000000000000#require svn svn-bindings $ cat >> $HGRCPATH < [extensions] > convert = > EOF $ svnadmin create svn-repo $ svnadmin load -q svn-repo < "$TESTDIR/svn/tags.svndump" Convert $ hg convert --datesort svn-repo A-hg initializing destination A-hg repository scanning source... sorting... converting... 5 init projA 4 adda 3 changea 2 changea2 1 changea3 0 changea updating tags $ cd A-hg $ hg log -G --template '{rev} {desc|firstline} tags: {tags}\n' o 6 update tags tags: tip | o 5 changea tags: trunk.goodtag | o 4 changea3 tags: | o 3 changea2 tags: trunk.v1 | o 2 changea tags: | o 1 adda tags: | o 0 init projA tags: $ hg tags -q tip trunk.goodtag trunk.v1 $ cd .. Convert without tags $ hg convert --datesort --config convert.svn.tags= svn-repo A-notags-hg initializing destination A-notags-hg repository scanning source... sorting... converting... 
5 init projA 4 adda 3 changea 2 changea2 1 changea3 0 changea $ hg -R A-notags-hg tags -q tip mercurial-3.7.3/tests/test-import-bypass.t0000644000175000017500000002200512676531525020243 0ustar mpmmpm00000000000000 $ echo "[extensions]" >> $HGRCPATH $ echo "purge=" >> $HGRCPATH $ shortlog() { > hg log -G --template '{rev}:{node|short} {author} {date|hgdate} - {branch} - {desc|firstline}\n' > } Test --bypass with other options $ hg init repo-options $ cd repo-options $ echo a > a $ hg ci -Am adda adding a $ echo a >> a $ hg branch foo marked working directory as branch foo (branches are permanent and global, did you want a bookmark?) $ hg ci -Am changea $ hg export . > ../test.diff $ hg up null 0 files updated, 0 files merged, 1 files removed, 0 files unresolved Test importing an existing revision (this also tests that "hg import" disallows combination of '--exact' and '--edit') $ hg import --bypass --exact --edit ../test.diff abort: cannot use --exact with --edit [255] $ hg import --bypass --exact ../test.diff applying ../test.diff $ shortlog o 1:4e322f7ce8e3 test 0 0 - foo - changea | o 0:07f494440405 test 0 0 - default - adda Test failure without --exact $ hg import --bypass ../test.diff applying ../test.diff unable to find 'a' for patching abort: patch failed to apply [255] $ hg st $ shortlog o 1:4e322f7ce8e3 test 0 0 - foo - changea | o 0:07f494440405 test 0 0 - default - adda Test --user, --date and --message $ hg up 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg import --bypass --u test2 -d '1 0' -m patch2 ../test.diff applying ../test.diff $ cat .hg/last-message.txt patch2 (no-eol) $ shortlog o 2:2e127d1da504 test2 1 0 - default - patch2 | | o 1:4e322f7ce8e3 test 0 0 - foo - changea |/ @ 0:07f494440405 test 0 0 - default - adda $ hg rollback repository tip rolled back to revision 1 (undo import) Test --import-branch (this also tests that editor is not invoked for '--bypass', if the patch contains the commit message, regardless of 
'--edit') $ HGEDITOR=cat hg import --bypass --import-branch --edit ../test.diff applying ../test.diff $ shortlog o 1:4e322f7ce8e3 test 0 0 - foo - changea | @ 0:07f494440405 test 0 0 - default - adda $ hg rollback repository tip rolled back to revision 1 (undo import) Test --strip $ hg import --bypass --strip 0 - < # HG changeset patch > # User test > # Date 0 0 > # Branch foo > # Node ID 4e322f7ce8e3e4203950eac9ece27bf7e45ffa6c > # Parent 07f4944404050f47db2e5c5071e0e84e7a27bba9 > changea > > diff -r 07f494440405 -r 4e322f7ce8e3 a > --- a Thu Jan 01 00:00:00 1970 +0000 > +++ a Thu Jan 01 00:00:00 1970 +0000 > @@ -1,1 +1,2 @@ > a > +a > EOF applying patch from stdin $ hg rollback repository tip rolled back to revision 1 (undo import) Test --strip with --bypass $ mkdir -p dir/dir2 $ echo bb > dir/dir2/b $ echo cc > dir/dir2/c $ echo d > dir/d $ hg ci -Am 'addabcd' adding dir/d adding dir/dir2/b adding dir/dir2/c $ shortlog @ 2:d805bc8236b6 test 0 0 - default - addabcd | | o 1:4e322f7ce8e3 test 0 0 - foo - changea |/ o 0:07f494440405 test 0 0 - default - adda $ hg import --bypass --strip 2 --prefix dir/ - < # HG changeset patch > # User test > # Date 0 0 > # Branch foo > changeabcd > > diff --git a/foo/a b/foo/a > new file mode 100644 > --- /dev/null > +++ b/foo/a > @@ -0,0 +1 @@ > +a > diff --git a/foo/dir2/b b/foo/dir2/b2 > rename from foo/dir2/b > rename to foo/dir2/b2 > diff --git a/foo/dir2/c b/foo/dir2/c > --- a/foo/dir2/c > +++ b/foo/dir2/c > @@ -0,0 +1 @@ > +cc > diff --git a/foo/d b/foo/d > deleted file mode 100644 > --- a/foo/d > +++ /dev/null > @@ -1,1 +0,0 @@ > -d > EOF applying patch from stdin $ shortlog o 3:5bd46886ca3e test 0 0 - default - changeabcd | @ 2:d805bc8236b6 test 0 0 - default - addabcd | | o 1:4e322f7ce8e3 test 0 0 - foo - changea |/ o 0:07f494440405 test 0 0 - default - adda $ hg diff --change 3 --git diff --git a/dir/a b/dir/a new file mode 100644 --- /dev/null +++ b/dir/a @@ -0,0 +1,1 @@ +a diff --git a/dir/d b/dir/d deleted file mode 
100644 --- a/dir/d +++ /dev/null @@ -1,1 +0,0 @@ -d diff --git a/dir/dir2/b b/dir/dir2/b2 rename from dir/dir2/b rename to dir/dir2/b2 diff --git a/dir/dir2/c b/dir/dir2/c --- a/dir/dir2/c +++ b/dir/dir2/c @@ -1,1 +1,2 @@ cc +cc $ hg -q --config extensions.strip= strip . Test unsupported combinations $ hg import --bypass --no-commit ../test.diff abort: cannot use --no-commit with --bypass [255] $ hg import --bypass --similarity 50 ../test.diff abort: cannot use --similarity with --bypass [255] $ hg import --exact --prefix dir/ ../test.diff abort: cannot use --exact with --prefix [255] Test commit editor (this also tests that editor is invoked, if the patch doesn't contain the commit message, regardless of '--edit') $ cat > ../test.diff < diff -r 07f494440405 -r 4e322f7ce8e3 a > --- a/a Thu Jan 01 00:00:00 1970 +0000 > +++ b/a Thu Jan 01 00:00:00 1970 +0000 > @@ -1,1 +1,2 @@ > -a > +b > +c > EOF $ HGEDITOR=cat hg import --bypass ../test.diff applying ../test.diff HG: Enter commit message. Lines beginning with 'HG:' are removed. HG: Leave message empty to abort commit. 
HG: -- HG: user: test HG: branch 'default' HG: changed a abort: empty commit message [255] Test patch.eol is handled (this also tests that editor is not invoked for '--bypass', if the commit message is explicitly specified, regardless of '--edit') $ $PYTHON -c 'file("a", "wb").write("a\r\n")' $ hg ci -m makeacrlf $ HGEDITOR=cat hg import -m 'should fail because of eol' --edit --bypass ../test.diff applying ../test.diff patching file a Hunk #1 FAILED at 0 abort: patch failed to apply [255] $ hg --config patch.eol=auto import -d '0 0' -m 'test patch.eol' --bypass ../test.diff applying ../test.diff $ shortlog o 3:c606edafba99 test 0 0 - default - test patch.eol | @ 2:872023de769d test 0 0 - default - makeacrlf | | o 1:4e322f7ce8e3 test 0 0 - foo - changea |/ o 0:07f494440405 test 0 0 - default - adda Test applying multiple patches $ hg up -qC 0 $ echo e > e $ hg ci -Am adde adding e created new head $ hg export . > ../patch1.diff $ hg up -qC 1 $ echo f > f $ hg ci -Am addf adding f $ hg export . > ../patch2.diff $ cd .. $ hg clone -r1 repo-options repo-multi1 adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files updating to branch foo 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd repo-multi1 $ hg up 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg import --bypass ../patch1.diff ../patch2.diff applying ../patch1.diff applying ../patch2.diff $ shortlog o 3:bc8ca3f8a7c4 test 0 0 - default - addf | o 2:16581080145e test 0 0 - default - adde | | o 1:4e322f7ce8e3 test 0 0 - foo - changea |/ @ 0:07f494440405 test 0 0 - default - adda Test applying multiple patches with --exact $ cd .. 
$ hg clone -r1 repo-options repo-multi2 adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files updating to branch foo 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd repo-multi2 $ hg import --bypass --exact ../patch1.diff ../patch2.diff applying ../patch1.diff applying ../patch2.diff $ shortlog o 3:d60cb8989666 test 0 0 - foo - addf | | o 2:16581080145e test 0 0 - default - adde | | @ | 1:4e322f7ce8e3 test 0 0 - foo - changea |/ o 0:07f494440405 test 0 0 - default - adda $ cd .. Test avoiding editor invocation at applying the patch with --exact even if commit message is empty $ cd repo-options $ echo a >> a $ hg commit -m ' ' $ hg tip -T "{node}\n" 1b77bc7d1db9f0e7f1716d515b630516ab386c89 $ hg export -o ../empty-log.diff . $ hg update -q -C ".^1" $ hg --config extensions.strip= strip -q tip $ HGEDITOR=cat hg import --exact --bypass ../empty-log.diff applying ../empty-log.diff $ hg tip -T "{node}\n" 1b77bc7d1db9f0e7f1716d515b630516ab386c89 $ cd .. #if symlink execbit Test complicated patch with --exact $ hg init repo-exact $ cd repo-exact $ echo a > a $ echo c > c $ echo d > d $ echo e > e $ echo f > f $ chmod +x f $ ln -s c linkc $ hg ci -Am t adding a adding c adding d adding e adding f adding linkc $ hg cp a aa1 $ echo b >> a $ echo b > b $ hg add b $ hg cp a aa2 $ echo aa >> aa2 $ chmod +x e $ chmod -x f $ ln -s a linka $ hg rm d $ hg rm linkc $ hg mv c cc $ hg ci -m patch $ hg export --git . > ../test.diff $ hg up -C null 0 files updated, 0 files merged, 7 files removed, 0 files unresolved $ hg purge $ hg st $ hg import --bypass --exact ../test.diff applying ../test.diff The patch should have matched the exported revision and generated no additional data. If not, diff both heads to debug it. $ shortlog o 1:2978fd5c8aa4 test 0 0 - default - patch | o 0:a0e19e636a43 test 0 0 - default - t #endif $ cd .. 
mercurial-3.7.3/tests/test-commit-interactive.t0000644000175000017500000010615612676531525021247 0ustar mpmmpm00000000000000Set up a repo $ cat <> $HGRCPATH > [ui] > interactive = true > [extensions] > record = > EOF $ hg init a $ cd a Select no files $ touch empty-rw $ hg add empty-rw $ hg record --config ui.interactive=false abort: running non-interactively, use commit instead [255] $ hg commit -i --config ui.interactive=false abort: running non-interactively [255] $ hg commit -i empty-rw< n > EOF diff --git a/empty-rw b/empty-rw new file mode 100644 examine changes to 'empty-rw'? [Ynesfdaq?] n no changes to record $ hg tip -p changeset: -1:000000000000 tag: tip user: date: Thu Jan 01 00:00:00 1970 +0000 Select files but no hunks $ hg commit -i empty-rw< y > n > EOF diff --git a/empty-rw b/empty-rw new file mode 100644 examine changes to 'empty-rw'? [Ynesfdaq?] y abort: empty commit message [255] $ hg tip -p changeset: -1:000000000000 tag: tip user: date: Thu Jan 01 00:00:00 1970 +0000 Record empty file $ hg commit -i -d '0 0' -m empty empty-rw< y > y > EOF diff --git a/empty-rw b/empty-rw new file mode 100644 examine changes to 'empty-rw'? [Ynesfdaq?] y $ hg tip -p changeset: 0:c0708cf4e46e tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: empty Summary shows we updated to the new cset $ hg summary parent: 0:c0708cf4e46e tip empty branch: default commit: (clean) update: (current) phases: 1 draft Rename empty file $ hg mv empty-rw empty-rename $ hg commit -i -d '1 0' -m rename< y > EOF diff --git a/empty-rw b/empty-rename rename from empty-rw rename to empty-rename examine changes to 'empty-rw' and 'empty-rename'? [Ynesfdaq?] 
y $ hg tip -p changeset: 1:d695e8dcb197 tag: tip user: test date: Thu Jan 01 00:00:01 1970 +0000 summary: rename Copy empty file $ hg cp empty-rename empty-copy $ hg commit -i -d '2 0' -m copy< y > EOF diff --git a/empty-rename b/empty-copy copy from empty-rename copy to empty-copy examine changes to 'empty-rename' and 'empty-copy'? [Ynesfdaq?] y $ hg tip -p changeset: 2:1d4b90bea524 tag: tip user: test date: Thu Jan 01 00:00:02 1970 +0000 summary: copy Delete empty file $ hg rm empty-copy $ hg commit -i -d '3 0' -m delete< y > EOF diff --git a/empty-copy b/empty-copy deleted file mode 100644 examine changes to 'empty-copy'? [Ynesfdaq?] y $ hg tip -p changeset: 3:b39a238f01a1 tag: tip user: test date: Thu Jan 01 00:00:03 1970 +0000 summary: delete Add binary file $ hg bundle --type v1 --base -2 tip.bundle 1 changesets found $ hg add tip.bundle $ hg commit -i -d '4 0' -m binary< y > EOF diff --git a/tip.bundle b/tip.bundle new file mode 100644 this is a binary file examine changes to 'tip.bundle'? [Ynesfdaq?] y $ hg tip -p changeset: 4:ad816da3711e tag: tip user: test date: Thu Jan 01 00:00:04 1970 +0000 summary: binary diff -r b39a238f01a1 -r ad816da3711e tip.bundle Binary file tip.bundle has changed Change binary file $ hg bundle --base -2 --type v1 tip.bundle 1 changesets found $ hg commit -i -d '5 0' -m binary-change< y > EOF diff --git a/tip.bundle b/tip.bundle this modifies a binary file (all or nothing) examine changes to 'tip.bundle'? [Ynesfdaq?] 
y $ hg tip -p changeset: 5:dccd6f3eb485 tag: tip user: test date: Thu Jan 01 00:00:05 1970 +0000 summary: binary-change diff -r ad816da3711e -r dccd6f3eb485 tip.bundle Binary file tip.bundle has changed Rename and change binary file $ hg mv tip.bundle top.bundle $ hg bundle --base -2 --type v1 top.bundle 1 changesets found $ hg commit -i -d '6 0' -m binary-change-rename< y > EOF diff --git a/tip.bundle b/top.bundle rename from tip.bundle rename to top.bundle this modifies a binary file (all or nothing) examine changes to 'tip.bundle' and 'top.bundle'? [Ynesfdaq?] y $ hg tip -p changeset: 6:7fa44105f5b3 tag: tip user: test date: Thu Jan 01 00:00:06 1970 +0000 summary: binary-change-rename diff -r dccd6f3eb485 -r 7fa44105f5b3 tip.bundle Binary file tip.bundle has changed diff -r dccd6f3eb485 -r 7fa44105f5b3 top.bundle Binary file top.bundle has changed Add plain file $ for i in 1 2 3 4 5 6 7 8 9 10; do > echo $i >> plain > done $ hg add plain $ hg commit -i -d '7 0' -m plain plain< y > y > EOF diff --git a/plain b/plain new file mode 100644 examine changes to 'plain'? [Ynesfdaq?] y @@ -0,0 +1,10 @@ +1 +2 +3 +4 +5 +6 +7 +8 +9 +10 record this change to 'plain'? [Ynesfdaq?] y $ hg tip -p changeset: 7:11fb457c1be4 tag: tip user: test date: Thu Jan 01 00:00:07 1970 +0000 summary: plain diff -r 7fa44105f5b3 -r 11fb457c1be4 plain --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/plain Thu Jan 01 00:00:07 1970 +0000 @@ -0,0 +1,10 @@ +1 +2 +3 +4 +5 +6 +7 +8 +9 +10 Modify end of plain file with username unset $ echo 11 >> plain $ unset HGUSER $ hg commit -i --config ui.username= -d '8 0' -m end plain abort: no username supplied (use "hg config --edit" to set your username) [255] Modify end of plain file, also test that diffopts are accounted for $ HGUSER="test" $ export HGUSER $ hg commit -i --config diff.showfunc=true -d '8 0' -m end plain < y > y > EOF diff --git a/plain b/plain 1 hunks, 1 lines changed examine changes to 'plain'? [Ynesfdaq?] 
y @@ -8,3 +8,4 @@ 7 8 9 10 +11 record this change to 'plain'? [Ynesfdaq?] y Modify end of plain file, no EOL $ hg tip --template '{node}' >> plain $ hg commit -i -d '9 0' -m noeol plain < y > y > EOF diff --git a/plain b/plain 1 hunks, 1 lines changed examine changes to 'plain'? [Ynesfdaq?] y @@ -9,3 +9,4 @@ 8 9 10 11 +7264f99c5f5ff3261504828afa4fb4d406c3af54 \ No newline at end of file record this change to 'plain'? [Ynesfdaq?] y Record showfunc should preserve function across sections $ cat > f1.py < def annotate(ui, repo, *pats, **opts): > """show changeset information by line for each file > > List changes in files, showing the revision id responsible for > each line. > > This command is useful for discovering when a change was made and > by whom. > > If you include -f/-u/-d, the revision number is suppressed unless > you also include -the revision number is suppressed unless > you also include -n. > > Without the -a/--text option, annotate will avoid processing files > it detects as binary. With -a, annotate will annotate the file > anyway, although the results will probably be neither useful > nor desirable. > > Returns 0 on success. > """ > return 0 > def archive(ui, repo, dest, **opts): > '''create an unversioned archive of a repository revision > > By default, the revision used is the parent of the working > directory; use -r/--rev to specify a different revision. > > The archive type is automatically detected based on file > extension (to override, use -t/--type). > > .. container:: verbose > > Valid types are: > EOF $ hg add f1.py $ hg commit -m funcs $ cat > f1.py < def annotate(ui, repo, *pats, **opts): > """show changeset information by line for each file > > List changes in files, showing the revision id responsible for > each line > > This command is useful for discovering when a change was made and > by whom. > > Without the -a/--text option, annotate will avoid processing files > it detects as binary. 
With -a, annotate will annotate the file > anyway, although the results will probably be neither useful > nor desirable. > > Returns 0 on success. > """ > return 0 > def archive(ui, repo, dest, **opts): > '''create an unversioned archive of a repository revision > > By default, the revision used is the parent of the working > directory; use -r/--rev to specify a different revision. > > The archive type is automatically detected based on file > extension (or override using -t/--type). > > .. container:: verbose > > Valid types are: > EOF $ hg commit -i -m interactive < y > y > y > y > EOF diff --git a/f1.py b/f1.py 3 hunks, 6 lines changed examine changes to 'f1.py'? [Ynesfdaq?] y @@ -2,8 +2,8 @@ def annotate(ui, repo, *pats, **opts): """show changeset information by line for each file List changes in files, showing the revision id responsible for - each line. + each line This command is useful for discovering when a change was made and by whom. record change 1/3 to 'f1.py'? [Ynesfdaq?] y @@ -6,11 +6,7 @@ def annotate(ui, repo, *pats, **opts): This command is useful for discovering when a change was made and by whom. - If you include -f/-u/-d, the revision number is suppressed unless - you also include -the revision number is suppressed unless - you also include -n. - Without the -a/--text option, annotate will avoid processing files it detects as binary. With -a, annotate will annotate the file anyway, although the results will probably be neither useful record change 2/3 to 'f1.py'? [Ynesfdaq?] y @@ -26,7 +22,7 @@ def archive(ui, repo, dest, **opts): directory; use -r/--rev to specify a different revision. The archive type is automatically detected based on file - extension (to override, use -t/--type). + extension (or override using -t/--type). .. container:: verbose record change 3/3 to 'f1.py'? [Ynesfdaq?] 
y Modify end of plain file, add EOL $ echo >> plain $ echo 1 > plain2 $ hg add plain2 $ hg commit -i -d '10 0' -m eol plain plain2 < y > y > y > y > EOF diff --git a/plain b/plain 1 hunks, 1 lines changed examine changes to 'plain'? [Ynesfdaq?] y @@ -9,4 +9,4 @@ 8 9 10 11 -7264f99c5f5ff3261504828afa4fb4d406c3af54 \ No newline at end of file +7264f99c5f5ff3261504828afa4fb4d406c3af54 record change 1/2 to 'plain'? [Ynesfdaq?] y diff --git a/plain2 b/plain2 new file mode 100644 examine changes to 'plain2'? [Ynesfdaq?] y @@ -0,0 +1,1 @@ +1 record change 2/2 to 'plain2'? [Ynesfdaq?] y Modify beginning, trim end, record both, add another file to test changes numbering $ rm plain $ for i in 2 2 3 4 5 6 7 8 9 10; do > echo $i >> plain > done $ echo 2 >> plain2 $ hg commit -i -d '10 0' -m begin-and-end plain plain2 < y > y > y > y > y > EOF diff --git a/plain b/plain 2 hunks, 3 lines changed examine changes to 'plain'? [Ynesfdaq?] y @@ -1,4 +1,4 @@ -1 +2 2 3 4 record change 1/3 to 'plain'? [Ynesfdaq?] y @@ -8,5 +8,3 @@ 7 8 9 10 -11 -7264f99c5f5ff3261504828afa4fb4d406c3af54 record change 2/3 to 'plain'? [Ynesfdaq?] y diff --git a/plain2 b/plain2 1 hunks, 1 lines changed examine changes to 'plain2'? [Ynesfdaq?] y @@ -1,1 +1,2 @@ 1 +2 record change 3/3 to 'plain2'? [Ynesfdaq?] 
y $ hg tip -p changeset: 13:f941910cff62 tag: tip user: test date: Thu Jan 01 00:00:10 1970 +0000 summary: begin-and-end diff -r 33abe24d946c -r f941910cff62 plain --- a/plain Thu Jan 01 00:00:10 1970 +0000 +++ b/plain Thu Jan 01 00:00:10 1970 +0000 @@ -1,4 +1,4 @@ -1 +2 2 3 4 @@ -8,5 +8,3 @@ 8 9 10 -11 -7264f99c5f5ff3261504828afa4fb4d406c3af54 diff -r 33abe24d946c -r f941910cff62 plain2 --- a/plain2 Thu Jan 01 00:00:10 1970 +0000 +++ b/plain2 Thu Jan 01 00:00:10 1970 +0000 @@ -1,1 +1,2 @@ 1 +2 Trim beginning, modify end $ rm plain > for i in 4 5 6 7 8 9 10.new; do > echo $i >> plain > done Record end $ hg commit -i -d '11 0' -m end-only plain < y > n > y > EOF diff --git a/plain b/plain 2 hunks, 4 lines changed examine changes to 'plain'? [Ynesfdaq?] y @@ -1,9 +1,6 @@ -2 -2 -3 4 5 6 7 8 9 record change 1/2 to 'plain'? [Ynesfdaq?] n @@ -4,7 +1,7 @@ 4 5 6 7 8 9 -10 +10.new record change 2/2 to 'plain'? [Ynesfdaq?] y $ hg tip -p changeset: 14:4915f538659b tag: tip user: test date: Thu Jan 01 00:00:11 1970 +0000 summary: end-only diff -r f941910cff62 -r 4915f538659b plain --- a/plain Thu Jan 01 00:00:10 1970 +0000 +++ b/plain Thu Jan 01 00:00:11 1970 +0000 @@ -7,4 +7,4 @@ 7 8 9 -10 +10.new Record beginning $ hg commit -i -d '12 0' -m begin-only plain < y > y > EOF diff --git a/plain b/plain 1 hunks, 3 lines changed examine changes to 'plain'? [Ynesfdaq?] y @@ -1,6 +1,3 @@ -2 -2 -3 4 5 6 record this change to 'plain'? [Ynesfdaq?] y $ hg tip -p changeset: 15:1b1f93d4b94b tag: tip user: test date: Thu Jan 01 00:00:12 1970 +0000 summary: begin-only diff -r 4915f538659b -r 1b1f93d4b94b plain --- a/plain Thu Jan 01 00:00:11 1970 +0000 +++ b/plain Thu Jan 01 00:00:12 1970 +0000 @@ -1,6 +1,3 @@ -2 -2 -3 4 5 6 Add to beginning, trim from end $ rm plain $ for i in 1 2 3 4 5 6 7 8 9; do > echo $i >> plain > done Record end $ hg commit -i --traceback -d '13 0' -m end-again plain< y > n > y > EOF diff --git a/plain b/plain 2 hunks, 4 lines changed examine changes to 'plain'? 
[Ynesfdaq?] y @@ -1,6 +1,9 @@ +1 +2 +3 4 5 6 7 8 9 record change 1/2 to 'plain'? [Ynesfdaq?] n @@ -1,7 +4,6 @@ 4 5 6 7 8 9 -10.new record change 2/2 to 'plain'? [Ynesfdaq?] y Add to beginning, middle, end $ rm plain $ for i in 1 2 3 4 5 5.new 5.reallynew 6 7 8 9 10 11; do > echo $i >> plain > done Record beginning, middle, and test that format-breaking diffopts are ignored $ hg commit -i --config diff.noprefix=True -d '14 0' -m middle-only plain < y > y > y > n > EOF diff --git a/plain b/plain 3 hunks, 7 lines changed examine changes to 'plain'? [Ynesfdaq?] y @@ -1,2 +1,5 @@ +1 +2 +3 4 5 record change 1/3 to 'plain'? [Ynesfdaq?] y @@ -1,6 +4,8 @@ 4 5 +5.new +5.reallynew 6 7 8 9 record change 2/3 to 'plain'? [Ynesfdaq?] y @@ -3,4 +8,6 @@ 6 7 8 9 +10 +11 record change 3/3 to 'plain'? [Ynesfdaq?] n $ hg tip -p changeset: 17:41cf3f5c55ae tag: tip user: test date: Thu Jan 01 00:00:14 1970 +0000 summary: middle-only diff -r a69d252246e1 -r 41cf3f5c55ae plain --- a/plain Thu Jan 01 00:00:13 1970 +0000 +++ b/plain Thu Jan 01 00:00:14 1970 +0000 @@ -1,5 +1,10 @@ +1 +2 +3 4 5 +5.new +5.reallynew 6 7 8 Record end $ hg commit -i -d '15 0' -m end-only plain < y > y > EOF diff --git a/plain b/plain 1 hunks, 2 lines changed examine changes to 'plain'? [Ynesfdaq?] y @@ -9,3 +9,5 @@ 6 7 8 9 +10 +11 record this change to 'plain'? [Ynesfdaq?] y $ hg tip -p changeset: 18:58a72f46bc24 tag: tip user: test date: Thu Jan 01 00:00:15 1970 +0000 summary: end-only diff -r 41cf3f5c55ae -r 58a72f46bc24 plain --- a/plain Thu Jan 01 00:00:14 1970 +0000 +++ b/plain Thu Jan 01 00:00:15 1970 +0000 @@ -9,3 +9,5 @@ 7 8 9 +10 +11 $ mkdir subdir $ cd subdir $ echo a > a $ hg ci -d '16 0' -Amsubdir adding subdir/a $ echo a >> a $ hg commit -i -d '16 0' -m subdir-change a < y > y > EOF diff --git a/subdir/a b/subdir/a 1 hunks, 1 lines changed examine changes to 'subdir/a'? [Ynesfdaq?] y @@ -1,1 +1,2 @@ a +a record this change to 'subdir/a'? [Ynesfdaq?] 
y $ hg tip -p changeset: 20:e0f6b99f6c49 tag: tip user: test date: Thu Jan 01 00:00:16 1970 +0000 summary: subdir-change diff -r abd26b51de37 -r e0f6b99f6c49 subdir/a --- a/subdir/a Thu Jan 01 00:00:16 1970 +0000 +++ b/subdir/a Thu Jan 01 00:00:16 1970 +0000 @@ -1,1 +1,2 @@ a +a $ echo a > f1 $ echo b > f2 $ hg add f1 f2 $ hg ci -mz -d '17 0' $ echo a >> f1 $ echo b >> f2 Help, quit $ hg commit -i < ? > q > EOF diff --git a/subdir/f1 b/subdir/f1 1 hunks, 1 lines changed examine changes to 'subdir/f1'? [Ynesfdaq?] ? y - yes, record this change n - no, skip this change e - edit this change manually s - skip remaining changes to this file f - record remaining changes to this file d - done, skip remaining changes and files a - record all changes to all remaining files q - quit, recording no changes ? - ? (display help) examine changes to 'subdir/f1'? [Ynesfdaq?] q abort: user quit [255] Skip $ hg commit -i < s > EOF diff --git a/subdir/f1 b/subdir/f1 1 hunks, 1 lines changed examine changes to 'subdir/f1'? [Ynesfdaq?] s diff --git a/subdir/f2 b/subdir/f2 1 hunks, 1 lines changed examine changes to 'subdir/f2'? [Ynesfdaq?] abort: response expected [255] No $ hg commit -i < n > EOF diff --git a/subdir/f1 b/subdir/f1 1 hunks, 1 lines changed examine changes to 'subdir/f1'? [Ynesfdaq?] n diff --git a/subdir/f2 b/subdir/f2 1 hunks, 1 lines changed examine changes to 'subdir/f2'? [Ynesfdaq?] abort: response expected [255] f, quit $ hg commit -i < f > q > EOF diff --git a/subdir/f1 b/subdir/f1 1 hunks, 1 lines changed examine changes to 'subdir/f1'? [Ynesfdaq?] f diff --git a/subdir/f2 b/subdir/f2 1 hunks, 1 lines changed examine changes to 'subdir/f2'? [Ynesfdaq?] q abort: user quit [255] s, all $ hg commit -i -d '18 0' -mx < s > a > EOF diff --git a/subdir/f1 b/subdir/f1 1 hunks, 1 lines changed examine changes to 'subdir/f1'? [Ynesfdaq?] s diff --git a/subdir/f2 b/subdir/f2 1 hunks, 1 lines changed examine changes to 'subdir/f2'? [Ynesfdaq?] 
a $ hg tip -p changeset: 22:6afbbefacf35 tag: tip user: test date: Thu Jan 01 00:00:18 1970 +0000 summary: x diff -r b73c401c693c -r 6afbbefacf35 subdir/f2 --- a/subdir/f2 Thu Jan 01 00:00:17 1970 +0000 +++ b/subdir/f2 Thu Jan 01 00:00:18 1970 +0000 @@ -1,1 +1,2 @@ b +b f $ hg commit -i -d '19 0' -my < f > EOF diff --git a/subdir/f1 b/subdir/f1 1 hunks, 1 lines changed examine changes to 'subdir/f1'? [Ynesfdaq?] f $ hg tip -p changeset: 23:715028a33949 tag: tip user: test date: Thu Jan 01 00:00:19 1970 +0000 summary: y diff -r 6afbbefacf35 -r 715028a33949 subdir/f1 --- a/subdir/f1 Thu Jan 01 00:00:18 1970 +0000 +++ b/subdir/f1 Thu Jan 01 00:00:19 1970 +0000 @@ -1,1 +1,2 @@ a +a #if execbit Preserve chmod +x $ chmod +x f1 $ echo a >> f1 $ hg commit -i -d '20 0' -mz < y > y > y > EOF diff --git a/subdir/f1 b/subdir/f1 old mode 100644 new mode 100755 1 hunks, 1 lines changed examine changes to 'subdir/f1'? [Ynesfdaq?] y @@ -1,2 +1,3 @@ a a +a record this change to 'subdir/f1'? [Ynesfdaq?] y $ hg tip --config diff.git=True -p changeset: 24:db967c1e5884 tag: tip user: test date: Thu Jan 01 00:00:20 1970 +0000 summary: z diff --git a/subdir/f1 b/subdir/f1 old mode 100644 new mode 100755 --- a/subdir/f1 +++ b/subdir/f1 @@ -1,2 +1,3 @@ a a +a Preserve execute permission on original $ echo b >> f1 $ hg commit -i -d '21 0' -maa < y > y > y > EOF diff --git a/subdir/f1 b/subdir/f1 1 hunks, 1 lines changed examine changes to 'subdir/f1'? [Ynesfdaq?] y @@ -1,3 +1,4 @@ a a a +b record this change to 'subdir/f1'? [Ynesfdaq?] y $ hg tip --config diff.git=True -p changeset: 25:88903aef81c3 tag: tip user: test date: Thu Jan 01 00:00:21 1970 +0000 summary: aa diff --git a/subdir/f1 b/subdir/f1 --- a/subdir/f1 +++ b/subdir/f1 @@ -1,3 +1,4 @@ a a a +b Preserve chmod -x $ chmod -x f1 $ echo c >> f1 $ hg commit -i -d '22 0' -mab < y > y > y > EOF diff --git a/subdir/f1 b/subdir/f1 old mode 100755 new mode 100644 1 hunks, 1 lines changed examine changes to 'subdir/f1'? [Ynesfdaq?] 
y @@ -2,3 +2,4 @@ a a a b +c record this change to 'subdir/f1'? [Ynesfdaq?] y $ hg tip --config diff.git=True -p changeset: 26:7af84b6cf560 tag: tip user: test date: Thu Jan 01 00:00:22 1970 +0000 summary: ab diff --git a/subdir/f1 b/subdir/f1 old mode 100755 new mode 100644 --- a/subdir/f1 +++ b/subdir/f1 @@ -2,3 +2,4 @@ a a b +c #else Slightly bogus tests to get almost same repo structure as when x bit is used - but with different hashes. Mock "Preserve chmod +x" $ echo a >> f1 $ hg commit -i -d '20 0' -mz < y > y > y > EOF diff --git a/subdir/f1 b/subdir/f1 1 hunks, 1 lines changed examine changes to 'subdir/f1'? [Ynesfdaq?] y @@ -1,2 +1,3 @@ a a +a record this change to 'subdir/f1'? [Ynesfdaq?] y $ hg tip --config diff.git=True -p changeset: 24:c26cfe2c4eb0 tag: tip user: test date: Thu Jan 01 00:00:20 1970 +0000 summary: z diff --git a/subdir/f1 b/subdir/f1 --- a/subdir/f1 +++ b/subdir/f1 @@ -1,2 +1,3 @@ a a +a Mock "Preserve execute permission on original" $ echo b >> f1 $ hg commit -i -d '21 0' -maa < y > y > y > EOF diff --git a/subdir/f1 b/subdir/f1 1 hunks, 1 lines changed examine changes to 'subdir/f1'? [Ynesfdaq?] y @@ -1,3 +1,4 @@ a a a +b record this change to 'subdir/f1'? [Ynesfdaq?] y $ hg tip --config diff.git=True -p changeset: 25:a48d2d60adde tag: tip user: test date: Thu Jan 01 00:00:21 1970 +0000 summary: aa diff --git a/subdir/f1 b/subdir/f1 --- a/subdir/f1 +++ b/subdir/f1 @@ -1,3 +1,4 @@ a a a +b Mock "Preserve chmod -x" $ chmod -x f1 $ echo c >> f1 $ hg commit -i -d '22 0' -mab < y > y > y > EOF diff --git a/subdir/f1 b/subdir/f1 1 hunks, 1 lines changed examine changes to 'subdir/f1'? [Ynesfdaq?] y @@ -2,3 +2,4 @@ a a a b +c record this change to 'subdir/f1'? [Ynesfdaq?] y $ hg tip --config diff.git=True -p changeset: 26:5cc89ae210fa tag: tip user: test date: Thu Jan 01 00:00:22 1970 +0000 summary: ab diff --git a/subdir/f1 b/subdir/f1 --- a/subdir/f1 +++ b/subdir/f1 @@ -2,3 +2,4 @@ a a b +c #endif $ cd .. 
Abort early when a merge is in progress $ hg up 4 1 files updated, 0 files merged, 7 files removed, 0 files unresolved $ touch iwillmergethat $ hg add iwillmergethat $ hg branch thatbranch marked working directory as branch thatbranch (branches are permanent and global, did you want a bookmark?) $ hg ci -m'new head' $ hg up default 7 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg merge thatbranch 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg commit -i -m'will abort' abort: cannot partially commit a merge (use "hg commit" instead) [255] $ hg up -C 0 files updated, 0 files merged, 1 files removed, 0 files unresolved Editing patch (and ignoring trailing text) $ cat > editor.sh << '__EOF__' > sed -e 7d -e '5s/^-/ /' -e '/^# ---/i\ > trailing\nditto' "$1" > tmp > mv tmp "$1" > __EOF__ $ cat > editedfile << '__EOF__' > This is the first line > This is the second line > This is the third line > __EOF__ $ hg add editedfile $ hg commit -medit-patch-1 $ cat > editedfile << '__EOF__' > This line has changed > This change will be committed > This is the third line > __EOF__ $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit -i -d '23 0' -medit-patch-2 < y > e > EOF diff --git a/editedfile b/editedfile 1 hunks, 2 lines changed examine changes to 'editedfile'? [Ynesfdaq?] y @@ -1,3 +1,3 @@ -This is the first line -This is the second line +This line has changed +This change will be committed This is the third line record this change to 'editedfile'? [Ynesfdaq?] e $ cat editedfile This line has changed This change will be committed This is the third line $ hg cat -r tip editedfile This is the first line This change will be committed This is the third line $ hg revert editedfile Trying to edit patch for whole file $ echo "This is the fourth line" >> editedfile $ hg commit -i < e > q > EOF diff --git a/editedfile b/editedfile 1 hunks, 1 lines changed examine changes to 'editedfile'? 
[Ynesfdaq?] e cannot edit patch for whole file examine changes to 'editedfile'? [Ynesfdaq?] q abort: user quit [255] $ hg revert editedfile Removing changes from patch $ sed -e '3s/third/second/' -e '2s/will/will not/' -e 1d editedfile > tmp $ mv tmp editedfile $ echo "This line has been added" >> editedfile $ cat > editor.sh << '__EOF__' > sed -e 's/^[-+]/ /' "$1" > tmp > mv tmp "$1" > __EOF__ $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit -i < y > e > EOF diff --git a/editedfile b/editedfile 1 hunks, 3 lines changed examine changes to 'editedfile'? [Ynesfdaq?] y @@ -1,3 +1,3 @@ -This is the first line -This change will be committed -This is the third line +This change will not be committed +This is the second line +This line has been added record this change to 'editedfile'? [Ynesfdaq?] e no changes to record $ cat editedfile This change will not be committed This is the second line This line has been added $ hg cat -r tip editedfile This is the first line This change will be committed This is the third line $ hg revert editedfile Invalid patch $ sed -e '3s/third/second/' -e '2s/will/will not/' -e 1d editedfile > tmp $ mv tmp editedfile $ echo "This line has been added" >> editedfile $ cat > editor.sh << '__EOF__' > sed s/This/That/ "$1" > tmp > mv tmp "$1" > __EOF__ $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit -i < y > e > EOF diff --git a/editedfile b/editedfile 1 hunks, 3 lines changed examine changes to 'editedfile'? [Ynesfdaq?] y @@ -1,3 +1,3 @@ -This is the first line -This change will be committed -This is the third line +This change will not be committed +This is the second line +This line has been added record this change to 'editedfile'? [Ynesfdaq?] 
e patching file editedfile Hunk #1 FAILED at 0 1 out of 1 hunks FAILED -- saving rejects to file editedfile.rej abort: patch failed to apply [255] $ cat editedfile This change will not be committed This is the second line This line has been added $ hg cat -r tip editedfile This is the first line This change will be committed This is the third line $ cat editedfile.rej --- editedfile +++ editedfile @@ -1,3 +1,3 @@ -That is the first line -That change will be committed -That is the third line +That change will not be committed +That is the second line +That line has been added Malformed patch - error handling $ cat > editor.sh << '__EOF__' > sed -e '/^@/p' "$1" > tmp > mv tmp "$1" > __EOF__ $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit -i < y > e > EOF diff --git a/editedfile b/editedfile 1 hunks, 3 lines changed examine changes to 'editedfile'? [Ynesfdaq?] y @@ -1,3 +1,3 @@ -This is the first line -This change will be committed -This is the third line +This change will not be committed +This is the second line +This line has been added record this change to 'editedfile'? [Ynesfdaq?] e abort: error parsing patch: unhandled transition: range -> range [255] Exiting editor with status 1, ignores the edit but does not stop the recording session $ HGEDITOR=false hg commit -i < y > e > n > EOF diff --git a/editedfile b/editedfile 1 hunks, 3 lines changed examine changes to 'editedfile'? [Ynesfdaq?] y @@ -1,3 +1,3 @@ -This is the first line -This change will be committed -This is the third line +This change will not be committed +This is the second line +This line has been added record this change to 'editedfile'? [Ynesfdaq?] e editor exited with exit code 1 record this change to 'editedfile'? [Ynesfdaq?] 
n no changes to record random text in random positions is still an error $ cat > editor.sh << '__EOF__' > sed -e '/^@/i\ > other' "$1" > tmp > mv tmp "$1" > __EOF__ $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit -i < y > e > EOF diff --git a/editedfile b/editedfile 1 hunks, 3 lines changed examine changes to 'editedfile'? [Ynesfdaq?] y @@ -1,3 +1,3 @@ -This is the first line -This change will be committed -This is the third line +This change will not be committed +This is the second line +This line has been added record this change to 'editedfile'? [Ynesfdaq?] e abort: error parsing patch: unhandled transition: file -> other [255] $ hg up -C 1 files updated, 0 files merged, 0 files removed, 0 files unresolved With win32text $ echo '[extensions]' >> .hg/hgrc $ echo 'win32text = ' >> .hg/hgrc $ echo '[decode]' >> .hg/hgrc $ echo '** = cleverdecode:' >> .hg/hgrc $ echo '[encode]' >> .hg/hgrc $ echo '** = cleverencode:' >> .hg/hgrc $ echo '[patch]' >> .hg/hgrc $ echo 'eol = crlf' >> .hg/hgrc Ignore win32text deprecation warning for now: $ echo '[win32text]' >> .hg/hgrc $ echo 'warn = no' >> .hg/hgrc $ echo d >> subdir/f1 $ hg commit -i -d '24 0' -mw1 < y > y > EOF diff --git a/subdir/f1 b/subdir/f1 1 hunks, 1 lines changed examine changes to 'subdir/f1'? [Ynesfdaq?] y @@ -3,3 +3,4 @@ a a b c +d record this change to 'subdir/f1'? [Ynesfdaq?] y $ hg status -A subdir/f1 C subdir/f1 $ hg tip -p changeset: 30:* (glob) tag: tip user: test date: Thu Jan 01 00:00:24 1970 +0000 summary: w1 diff -r ???????????? -r ???????????? subdir/f1 (glob) --- a/subdir/f1 Thu Jan 01 00:00:23 1970 +0000 +++ b/subdir/f1 Thu Jan 01 00:00:24 1970 +0000 @@ -3,3 +3,4 @@ a b c +d Test --user when ui.username not set $ unset HGUSER $ echo e >> subdir/f1 $ hg commit -i --config ui.username= -d '8 0' --user xyz -m "user flag" < y > y > EOF diff --git a/subdir/f1 b/subdir/f1 1 hunks, 1 lines changed examine changes to 'subdir/f1'? [Ynesfdaq?] 
y @@ -4,3 +4,4 @@ a b c d +e record this change to 'subdir/f1'? [Ynesfdaq?] y $ hg status -A subdir/f1 C subdir/f1 $ hg log --template '{author}\n' -l 1 xyz $ HGUSER="test" $ export HGUSER Moving files $ hg update -C . 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg mv plain plain3 $ echo somechange >> plain3 $ hg commit -i -d '23 0' -mmoving_files << EOF > y > y > EOF diff --git a/plain b/plain3 rename from plain rename to plain3 1 hunks, 1 lines changed examine changes to 'plain' and 'plain3'? [Ynesfdaq?] y @@ -11,3 +11,4 @@ 8 9 10 11 +somechange record this change to 'plain3'? [Ynesfdaq?] y The #if execbit block above changes the hash here on some systems $ hg status -A plain3 C plain3 $ hg tip changeset: 32:* (glob) tag: tip user: test date: Thu Jan 01 00:00:23 1970 +0000 summary: moving_files Editing patch of newly added file $ hg update -C . 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat > editor.sh << '__EOF__' > cat "$1" | sed "s/first/very/g" > tt > mv tt "$1" > __EOF__ $ cat > newfile << '__EOF__' > This is the first line > This is the second line > This is the third line > __EOF__ $ hg add newfile $ HGEDITOR="\"sh\" \"`pwd`/editor.sh\"" hg commit -i -d '23 0' -medit-patch-new < y > e > EOF diff --git a/newfile b/newfile new file mode 100644 examine changes to 'newfile'? [Ynesfdaq?] y @@ -0,0 +1,3 @@ +This is the first line +This is the second line +This is the third line record this change to 'newfile'? [Ynesfdaq?] e $ hg cat -r tip newfile This is the very line This is the second line This is the third line $ cat newfile This is the first line This is the second line This is the third line Add new file from within a subdirectory $ hg update -C . 
1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ mkdir folder $ cd folder $ echo "foo" > bar $ hg add bar $ hg commit -i -d '23 0' -mnewfilesubdir < y > y > EOF diff --git a/folder/bar b/folder/bar new file mode 100644 examine changes to 'folder/bar'? [Ynesfdaq?] y @@ -0,0 +1,1 @@ +foo record this change to 'folder/bar'? [Ynesfdaq?] y The #if execbit block above changes the hashes here on some systems $ hg tip -p changeset: 34:* (glob) tag: tip user: test date: Thu Jan 01 00:00:23 1970 +0000 summary: newfilesubdir diff -r * -r * folder/bar (glob) --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/folder/bar Thu Jan 01 00:00:23 1970 +0000 @@ -0,0 +1,1 @@ +foo $ cd .. $ hg status -A folder/bar C folder/bar Clear win32text configuration before size/timestamp sensitive test $ cat >> .hg/hgrc < [extensions] > win32text = ! > [decode] > ** = ! > [encode] > ** = ! > [patch] > eol = strict > EOF $ hg update -q -C null $ hg update -q -C tip Test that partially committed file is still treated as "modified", even if none of mode, size and timestamp is changed on the filesystem (see also issue4583). $ cat > subdir/f1 < A > a > a > b > c > d > E > EOF $ hg diff --git subdir/f1 diff --git a/subdir/f1 b/subdir/f1 --- a/subdir/f1 +++ b/subdir/f1 @@ -1,7 +1,7 @@ -a +A a a b c d -e +E $ touch -t 200001010000 subdir/f1 $ cat >> .hg/hgrc < # emulate invoking patch.internalpatch() at 2000-01-01 00:00 > [fakepatchtime] > fakenow = 200001010000 > > [extensions] > fakepatchtime = $TESTDIR/fakepatchtime.py > EOF $ hg commit -i -m 'commit subdir/f1 partially' < y > y > n > EOF diff --git a/subdir/f1 b/subdir/f1 2 hunks, 2 lines changed examine changes to 'subdir/f1'? [Ynesfdaq?] y @@ -1,6 +1,6 @@ -a +A a a b c d record change 1/2 to 'subdir/f1'? [Ynesfdaq?] y @@ -2,6 +2,6 @@ a a b c d -e +E record change 2/2 to 'subdir/f1'? [Ynesfdaq?] n $ cat >> .hg/hgrc < [extensions] > fakepatchtime = ! 
> EOF $ hg debugstate | grep ' subdir/f1$' n 0 -1 unset subdir/f1 $ hg status -A subdir/f1 M subdir/f1 mercurial-3.7.3/tests/test-push-validation.t0000644000175000017500000000364312676531525020550 0ustar mpmmpm00000000000000 $ hg init test $ cd test $ cat > .hg/hgrc < [server] > validate=1 > EOF $ echo alpha > alpha $ echo beta > beta $ hg addr adding alpha adding beta $ hg ci -m 1 $ cd .. $ hg clone test test-clone updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved Test spurious filelog entries: $ cd test-clone $ echo blah >> beta $ cp .hg/store/data/beta.i tmp1 $ hg ci -m 2 $ cp .hg/store/data/beta.i tmp2 $ hg -q rollback $ mv tmp2 .hg/store/data/beta.i $ echo blah >> beta $ hg ci -m '2 (corrupt)' Expected to fail: $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files beta@1: dddc47b3ba30 not in manifests 2 files, 2 changesets, 4 total revisions 1 integrity errors encountered! (first damaged changeset appears to be 1) [1] $ hg push pushing to $TESTTMP/test (glob) searching for changes adding changesets adding manifests adding file changes transaction abort! rollback completed abort: received spurious file revlog entry [255] $ hg -q rollback $ mv tmp1 .hg/store/data/beta.i $ echo beta > beta Test missing filelog entries: $ cp .hg/store/data/beta.i tmp $ echo blah >> beta $ hg ci -m '2 (corrupt)' $ mv tmp .hg/store/data/beta.i Expected to fail: $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files beta@1: dddc47b3ba30 in manifests not found 2 files, 2 changesets, 2 total revisions 1 integrity errors encountered! (first damaged changeset appears to be 1) [1] $ hg push pushing to $TESTTMP/test (glob) searching for changes adding changesets adding manifests adding file changes transaction abort! 
rollback completed abort: missing file data for beta:dddc47b3ba30e54484720ce0f4f768a0f4b6efb9 - run hg verify [255] $ cd .. mercurial-3.7.3/tests/test-rebase-interruptions.t0000644000175000017500000001040112676531525021613 0ustar mpmmpm00000000000000 $ cat >> $HGRCPATH < [extensions] > rebase= > > [phases] > publish=False > > [alias] > tglog = log -G --template "{rev}: '{desc}' {branches}\n" > tglogp = log -G --template "{rev}:{phase} '{desc}' {branches}\n" > EOF $ hg init a $ cd a $ echo A > A $ hg ci -Am A adding A $ echo B > B $ hg ci -Am B adding B $ echo C >> A $ hg ci -m C $ hg up -q -C 0 $ echo D >> A $ hg ci -m D created new head $ echo E > E $ hg ci -Am E adding E $ cd .. Changes during an interruption - continue: $ hg clone -q -u . a a1 $ cd a1 $ hg tglog @ 4: 'E' | o 3: 'D' | | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' Rebasing B onto E: $ hg rebase -s 1 -d 4 rebasing 1:27547f69f254 "B" rebasing 2:965c486023db "C" merging A warning: conflicts while merging A! (edit, then use 'hg resolve --mark') unresolved conflicts (see hg resolve, then hg rebase --continue) [1] Force a commit on C during the interruption: $ hg up -q -C 2 --config 'extensions.rebase=!' $ echo 'Extra' > Extra $ hg add Extra $ hg ci -m 'Extra' --config 'extensions.rebase=!' Force this commit onto secret phase $ hg phase --force --secret 6 $ hg tglogp @ 6:secret 'Extra' | | o 5:draft 'B' | | | o 4:draft 'E' | | | o 3:draft 'D' | | o | 2:draft 'C' | | o | 1:draft 'B' |/ o 0:draft 'A' Resume the rebasing: $ hg rebase --continue already rebased 1:27547f69f254 "B" as 45396c49d53b rebasing 2:965c486023db "C" merging A warning: conflicts while merging A! 
(edit, then use 'hg resolve --mark') unresolved conflicts (see hg resolve, then hg rebase --continue) [1] Solve the conflict and go on: $ echo 'conflict solved' > A $ rm A.orig $ hg resolve -m A (no more unresolved files) continue: hg rebase --continue $ hg rebase --continue already rebased 1:27547f69f254 "B" as 45396c49d53b rebasing 2:965c486023db "C" warning: new changesets detected on source branch, not stripping $ hg tglogp o 7:draft 'C' | | o 6:secret 'Extra' | | o | 5:draft 'B' | | @ | 4:draft 'E' | | o | 3:draft 'D' | | | o 2:draft 'C' | | | o 1:draft 'B' |/ o 0:draft 'A' $ cd .. Changes during an interruption - abort: $ hg clone -q -u . a a2 $ cd a2 $ hg tglog @ 4: 'E' | o 3: 'D' | | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' Rebasing B onto E: $ hg rebase -s 1 -d 4 rebasing 1:27547f69f254 "B" rebasing 2:965c486023db "C" merging A warning: conflicts while merging A! (edit, then use 'hg resolve --mark') unresolved conflicts (see hg resolve, then hg rebase --continue) [1] Force a commit on B' during the interruption: $ hg up -q -C 5 --config 'extensions.rebase=!' $ echo 'Extra' > Extra $ hg add Extra $ hg ci -m 'Extra' --config 'extensions.rebase=!' $ hg tglog @ 6: 'Extra' | o 5: 'B' | o 4: 'E' | o 3: 'D' | | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' Abort the rebasing: $ hg rebase --abort warning: new changesets detected on target branch, can't strip rebase aborted $ hg tglog @ 6: 'Extra' | o 5: 'B' | o 4: 'E' | o 3: 'D' | | o 2: 'C' | | | o 1: 'B' |/ o 0: 'A' $ cd .. Changes during an interruption - abort (again): $ hg clone -q -u . a a3 $ cd a3 $ hg tglogp @ 4:draft 'E' | o 3:draft 'D' | | o 2:draft 'C' | | | o 1:draft 'B' |/ o 0:draft 'A' Rebasing B onto E: $ hg rebase -s 1 -d 4 rebasing 1:27547f69f254 "B" rebasing 2:965c486023db "C" merging A warning: conflicts while merging A! (edit, then use 'hg resolve --mark') unresolved conflicts (see hg resolve, then hg rebase --continue) [1] Change phase on B and B' $ hg up -q -C 5 --config 'extensions.rebase=!' 
$ hg phase --public 1 $ hg phase --public 5 $ hg phase --secret -f 2 $ hg tglogp @ 5:public 'B' | o 4:public 'E' | o 3:public 'D' | | o 2:secret 'C' | | | o 1:public 'B' |/ o 0:public 'A' Abort the rebasing: $ hg rebase --abort warning: can't clean up public changesets 45396c49d53b rebase aborted $ hg tglogp @ 5:public 'B' | o 4:public 'E' | o 3:public 'D' | | o 2:secret 'C' | | | o 1:public 'B' |/ o 0:public 'A' $ cd .. mercurial-3.7.3/tests/test-histedit-outgoing.t0000644000175000017500000000727412676531525021113 0ustar mpmmpm00000000000000 $ cat >> $HGRCPATH < [extensions] > histedit= > EOF $ initrepos () > { > hg init r > cd r > for x in a b c ; do > echo $x > $x > hg add $x > hg ci -m $x > done > cd .. > hg clone r r2 | grep -v updating > cd r2 > for x in d e f ; do > echo $x > $x > hg add $x > hg ci -m $x > done > cd .. > hg init r3 > cd r3 > for x in g h i ; do > echo $x > $x > hg add $x > hg ci -m $x > done > cd .. > } $ initrepos 3 files updated, 0 files merged, 0 files removed, 0 files unresolved show the edit commands offered by outgoing $ cd r2 $ HGEDITOR=cat hg histedit --outgoing ../r | grep -v comparing | grep -v searching pick 055a42cdd887 3 d pick e860deea161a 4 e pick 652413bf663e 5 f # Edit history between 055a42cdd887 and 652413bf663e # # Commits are listed from least to most recent # # Commands: # # e, edit = use commit, but stop for amending # m, mess = edit commit message without changing commit content # p, pick = use commit # d, drop = remove commit from history # f, fold = use commit, but combine it with the one above # r, roll = like fold, but discard this commit's description # $ cd .. show the error from unrelated repos $ cd r3 $ HGEDITOR=cat hg histedit --outgoing ../r | grep -v comparing | grep -v searching abort: repository is unrelated [1] $ cd .. 
show the error from unrelated repos $ cd r3 $ HGEDITOR=cat hg histedit --force --outgoing ../r comparing with ../r searching for changes warning: repository is unrelated pick 2a4042b45417 0 g pick 68c46b4927ce 1 h pick 51281e65ba79 2 i # Edit history between 2a4042b45417 and 51281e65ba79 # # Commits are listed from least to most recent # # Commands: # # e, edit = use commit, but stop for amending # m, mess = edit commit message without changing commit content # p, pick = use commit # d, drop = remove commit from history # f, fold = use commit, but combine it with the one above # r, roll = like fold, but discard this commit's description # $ cd .. test sensitivity to branch in URL: $ cd r2 $ hg -q update 2 $ hg -q branch foo $ hg commit -m 'create foo branch' $ HGEDITOR=cat hg histedit --outgoing '../r#foo' | grep -v comparing | grep -v searching pick f26599ee3441 6 create foo branch # Edit history between f26599ee3441 and f26599ee3441 # # Commits are listed from least to most recent # # Commands: # # e, edit = use commit, but stop for amending # m, mess = edit commit message without changing commit content # p, pick = use commit # d, drop = remove commit from history # f, fold = use commit, but combine it with the one above # r, roll = like fold, but discard this commit's description # test to check number of roots in outgoing revisions $ hg -q outgoing -G --template '{node|short}({branch})' '../r' @ f26599ee3441(foo) o 652413bf663e(default) | o e860deea161a(default) | o 055a42cdd887(default) $ HGEDITOR=cat hg -q histedit --outgoing '../r' abort: there are ambiguous outgoing revisions (see "hg help histedit" for more detail) [255] $ hg -q update -C 2 $ echo aa >> a $ hg -q commit -m 'another head on default' $ hg -q outgoing -G --template '{node|short}({branch})' '../r#default' @ 3879dc049647(default) o 652413bf663e(default) | o e860deea161a(default) | o 055a42cdd887(default) $ HGEDITOR=cat hg -q histedit --outgoing '../r#default' abort: there are ambiguous 
outgoing revisions (see "hg help histedit" for more detail) [255] $ cd .. mercurial-3.7.3/tests/missing-comment.hg0000644000175000017500000000206512676531525017725 0ustar mpmmpm00000000000000HG10BZh91AY&SY¨ï!ÿÿü–ÇcßÿýÓ—Øÿïÿôìyï2 Cè3M>c‰ç¶ßÐ^Sƒk )&€õ2¡  SFšz™Ð 2€mi¦ši¦ÉÈ0š040&ŒŒ ɦ#CÄÓ&˜Œ#hфШ&È™L1¨ 4h 2hi¦ši¦ÉÈ0š040&ŒŒ ɦ#CÄÓ&˜Œ#hфР)5<†šÒS=M2ÐhÐi¦FŒ†#@hѧ©ê4=@ôLšiêÔÓØöúõŒ+[ô†ªÁÝüWv,f ”X žBÿ[Â÷|E„àÆ¸Õˆ=`˜0J; ¢r!’ß²Îâ¢ÎÎÉñŠ÷ÖÄ‚FÁ°«¡)0–’—ýÁ·æ:, Ì5ÁÁL™`ņ3Â:!¨4!`Ÿ6Âý‰e°Ñ~AÁÀÅF8xM'š%8È BxéŠBµ"JíT‰¸Ã°C‰è´Ïë’µ=ØSÐv•êÒ…#|Vë0~Ü^®ßŒR¡d|Ú´„"Ji’å£ ©ÚÜ 4Éb¤d—+¤‡BÈ+nì Á3 .”âÖvÄGX<È'™oŽ-XÖšL¶Y 6’_93,º$q-Ò+Rˆâi°%«Y›@dϽŠ 1LeNv­QÒM)[BlÄJ3¯ºeT@K i¶2y‹rOmм ä 9…”*+4RAy ²$lb°­íÀd=âkà^ÿ >z5$0ɹE‰‚^ÿœ‰rv«ùØÅÙïOC5‘˜A9f [ dãØ¹Gx\OA{»fÏ&MNÕ0Xê$ÌS%H ¿´‰c°WÍp¢€ê. Äa9À8$Ém–½I#¬Äâ…°[9ñÙQ]¼­ Y‚AŒ SUˆ"vWãÄ™ï 2ÚañY‘wL˜ À0Á•HV|†µ>’æý• а;­7Q"I 82I)ÃlðjÈ`m ¡^&ÊÀ’š BO‚bS$`¥óX¨­ר†4 M¦›ÔÓä8±¥R!êb‰ù##̆}é>z3Ê„ÅíS\D…H«7*r#ÄOœcJ¾¦96ŠÖÑ¢XØÑ)Ë`ˆiÑàí"ŒŸ¡Ð)PÚž‰f8Y §úž³]™D0‚©º#£ðŒ£tŒ RÙª]¡”U™FI–uÇåº0ÌÓ¤c´:ZìÈþe!êφá±pˆ(·åŸ Î3 ¢W#ò}ËÛQ ~·5¢ pöYCô$n‚ÇYb (oÌ}á rÉ .Qlˆƒ¯ñw$S… !JŽðmercurial-3.7.3/tests/test-clone.t0000644000175000017500000006410012676531525016534 0ustar mpmmpm00000000000000Prepare repo a: $ hg init a $ cd a $ echo a > a $ hg add a $ hg commit -m test $ echo first line > b $ hg add b Create a non-inlined filelog: $ $PYTHON -c 'file("data1", "wb").write("".join("%s\n" % x for x in range(10000)))' $ for j in 0 1 2 3 4 5 6 7 8 9; do > cat data1 >> b > hg commit -m test > done List files in store/data (should show a 'b.d'): $ for i in .hg/store/data/*; do > echo $i > done .hg/store/data/a.i .hg/store/data/b.d .hg/store/data/b.i Trigger branchcache creation: $ hg branches default 10:a7949464abda $ ls .hg/cache branch2-served rbc-names-v1 rbc-revs-v1 Default operation: $ hg clone . 
../b updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd ../b Ensure branchcache got copied over: $ ls .hg/cache branch2-served $ cat a a $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 11 changesets, 11 total revisions Invalid dest '' must abort: $ hg clone . '' abort: empty destination path is not valid [255] No update, with debug option: #if hardlink $ hg --debug clone -U . ../c --config progress.debug=true linking: 1 linking: 2 linking: 3 linking: 4 linking: 5 linking: 6 linking: 7 linking: 8 linked 8 files #else $ hg --debug clone -U . ../c --config progress.debug=true linking: 1 copying: 2 copying: 3 copying: 4 copying: 5 copying: 6 copying: 7 copying: 8 copied 8 files #endif $ cd ../c Ensure branchcache got copied over: $ ls .hg/cache branch2-served $ cat a 2>/dev/null || echo "a not present" a not present $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 11 changesets, 11 total revisions Default destination: $ mkdir ../d $ cd ../d $ hg clone ../a destination directory: a updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd a $ hg cat a a $ cd ../.. 
Check that we drop the 'file:' from the path before writing the .hgrc: $ hg clone file:a e updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ grep 'file:' e/.hg/hgrc [1] Check that path aliases are expanded: $ hg clone -q -U --config 'paths.foobar=a#0' foobar f $ hg -R f showconfig paths.default $TESTTMP/a#0 (glob) Use --pull: $ hg clone --pull a g requesting all changes adding changesets adding manifests adding file changes added 11 changesets with 11 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R g verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 2 files, 11 changesets, 11 total revisions Invalid dest '' with --pull must abort (issue2528): $ hg clone --pull a '' abort: empty destination path is not valid [255] Clone to '.': $ mkdir h $ cd h $ hg clone ../a . updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd .. *** Tests for option -u *** Adding some more history to repo a: $ cd a $ hg tag ref1 $ echo the quick brown fox >a $ hg ci -m "hacked default" $ hg up ref1 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg branch stable marked working directory as branch stable (branches are permanent and global, did you want a bookmark?) 
$ echo some text >a $ hg ci -m "starting branch stable" $ hg tag ref2 $ echo some more text >a $ hg ci -m "another change for branch stable" $ hg up ref2 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg parents changeset: 13:e8ece76546a6 branch: stable tag: ref2 parent: 10:a7949464abda user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: starting branch stable Repo a has two heads: $ hg heads changeset: 15:0aae7cf88f0d branch: stable tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: another change for branch stable changeset: 12:f21241060d6a user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: hacked default $ cd .. Testing --noupdate with --updaterev (must abort): $ hg clone --noupdate --updaterev 1 a ua abort: cannot specify both --noupdate and --updaterev [255] Testing clone -u: $ hg clone -u . a ua updating to branch stable 2 files updated, 0 files merged, 0 files removed, 0 files unresolved Repo ua has both heads: $ hg -R ua heads changeset: 15:0aae7cf88f0d branch: stable tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: another change for branch stable changeset: 12:f21241060d6a user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: hacked default Same revision checked out in repo a and ua: $ hg -R a parents --template "{node|short}\n" e8ece76546a6 $ hg -R ua parents --template "{node|short}\n" e8ece76546a6 $ rm -r ua Testing clone --pull -u: $ hg clone --pull -u . 
a ua requesting all changes adding changesets adding manifests adding file changes added 16 changesets with 16 changes to 3 files (+1 heads) updating to branch stable 2 files updated, 0 files merged, 0 files removed, 0 files unresolved Repo ua has both heads: $ hg -R ua heads changeset: 15:0aae7cf88f0d branch: stable tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: another change for branch stable changeset: 12:f21241060d6a user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: hacked default Same revision checked out in repo a and ua: $ hg -R a parents --template "{node|short}\n" e8ece76546a6 $ hg -R ua parents --template "{node|short}\n" e8ece76546a6 $ rm -r ua Testing clone -u : $ hg clone -u stable a ua updating to branch stable 3 files updated, 0 files merged, 0 files removed, 0 files unresolved Repo ua has both heads: $ hg -R ua heads changeset: 15:0aae7cf88f0d branch: stable tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: another change for branch stable changeset: 12:f21241060d6a user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: hacked default Branch 'stable' is checked out: $ hg -R ua parents changeset: 15:0aae7cf88f0d branch: stable tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: another change for branch stable $ rm -r ua Testing default checkout: $ hg clone a ua updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved Repo ua has both heads: $ hg -R ua heads changeset: 15:0aae7cf88f0d branch: stable tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: another change for branch stable changeset: 12:f21241060d6a user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: hacked default Branch 'default' is checked out: $ hg -R ua parents changeset: 12:f21241060d6a user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: hacked default Test clone with a branch named "@" (issue3677) $ hg -R ua branch @ marked working directory as branch @ $ hg -R ua 
commit -m 'created branch @' $ hg clone ua atbranch updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R atbranch heads changeset: 16:798b6d97153e branch: @ tag: tip parent: 12:f21241060d6a user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: created branch @ changeset: 15:0aae7cf88f0d branch: stable user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: another change for branch stable changeset: 12:f21241060d6a user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: hacked default $ hg -R atbranch parents changeset: 12:f21241060d6a user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: hacked default $ rm -r ua atbranch Testing #: $ hg clone -u . a#stable ua adding changesets adding manifests adding file changes added 14 changesets with 14 changes to 3 files updating to branch stable 2 files updated, 0 files merged, 0 files removed, 0 files unresolved Repo ua has branch 'stable' and 'default' (was changed in fd511e9eeea6): $ hg -R ua heads changeset: 13:0aae7cf88f0d branch: stable tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: another change for branch stable changeset: 10:a7949464abda user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: test Same revision checked out in repo a and ua: $ hg -R a parents --template "{node|short}\n" e8ece76546a6 $ hg -R ua parents --template "{node|short}\n" e8ece76546a6 $ rm -r ua Testing -u -r : $ hg clone -u . 
-r stable a ua adding changesets adding manifests adding file changes added 14 changesets with 14 changes to 3 files updating to branch stable 2 files updated, 0 files merged, 0 files removed, 0 files unresolved Repo ua has branch 'stable' and 'default' (was changed in fd511e9eeea6): $ hg -R ua heads changeset: 13:0aae7cf88f0d branch: stable tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: another change for branch stable changeset: 10:a7949464abda user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: test Same revision checked out in repo a and ua: $ hg -R a parents --template "{node|short}\n" e8ece76546a6 $ hg -R ua parents --template "{node|short}\n" e8ece76546a6 $ rm -r ua Testing -r : $ hg clone -r stable a ua adding changesets adding manifests adding file changes added 14 changesets with 14 changes to 3 files updating to branch stable 3 files updated, 0 files merged, 0 files removed, 0 files unresolved Repo ua has branch 'stable' and 'default' (was changed in fd511e9eeea6): $ hg -R ua heads changeset: 13:0aae7cf88f0d branch: stable tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: another change for branch stable changeset: 10:a7949464abda user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: test Branch 'stable' is checked out: $ hg -R ua parents changeset: 13:0aae7cf88f0d branch: stable tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: another change for branch stable $ rm -r ua Issue2267: Error in 1.6 hg.py: TypeError: 'NoneType' object is not iterable in addbranchrevs() $ cat < simpleclone.py > from mercurial import ui, hg > myui = ui.ui() > repo = hg.repository(myui, 'a') > hg.clone(myui, {}, repo, dest="ua") > EOF $ python simpleclone.py updating to branch default 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -r ua $ cat < branchclone.py > from mercurial import ui, hg, extensions > myui = ui.ui() > extensions.loadall(myui) > repo = hg.repository(myui, 'a') > 
hg.clone(myui, {}, repo, dest="ua", branch=["stable",]) > EOF $ python branchclone.py adding changesets adding manifests adding file changes added 14 changesets with 14 changes to 3 files updating to branch stable 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ rm -r ua Test clone with special '@' bookmark: $ cd a $ hg bookmark -r a7949464abda @ # branch point of stable from default $ hg clone . ../i updating to bookmark @ 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg id -i ../i a7949464abda $ rm -r ../i $ hg bookmark -f -r stable @ $ hg bookmarks @ 15:0aae7cf88f0d $ hg clone . ../i updating to bookmark @ on branch stable 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg id -i ../i 0aae7cf88f0d $ cd "$TESTTMP" Testing failures: $ mkdir fail $ cd fail No local source $ hg clone a b abort: repository a not found! [255] No remote source #if windows $ hg clone http://127.0.0.1:3121/a b abort: error: * (glob) [255] #else $ hg clone http://127.0.0.1:3121/a b abort: error: *refused* (glob) [255] #endif $ rm -rf b # work around bug with http clone #if unix-permissions no-root Inaccessible source $ mkdir a $ chmod 000 a $ hg clone a b abort: repository a not found! [255] Inaccessible destination $ hg init b $ cd b $ hg clone . ../a abort: Permission denied: '../a' [255] $ cd .. $ chmod 700 a $ rm -r a b #endif #if fifo Source of wrong type $ mkfifo a $ hg clone a b abort: repository a not found! [255] $ rm a #endif Default destination, same directory $ hg init q $ hg clone q destination directory: q abort: destination 'q' is not empty [255] destination directory not empty $ mkdir a $ echo stuff > a/a $ hg clone q a abort: destination 'a' is not empty [255] #if unix-permissions no-root leave existing directory in place after clone failure $ hg init c $ cd c $ echo c > c $ hg commit -A -m test adding c $ chmod -rx .hg/store/data $ cd .. 
$ mkdir d $ hg clone c d 2> err [255] $ test -d d $ test -d d/.hg [1] re-enable perm to allow deletion $ chmod +rx c/.hg/store/data #endif $ cd .. Test clone from the repository in (emulated) revlog format 0 (issue4203): $ mkdir issue4203 $ mkdir -p src/.hg $ echo foo > src/foo $ hg -R src add src/foo $ hg -R src commit -m '#0' $ hg -R src log -q 0:e1bab28bca43 $ hg clone -U -q src dst $ hg -R dst log -q 0:e1bab28bca43 Create repositories to test auto sharing functionality $ cat >> $HGRCPATH << EOF > [extensions] > share= > EOF $ hg init empty $ hg init source1a $ cd source1a $ echo initial1 > foo $ hg -q commit -A -m initial $ echo second > foo $ hg commit -m second $ cd .. $ hg init filteredrev0 $ cd filteredrev0 $ cat >> .hg/hgrc << EOF > [experimental] > evolution=createmarkers > EOF $ echo initial1 > foo $ hg -q commit -A -m initial0 $ hg -q up -r null $ echo initial2 > foo $ hg -q commit -A -m initial1 $ hg debugobsolete c05d5c47a5cf81401869999f3d05f7d699d2b29a e082c1832e09a7d1e78b7fd49a592d372de854c8 $ cd .. $ hg -q clone --pull source1a source1b $ cd source1a $ hg bookmark bookA $ echo 1a > foo $ hg commit -m 1a $ cd ../source1b $ hg -q up -r 0 $ echo head1 > foo $ hg commit -m head1 created new head $ hg bookmark head1 $ hg -q up -r 0 $ echo head2 > foo $ hg commit -m head2 created new head $ hg bookmark head2 $ hg -q up -r 0 $ hg branch branch1 marked working directory as branch branch1 (branches are permanent and global, did you want a bookmark?) $ echo branch1 > foo $ hg commit -m branch1 $ hg -q up -r 0 $ hg branch branch2 marked working directory as branch branch2 $ echo branch2 > foo $ hg commit -m branch2 $ cd .. $ hg init source2 $ cd source2 $ echo initial2 > foo $ hg -q commit -A -m initial2 $ echo second > foo $ hg commit -m second $ cd .. 
Clone with auto share from an empty repo should not result in share $ mkdir share $ hg --config share.pool=share clone empty share-empty (not using pooled storage: remote appears to be empty) updating to branch default 0 files updated, 0 files merged, 0 files removed, 0 files unresolved $ ls share $ test -d share-empty/.hg/store $ test -f share-empty/.hg/sharedpath [1] Clone with auto share from a repo with filtered revision 0 should not result in share $ hg --config share.pool=share clone filteredrev0 share-filtered (not using pooled storage: unable to resolve identity of remote) requesting all changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved Clone from repo with content should result in shared store being created $ hg --config share.pool=share clone source1a share-dest1a (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1) requesting all changes adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 1 files searching for changes no changes found adding remote bookmark bookA updating working directory 1 files updated, 0 files merged, 0 files removed, 0 files unresolved The shared repo should have been created $ ls share b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1 The destination should point to it $ cat share-dest1a/.hg/sharedpath; echo $TESTTMP/share/b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1/.hg (glob) The destination should have bookmarks $ hg -R share-dest1a bookmarks bookA 2:e5bfe23c0b47 The default path should be the remote, not the share $ hg -R share-dest1a config paths.default $TESTTMP/source1a (glob) Clone with existing share dir should result in pull + share $ hg --config share.pool=share clone source1b share-dest1b (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1) searching for changes adding changesets adding 
manifests adding file changes added 4 changesets with 4 changes to 1 files (+4 heads) adding remote bookmark head1 adding remote bookmark head2 updating working directory 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ ls share b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1 $ cat share-dest1b/.hg/sharedpath; echo $TESTTMP/share/b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1/.hg (glob) We only get bookmarks from the remote, not everything in the share $ hg -R share-dest1b bookmarks head1 3:4a8dc1ab4c13 head2 4:99f71071f117 Default path should be source, not share. $ hg -R share-dest1b config paths.default $TESTTMP/source1a (glob) Checked out revision should be head of default branch $ hg -R share-dest1b log -r . changeset: 4:99f71071f117 bookmark: head2 parent: 0:b5f04eac9d8f user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: head2 Clone from unrelated repo should result in new share $ hg --config share.pool=share clone source2 share-dest2 (sharing from new pooled repository 22aeff664783fd44c6d9b435618173c118c3448e) requesting all changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files searching for changes no changes found updating working directory 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ ls share 22aeff664783fd44c6d9b435618173c118c3448e b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1 remote naming mode works as advertised $ hg --config share.pool=shareremote --config share.poolnaming=remote clone source1a share-remote1a (sharing from new pooled repository 195bb1fcdb595c14a6c13e0269129ed78f6debde) requesting all changes adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 1 files searching for changes no changes found adding remote bookmark bookA updating working directory 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ ls shareremote 195bb1fcdb595c14a6c13e0269129ed78f6debde $ hg --config share.pool=shareremote 
--config share.poolnaming=remote clone source1b share-remote1b (sharing from new pooled repository c0d4f83847ca2a873741feb7048a45085fd47c46) requesting all changes adding changesets adding manifests adding file changes added 6 changesets with 6 changes to 1 files (+4 heads) searching for changes no changes found adding remote bookmark head1 adding remote bookmark head2 updating working directory 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ ls shareremote 195bb1fcdb595c14a6c13e0269129ed78f6debde c0d4f83847ca2a873741feb7048a45085fd47c46 request to clone a single revision is respected in sharing mode $ hg --config share.pool=sharerevs clone -r 4a8dc1ab4c13 source1b share-1arev (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1) adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files no changes found adding remote bookmark head1 updating working directory 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R share-1arev log -G @ changeset: 1:4a8dc1ab4c13 | bookmark: head1 | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: head1 | o changeset: 0:b5f04eac9d8f user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: initial making another clone should only pull down requested rev $ hg --config share.pool=sharerevs clone -r 99f71071f117 source1b share-1brev (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1) searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) adding remote bookmark head1 adding remote bookmark head2 updating working directory 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R share-1brev log -G @ changeset: 2:99f71071f117 | bookmark: head2 | tag: tip | parent: 0:b5f04eac9d8f | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: head2 | | o changeset: 1:4a8dc1ab4c13 |/ 
bookmark: head1 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: head1 | o changeset: 0:b5f04eac9d8f user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: initial Request to clone a single branch is respected in sharing mode $ hg --config share.pool=sharebranch clone -b branch1 source1b share-1bbranch1 (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1) adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 1 files no changes found updating working directory 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R share-1bbranch1 log -G o changeset: 1:5f92a6c1a1b1 | branch: branch1 | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: branch1 | @ changeset: 0:b5f04eac9d8f user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: initial $ hg --config share.pool=sharebranch clone -b branch2 source1b share-1bbranch2 (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1) searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) updating working directory 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg -R share-1bbranch2 log -G o changeset: 2:6bacf4683960 | branch: branch2 | tag: tip | parent: 0:b5f04eac9d8f | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: branch2 | | o changeset: 1:5f92a6c1a1b1 |/ branch: branch1 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: branch1 | @ changeset: 0:b5f04eac9d8f user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: initial -U is respected in share clone mode $ hg --config share.pool=share clone -U source1a share-1anowc (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1) searching for changes no changes found adding remote bookmark bookA $ ls share-1anowc Test that auto sharing doesn't cause failure of "hg clone local remote" $ 
cd $TESTTMP $ hg -R a id -r 0 acb14030fe0a $ hg id -R remote -r 0 abort: repository remote not found! [255] $ hg --config share.pool=share -q clone -e "python \"$TESTDIR/dummyssh\"" a ssh://user@dummy/remote $ hg -R remote id -r 0 acb14030fe0a Cloning into pooled storage doesn't race (issue5104) $ HGPOSTLOCKDELAY=2.0 hg --config share.pool=racepool --config extensions.lockdelay=$TESTDIR/lockdelay.py clone source1a share-destrace1 > race1.log 2>&1 & $ HGPRELOCKDELAY=1.0 hg --config share.pool=racepool --config extensions.lockdelay=$TESTDIR/lockdelay.py clone source1a share-destrace2 > race2.log 2>&1 $ wait $ hg -R share-destrace1 log -r tip changeset: 2:e5bfe23c0b47 bookmark: bookA tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1a $ hg -R share-destrace2 log -r tip changeset: 2:e5bfe23c0b47 bookmark: bookA tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: 1a $ cat race1.log (sharing from new pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1) requesting all changes adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 1 files searching for changes no changes found adding remote bookmark bookA updating working directory 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cat race2.log (sharing from existing pooled repository b5f04eac9d8f7a6a9fcb070243cccea7dc5ea0c1) waiting for lock on repository share-destrace2 held by * (glob) got lock after \d+ seconds (re) searching for changes no changes found adding remote bookmark bookA updating working directory 1 files updated, 0 files merged, 0 files removed, 0 files unresolved mercurial-3.7.3/tests/revlog-formatv0.py0000755000175000017500000000345312676531525017705 0ustar mpmmpm00000000000000#!/usr/bin/env python # Copyright 2010 Intevation GmbH # Author(s): # Thomas Arendsen Hein # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
"""Create a Mercurial repository in revlog format 0 changeset: 0:a1ef0b125355 tag: tip user: user date: Thu Jan 01 00:00:00 1970 +0000 files: empty description: empty file """ import os, sys files = [ ('formatv0/.hg/00changelog.i', '000000000000004400000000000000000000000000000000000000' '000000000000000000000000000000000000000000000000000000' '0000a1ef0b125355d27765928be600cfe85784284ab3'), ('formatv0/.hg/00changelog.d', '756163613935613961356635353036303562366138343738336237' '61623536363738616436356635380a757365720a3020300a656d70' '74790a0a656d7074792066696c65'), ('formatv0/.hg/00manifest.i', '000000000000003000000000000000000000000000000000000000' '000000000000000000000000000000000000000000000000000000' '0000aca95a9a5f550605b6a84783b7ab56678ad65f58'), ('formatv0/.hg/00manifest.d', '75656d707479006238306465356431333837353835343163356630' '35323635616431343461623966613836643164620a'), ('formatv0/.hg/data/empty.i', '000000000000000000000000000000000000000000000000000000' '000000000000000000000000000000000000000000000000000000' '0000b80de5d138758541c5f05265ad144ab9fa86d1db'), ('formatv0/.hg/data/empty.d', ''), ] def makedirs(name): """recursive directory creation""" parent = os.path.dirname(name) if parent: makedirs(parent) os.mkdir(name) makedirs(os.path.join(*'formatv0/.hg/data'.split('/'))) for name, data in files: f = open(name, 'wb') f.write(data.decode('hex')) f.close() sys.exit(0) mercurial-3.7.3/tests/test-pull-update.t0000644000175000017500000000252712676531525017675 0ustar mpmmpm00000000000000 $ hg init t $ cd t $ echo 1 > foo $ hg ci -Am m adding foo $ cd .. 
$ hg clone t tt updating to branch default 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd tt $ echo 1.1 > foo $ hg ci -Am m $ cd ../t $ echo 1.2 > foo $ hg ci -Am m Should not update: $ hg pull -u ../tt pulling from ../tt searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) abort: not updating: not a linear update (merge or update --check to force update) [255] $ cd ../tt Should not update: $ hg pull -u ../t pulling from ../t searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) abort: not updating: not a linear update (merge or update --check to force update) [255] $ HGMERGE=true hg merge merging foo 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -mm $ cd ../t Should work: $ hg pull -u ../tt pulling from ../tt searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (-1 heads) 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd .. 
mercurial-3.7.3/tests/test-rebase-conflicts.t0000644000175000017500000001751612676531525020670 0ustar mpmmpm00000000000000 $ cat >> $HGRCPATH < [format] > usegeneraldelta=yes > [extensions] > rebase= > > [phases] > publish=False > > [alias] > tglog = log -G --template "{rev}:{phase} '{desc}' {branches} {bookmarks}\n" > EOF $ hg init a $ cd a $ echo c1 >common $ hg add common $ hg ci -m C1 $ echo c2 >>common $ hg ci -m C2 $ echo c3 >>common $ hg ci -m C3 $ hg up -q -C 1 $ echo l1 >>extra $ hg add extra $ hg ci -m L1 created new head $ sed -e 's/c2/l2/' common > common.new $ mv common.new common $ hg ci -m L2 $ echo l3 >> extra2 $ hg add extra2 $ hg ci -m L3 $ hg bookmark mybook $ hg phase --force --secret 4 $ hg tglog @ 5:secret 'L3' mybook | o 4:secret 'L2' | o 3:draft 'L1' | | o 2:draft 'C3' |/ o 1:draft 'C2' | o 0:draft 'C1' Try to call --continue: $ hg rebase --continue abort: no rebase in progress [255] Conflicting rebase: $ hg rebase -s 3 -d 2 rebasing 3:3163e20567cc "L1" rebasing 4:46f0b057b5c0 "L2" merging common warning: conflicts while merging common! 
(edit, then use 'hg resolve --mark') unresolved conflicts (see hg resolve, then hg rebase --continue) [1] Try to continue without solving the conflict: $ hg rebase --continue already rebased 3:3163e20567cc "L1" as 3e046f2ecedb rebasing 4:46f0b057b5c0 "L2" abort: unresolved merge conflicts (see "hg help resolve") [255] Conclude rebase: $ echo 'resolved merge' >common $ hg resolve -m common (no more unresolved files) continue: hg rebase --continue $ hg rebase --continue already rebased 3:3163e20567cc "L1" as 3e046f2ecedb rebasing 4:46f0b057b5c0 "L2" rebasing 5:8029388f38dc "L3" (mybook) saved backup bundle to $TESTTMP/a/.hg/strip-backup/3163e20567cc-5ca4656e-backup.hg (glob) $ hg tglog @ 5:secret 'L3' mybook | o 4:secret 'L2' | o 3:draft 'L1' | o 2:draft 'C3' | o 1:draft 'C2' | o 0:draft 'C1' Check correctness: $ hg cat -r 0 common c1 $ hg cat -r 1 common c1 c2 $ hg cat -r 2 common c1 c2 c3 $ hg cat -r 3 common c1 c2 c3 $ hg cat -r 4 common resolved merge $ hg cat -r 5 common resolved merge Bookmark stays active after --continue $ hg bookmarks * mybook 5:d67b21408fc0 $ cd .. 
Check that the right ancestors is used while rebasing a merge (issue4041) $ hg clone "$TESTDIR/bundles/issue4041.hg" issue4041 requesting all changes adding changesets adding manifests adding file changes added 11 changesets with 8 changes to 3 files (+1 heads) updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd issue4041 $ hg log -G o changeset: 10:2f2496ddf49d |\ branch: f1 | | tag: tip | | parent: 7:4c9fbe56a16f | | parent: 9:e31216eec445 | | user: szhang | | date: Thu Sep 05 12:59:39 2013 -0400 | | summary: merge | | | o changeset: 9:e31216eec445 | | branch: f1 | | user: szhang | | date: Thu Sep 05 12:59:10 2013 -0400 | | summary: more changes to f1 | | | o changeset: 8:8e4e2c1a07ae | |\ branch: f1 | | | parent: 2:4bc80088dc6b | | | parent: 6:400110238667 | | | user: szhang | | | date: Thu Sep 05 12:57:59 2013 -0400 | | | summary: bad merge | | | o | | changeset: 7:4c9fbe56a16f |/ / branch: f1 | | parent: 2:4bc80088dc6b | | user: szhang | | date: Thu Sep 05 12:54:00 2013 -0400 | | summary: changed f1 | | | o changeset: 6:400110238667 | | branch: f2 | | parent: 4:12e8ec6bb010 | | user: szhang | | date: Tue Sep 03 13:58:02 2013 -0400 | | summary: changed f2 on f2 | | | | @ changeset: 5:d79e2059b5c0 | | | parent: 3:8a951942e016 | | | user: szhang | | | date: Tue Sep 03 13:57:39 2013 -0400 | | | summary: changed f2 on default | | | | o | changeset: 4:12e8ec6bb010 | |/ branch: f2 | | user: szhang | | date: Tue Sep 03 13:57:18 2013 -0400 | | summary: created f2 branch | | | o changeset: 3:8a951942e016 | | parent: 0:24797d4f68de | | user: szhang | | date: Tue Sep 03 13:57:11 2013 -0400 | | summary: added f2.txt | | o | changeset: 2:4bc80088dc6b | | branch: f1 | | user: szhang | | date: Tue Sep 03 13:56:20 2013 -0400 | | summary: added f1.txt | | o | changeset: 1:ef53c9e6b608 |/ branch: f1 | user: szhang | date: Tue Sep 03 13:55:26 2013 -0400 | summary: created f1 branch | o changeset: 0:24797d4f68de user: szhang date: 
Tue Sep 03 13:55:08 2013 -0400 summary: added default.txt $ hg rebase -s9 -d2 --debug # use debug to really check merge base used rebase onto 2 starting from e31216eec445 ignoring null merge rebase of 3 ignoring null merge rebase of 4 ignoring null merge rebase of 6 ignoring null merge rebase of 8 rebasing 9:e31216eec445 "more changes to f1" future parents are 2 and -1 rebase status stored update to 2:4bc80088dc6b resolving manifests branchmerge: False, force: True, partial: False ancestor: d79e2059b5c0+, local: d79e2059b5c0+, remote: 4bc80088dc6b f2.txt: other deleted -> r removing f2.txt f1.txt: remote created -> g getting f1.txt merge against 9:e31216eec445 detach base 8:8e4e2c1a07ae searching for copies back to rev 3 resolving manifests branchmerge: True, force: True, partial: False ancestor: 8e4e2c1a07ae, local: 4bc80088dc6b+, remote: e31216eec445 f1.txt: remote is newer -> g getting f1.txt committing files: f1.txt committing manifest committing changelog rebased as 19c888675e13 rebasing 10:2f2496ddf49d "merge" (tip) future parents are 11 and 7 rebase status stored already in target merge against 10:2f2496ddf49d detach base 9:e31216eec445 searching for copies back to rev 3 resolving manifests branchmerge: True, force: True, partial: False ancestor: e31216eec445, local: 19c888675e13+, remote: 2f2496ddf49d f1.txt: remote is newer -> g getting f1.txt committing files: f1.txt committing manifest committing changelog rebased as 2a7f09cac94c rebase merging completed update back to initial working directory parent resolving manifests branchmerge: False, force: False, partial: False ancestor: 2a7f09cac94c, local: 2a7f09cac94c+, remote: d79e2059b5c0 f1.txt: other deleted -> r removing f1.txt f2.txt: remote created -> g getting f2.txt 2 changesets found list of changesets: e31216eec445e44352c5f01588856059466a24c9 2f2496ddf49d69b5ef23ad8cf9fb2e0e4faf0ac2 bundle2-output-bundle: "HG20", (1 params) 1 parts total bundle2-output-part: "changegroup" (params: 1 mandatory) 
streamed payload saved backup bundle to $TESTTMP/issue4041/.hg/strip-backup/e31216eec445-15f7a814-backup.hg (glob) 3 changesets found list of changesets: 4c9fbe56a16f30c0d5dcc40ec1a97bbe3325209c 19c888675e133ab5dff84516926a65672eaf04d9 2a7f09cac94c7f4b73ebd5cd1a62d3b2e8e336bf bundle2-output-bundle: "HG20", 1 parts total bundle2-output-part: "changegroup" (params: 1 mandatory) streamed payload adding branch bundle2-input-bundle: with-transaction bundle2-input-part: "changegroup" (params: 1 mandatory) supported adding changesets add changeset 4c9fbe56a16f add changeset 19c888675e13 add changeset 2a7f09cac94c adding manifests adding file changes adding f1.txt revisions added 2 changesets with 2 changes to 1 files bundle2-input-part: total payload size 1713 bundle2-input-bundle: 0 parts total invalid branchheads cache (served): tip differs rebase completed updating the branch cache truncating cache/rbc-revs-v1 to 72 mercurial-3.7.3/tests/svn/0000755000175000017500000000000012676531544015100 5ustar mpmmpm00000000000000mercurial-3.7.3/tests/svn/empty.svndump0000644000175000017500000000302412676531525017652 0ustar mpmmpm00000000000000SVN-fs-dump-format-version: 2 UUID: b70c45d5-2b76-4722-a373-d9babae61626 Revision-number: 0 Prop-content-length: 260 Content-length: 260 K 8 svn:date V 27 2012-04-18T11:35:14.752409Z K 17 svn:sync-from-url V 73 file:///Users/pmezard/dev/hg/hg-pmezard/tests/svn/temp/svn-repo/trunk/dir K 18 svn:sync-from-uuid V 36 56625b9e-e7e9-45be-ab61-052d41f0e1dd K 24 svn:sync-last-merged-rev V 1 4 PROPS-END Revision-number: 1 Prop-content-length: 112 Content-length: 112 K 10 svn:author V 7 pmezard K 8 svn:date V 27 2012-04-18T11:35:14.769622Z K 7 svn:log V 10 init projA PROPS-END Node-path: trunk Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END Revision-number: 2 Prop-content-length: 107 Content-length: 107 K 10 svn:author V 7 pmezard K 8 svn:date V 27 2012-04-18T11:35:15.052989Z K 7 svn:log V 6 adddir PROPS-END 
Node-path: trunk/dir Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END Node-path: trunk/dir/a Node-kind: file Node-action: add Prop-content-length: 10 Text-content-length: 2 Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3 Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b Content-length: 12 PROPS-END a Revision-number: 3 Prop-content-length: 105 Content-length: 105 K 10 svn:author V 7 pmezard K 8 svn:date V 27 2012-04-18T11:35:16.050353Z K 7 svn:log V 4 addb PROPS-END Revision-number: 4 Prop-content-length: 105 Content-length: 105 K 10 svn:author V 7 pmezard K 8 svn:date V 27 2012-04-18T11:35:17.050768Z K 7 svn:log V 4 addc PROPS-END mercurial-3.7.3/tests/svn/svndump-replace.sh0000755000175000017500000000273012676531525020545 0ustar mpmmpm00000000000000#!/bin/sh RSVN="`pwd`/rsvn.py" export PATH=/bin:/usr/bin mkdir temp cd temp svnadmin create repo svn co file://`pwd`/repo wc cd wc mkdir trunk branches cd trunk echo a > a mkdir d echo b > d/b ln -s d dlink ln -s d dlink2 ln -s d dlink3 mkdir d2 echo a > d2/a cd .. svn add * svn ci -m 'initial' # Clobber symlink with file with similar content cd trunk ls -Alh readlink dlink3 > dlink3tmp rm dlink3 mv dlink3tmp dlink3 svn propdel svn:special dlink3 svn ci -m 'clobber symlink' cd .. svn up # Clobber files and symlink with directories cd .. cat > clobber.rsvn < clobber.rsvn < d2/b svn add d2/b svn ci -m adddb cd .. svn up svn cp trunk branches/branch cd branches/branch svn rm d2/b echo c > d2/c svn add d2/c cd ../.. svn ci -m branch svn up cd .. cat > clobber.rsvn < ../replace.svndump mercurial-3.7.3/tests/svn/svndump-branches.sh0000755000175000017500000000272112676531525020717 0ustar mpmmpm00000000000000#!/bin/sh # # Use this script to generate branches.svndump # mkdir temp cd temp mkdir project-orig cd project-orig mkdir trunk mkdir branches cd .. 
svnadmin create svn-repo svnurl=file://`pwd`/svn-repo svn import project-orig $svnurl -m "init projA" svn co $svnurl project cd project echo a > trunk/a echo b > trunk/b echo c > trunk/c mkdir trunk/dir echo e > trunk/dir/e # Add a file within branches, used to confuse branch detection echo d > branches/notinbranch svn add trunk/a trunk/b trunk/c trunk/dir branches/notinbranch svn ci -m hello svn up # Branch to old svn copy trunk branches/old svn rm branches/old/c svn rm branches/old/dir svn ci -m "branch trunk, remove c and dir" svn up # Update trunk echo a >> trunk/a svn ci -m "change a" # Update old branch echo b >> branches/old/b svn ci -m "change b" # Create a cross-branch revision svn move trunk/b branches/old/c echo c >> branches/old/c svn ci -m "move and update c" # Update old branch again echo b >> branches/old/b svn ci -m "change b again" # Move back and forth between branch of similar names # This used to generate fake copy records svn up svn move branches/old branches/old2 svn ci -m "move to old2" svn move branches/old2 branches/old svn ci -m "move back to old" # Update trunk again echo a > trunk/a svn ci -m "last change to a" # Branch again from a converted revision svn copy -r 1 $svnurl/trunk branches/old3 svn ci -m "branch trunk@1 into old3" cd .. 
svnadmin dump svn-repo > ../branches.svndump mercurial-3.7.3/tests/svn/replace.svndump0000644000175000017500000001177712676531525020145 0ustar mpmmpm00000000000000SVN-fs-dump-format-version: 2 UUID: 97a955ef-0269-44f2-a58f-abd4ad400b2b Revision-number: 0 Prop-content-length: 56 Content-length: 56 K 8 svn:date V 27 2010-11-26T18:01:12.912988Z PROPS-END Revision-number: 1 Prop-content-length: 108 Content-length: 108 K 7 svn:log V 7 initial K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-11-26T18:01:13.106933Z PROPS-END Node-path: branches Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END Node-path: trunk Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END Node-path: trunk/a Node-kind: file Node-action: add Prop-content-length: 10 Text-content-length: 2 Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3 Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b Content-length: 12 PROPS-END a Node-path: trunk/d Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END Node-path: trunk/d/b Node-kind: file Node-action: add Prop-content-length: 10 Text-content-length: 2 Text-content-md5: 3b5d5c3712955042212316173ccf37be Text-content-sha1: 89e6c98d92887913cadf06b2adb97f26cde4849b Content-length: 12 PROPS-END b Node-path: trunk/d2 Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END Node-path: trunk/d2/a Node-kind: file Node-action: add Prop-content-length: 10 Text-content-length: 2 Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3 Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b Content-length: 12 PROPS-END a Node-path: trunk/dlink Node-kind: file Node-action: add Prop-content-length: 33 Text-content-length: 6 Text-content-md5: cca56829f18345718a4980bb02b6d8c3 Text-content-sha1: 7c54cc5d472b78c94a04382df34b0f4f0f4f2d49 Content-length: 39 K 11 svn:special V 1 * PROPS-END link d Node-path: trunk/dlink2 Node-kind: file Node-action: add 
Prop-content-length: 33 Text-content-length: 6 Text-content-md5: cca56829f18345718a4980bb02b6d8c3 Text-content-sha1: 7c54cc5d472b78c94a04382df34b0f4f0f4f2d49 Content-length: 39 K 11 svn:special V 1 * PROPS-END link d Node-path: trunk/dlink3 Node-kind: file Node-action: add Prop-content-length: 33 Text-content-length: 6 Text-content-md5: cca56829f18345718a4980bb02b6d8c3 Text-content-sha1: 7c54cc5d472b78c94a04382df34b0f4f0f4f2d49 Content-length: 39 K 11 svn:special V 1 * PROPS-END link d Revision-number: 2 Prop-content-length: 117 Content-length: 117 K 7 svn:log V 15 clobber symlink K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-11-26T18:01:14.073483Z PROPS-END Node-path: trunk/dlink3 Node-kind: file Node-action: change Prop-content-length: 10 Text-content-length: 2 Text-content-md5: e29311f6f1bf1af907f9ef9f44b8328b Text-content-sha1: e983f374794de9c64e3d1c1de1d490c0756eeeff Content-length: 12 PROPS-END d Revision-number: 3 Prop-content-length: 106 Content-length: 106 K 7 svn:log V 8 clobber1 K 10 svn:author V 4 evil K 8 svn:date V 27 2010-11-26T18:01:16.205184Z PROPS-END Node-path: trunk/a Node-kind: dir Node-action: delete Node-path: trunk/a Node-kind: dir Node-action: add Node-copyfrom-rev: 2 Node-copyfrom-path: trunk/d Node-path: trunk/dlink Node-kind: dir Node-action: delete Node-path: trunk/dlink Node-kind: dir Node-action: add Node-copyfrom-rev: 2 Node-copyfrom-path: trunk/d Revision-number: 4 Prop-content-length: 106 Content-length: 106 K 7 svn:log V 8 clobber2 K 10 svn:author V 4 evil K 8 svn:date V 27 2010-11-26T18:01:16.395962Z PROPS-END Node-path: trunk/dlink3 Node-kind: file Node-action: delete Node-path: trunk/dlink3 Node-kind: file Node-action: add Node-copyfrom-rev: 3 Node-copyfrom-path: trunk/dlink2 Text-copy-source-md5: cca56829f18345718a4980bb02b6d8c3 Text-copy-source-sha1: 7c54cc5d472b78c94a04382df34b0f4f0f4f2d49 Revision-number: 5 Prop-content-length: 106 Content-length: 106 K 7 svn:log V 5 adddb K 10 svn:author V 7 pmezard K 8 svn:date V 27 
2010-11-26T18:01:16.445072Z PROPS-END Node-path: trunk/d2/b Node-kind: file Node-action: add Prop-content-length: 10 Text-content-length: 2 Text-content-md5: 3b5d5c3712955042212316173ccf37be Text-content-sha1: 89e6c98d92887913cadf06b2adb97f26cde4849b Content-length: 12 PROPS-END b Revision-number: 6 Prop-content-length: 107 Content-length: 107 K 7 svn:log V 6 branch K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-11-26T18:01:19.075874Z PROPS-END Node-path: branches/branch Node-kind: dir Node-action: add Node-copyfrom-rev: 5 Node-copyfrom-path: trunk Node-path: branches/branch/d2/c Node-kind: file Node-action: add Prop-content-length: 10 Text-content-length: 2 Text-content-md5: 2cd6ee2c70b0bde53fbe6cac3c8b8bb1 Text-content-sha1: 2b66fd261ee5c6cfc8de7fa466bab600bcfe4f69 Content-length: 12 PROPS-END c Node-path: branches/branch/d2/b Node-action: delete Revision-number: 7 Prop-content-length: 109 Content-length: 109 K 7 svn:log V 10 clobberdir K 10 svn:author V 4 evil K 8 svn:date V 27 2010-11-26T18:01:21.202158Z PROPS-END Node-path: trunk/d2 Node-kind: dir Node-action: delete Node-path: trunk/d2 Node-kind: dir Node-action: add Node-copyfrom-rev: 6 Node-copyfrom-path: branches/branch/d2 mercurial-3.7.3/tests/svn/encoding.svndump0000644000175000017500000000666212676531525020315 0ustar mpmmpm00000000000000SVN-fs-dump-format-version: 2 UUID: afeb9c47-92ff-4c0c-9f72-e1f6eb8ac9af Revision-number: 0 Prop-content-length: 56 Content-length: 56 K 8 svn:date V 27 2009-06-21T16:34:55.835945Z PROPS-END Revision-number: 1 Prop-content-length: 112 Content-length: 112 K 7 svn:log V 10 init projA K 10 svn:author V 7 pmezard K 8 svn:date V 27 2009-06-21T16:34:55.909545Z PROPS-END Node-path: branches Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END Node-path: tags Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END Node-path: trunk Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END 
Revision-number: 2 Prop-content-length: 106 Content-length: 106 K 7 svn:log V 5 hello K 10 svn:author V 7 pmezard K 8 svn:date V 27 2009-06-21T16:34:56.150049Z PROPS-END Node-path: trunk/à Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END Node-path: trunk/à/eÌ Node-kind: file Node-action: add Prop-content-length: 10 Text-content-length: 2 Text-content-md5: e29311f6f1bf1af907f9ef9f44b8328b Text-content-sha1: e983f374794de9c64e3d1c1de1d490c0756eeeff Content-length: 12 PROPS-END d Node-path: trunk/é Node-kind: file Node-action: add Prop-content-length: 10 Text-content-length: 2 Text-content-md5: 9ffbf43126e33be52cd2bf7e01d627f9 Text-content-sha1: 094e3afb2fe8dfe82f63731cdcd3b999f4856cff Content-length: 12 PROPS-END e Revision-number: 3 Prop-content-length: 112 Content-length: 112 K 7 svn:log V 10 copy files K 10 svn:author V 7 pmezard K 8 svn:date V 27 2009-06-21T16:34:59.089402Z PROPS-END Node-path: trunk/è Node-kind: file Node-action: add Node-copyfrom-rev: 2 Node-copyfrom-path: trunk/é Text-copy-source-md5: 9ffbf43126e33be52cd2bf7e01d627f9 Text-copy-source-sha1: 094e3afb2fe8dfe82f63731cdcd3b999f4856cff Node-path: trunk/ù Node-kind: dir Node-action: add Node-copyfrom-rev: 2 Node-copyfrom-path: trunk/à Node-path: trunk/à Node-action: delete Node-path: trunk/é Node-action: delete Revision-number: 4 Prop-content-length: 114 Content-length: 114 K 7 svn:log V 12 remove files K 10 svn:author V 7 pmezard K 8 svn:date V 27 2009-06-21T16:35:00.165121Z PROPS-END Node-path: trunk/è Node-action: delete Node-path: trunk/ù Node-action: delete Revision-number: 5 Prop-content-length: 120 Content-length: 120 K 7 svn:log V 18 branch to branché K 10 svn:author V 7 pmezard K 8 svn:date V 27 2009-06-21T16:35:03.079138Z PROPS-END Node-path: branches/branché Node-kind: dir Node-action: add Node-copyfrom-rev: 4 Node-copyfrom-path: trunk Revision-number: 6 Prop-content-length: 121 Content-length: 121 K 7 svn:log V 19 branch to branchée K 10 svn:author V 7 
pmezard K 8 svn:date V 27 2009-06-21T16:35:06.060801Z PROPS-END Node-path: branches/branchée Node-kind: dir Node-action: add Node-copyfrom-rev: 5 Node-copyfrom-path: branches/branché Revision-number: 7 Prop-content-length: 110 Content-length: 110 K 7 svn:log V 9 tag trunk K 10 svn:author V 7 pmezard K 8 svn:date V 27 2009-06-21T16:35:09.061530Z PROPS-END Node-path: tags/branché Node-kind: dir Node-action: add Node-copyfrom-rev: 6 Node-copyfrom-path: trunk Revision-number: 8 Prop-content-length: 114 Content-length: 114 K 7 svn:log V 12 tag branché K 10 svn:author V 7 pmezard K 8 svn:date V 27 2009-06-21T16:35:11.068562Z PROPS-END Node-path: tags/branchée Node-kind: dir Node-action: add Node-copyfrom-rev: 6 Node-copyfrom-path: branches/branchée mercurial-3.7.3/tests/svn/svndump-tags.sh0000755000175000017500000000225412676531525020071 0ustar mpmmpm00000000000000#!/bin/sh # # Use this script to generate tags.svndump # mkdir temp cd temp mkdir project-orig cd project-orig mkdir trunk mkdir branches mkdir tags mkdir unrelated cd .. 
svnadmin create svn-repo svnurl=file://`pwd`/svn-repo svn import project-orig $svnurl -m "init projA" svn co $svnurl project cd project echo a > trunk/a svn add trunk/a svn ci -m adda echo a >> trunk/a svn ci -m changea echo a >> trunk/a svn ci -m changea2 # Add an unrelated commit to test that tags are bound to the # correct "from" revision and not a dummy one echo a >> unrelated/dummy svn add unrelated/dummy svn ci -m unrelatedchange # Tag current revision svn up svn copy trunk tags/trunk.v1 svn copy trunk tags/trunk.badtag svn ci -m "tagging trunk.v1 trunk.badtag" echo a >> trunk/a svn ci -m changea3 # Fix the bad tag # trunk.badtag should not show in converted tags svn up svn mv tags/trunk.badtag tags/trunk.goodtag svn ci -m "fix trunk.badtag" echo a >> trunk/a svn ci -m changea # Delete goodtag and recreate it, to test we pick the good one svn rm tags/trunk.goodtag svn ci -m removegoodtag svn up svn copy trunk tags/trunk.goodtag svn ci -m recreategoodtag cd .. svnadmin dump svn-repo > ../tags.svndumpmercurial-3.7.3/tests/svn/branches.svndump0000644000175000017500000001306612676531525020310 0ustar mpmmpm00000000000000SVN-fs-dump-format-version: 2 UUID: 644ede6c-2b81-4367-9dc8-d786514f2cde Revision-number: 0 Prop-content-length: 56 Content-length: 56 K 8 svn:date V 27 2010-05-19T20:16:07.429098Z PROPS-END Revision-number: 1 Prop-content-length: 112 Content-length: 112 K 7 svn:log V 10 init projA K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-19T20:16:07.461283Z PROPS-END Node-path: branches Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END Node-path: trunk Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END Revision-number: 2 Prop-content-length: 106 Content-length: 106 K 7 svn:log V 5 hello K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-19T20:16:08.121436Z PROPS-END Node-path: branches/notinbranch Node-kind: file Node-action: add Prop-content-length: 10 Text-content-length: 2 
Text-content-md5: e29311f6f1bf1af907f9ef9f44b8328b Text-content-sha1: e983f374794de9c64e3d1c1de1d490c0756eeeff Content-length: 12 PROPS-END d Node-path: trunk/a Node-kind: file Node-action: add Prop-content-length: 10 Text-content-length: 2 Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3 Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b Content-length: 12 PROPS-END a Node-path: trunk/b Node-kind: file Node-action: add Prop-content-length: 10 Text-content-length: 2 Text-content-md5: 3b5d5c3712955042212316173ccf37be Text-content-sha1: 89e6c98d92887913cadf06b2adb97f26cde4849b Content-length: 12 PROPS-END b Node-path: trunk/c Node-kind: file Node-action: add Prop-content-length: 10 Text-content-length: 2 Text-content-md5: 2cd6ee2c70b0bde53fbe6cac3c8b8bb1 Text-content-sha1: 2b66fd261ee5c6cfc8de7fa466bab600bcfe4f69 Content-length: 12 PROPS-END c Node-path: trunk/dir Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END Node-path: trunk/dir/e Node-kind: file Node-action: add Prop-content-length: 10 Text-content-length: 2 Text-content-md5: 9ffbf43126e33be52cd2bf7e01d627f9 Text-content-sha1: 094e3afb2fe8dfe82f63731cdcd3b999f4856cff Content-length: 12 PROPS-END e Revision-number: 3 Prop-content-length: 132 Content-length: 132 K 7 svn:log V 30 branch trunk, remove c and dir K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-19T20:16:11.113124Z PROPS-END Node-path: branches/old Node-kind: dir Node-action: add Node-copyfrom-rev: 2 Node-copyfrom-path: trunk Node-path: branches/old/dir Node-action: delete Node-path: branches/old/c Node-action: delete Revision-number: 4 Prop-content-length: 109 Content-length: 109 K 7 svn:log V 8 change a K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-19T20:16:13.060877Z PROPS-END Node-path: trunk/a Node-kind: file Node-action: change Text-content-length: 4 Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29 Content-length: 4 a a 
Revision-number: 5 Prop-content-length: 109 Content-length: 109 K 7 svn:log V 8 change b K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-19T20:16:14.066212Z PROPS-END Node-path: branches/old/b Node-kind: file Node-action: change Text-content-length: 4 Text-content-md5: 06ac26ed8b614fc0b141e4542aa067c2 Text-content-sha1: f6980469e74f7125178e88ec571e06fe6ce86e95 Content-length: 4 b b Revision-number: 6 Prop-content-length: 119 Content-length: 119 K 7 svn:log V 17 move and update c K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-19T20:16:16.069449Z PROPS-END Node-path: branches/old/c Node-kind: file Node-action: add Node-copyfrom-rev: 3 Node-copyfrom-path: trunk/b Text-copy-source-md5: 3b5d5c3712955042212316173ccf37be Text-copy-source-sha1: 89e6c98d92887913cadf06b2adb97f26cde4849b Text-content-length: 4 Text-content-md5: 33cb6785d50937d8d307ebb66d6259a7 Text-content-sha1: 7a6478264aa11a0f4befef356c03e83f2b1f6eba Content-length: 4 b c Node-path: trunk/b Node-action: delete Revision-number: 7 Prop-content-length: 116 Content-length: 116 K 7 svn:log V 14 change b again K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-19T20:16:17.070868Z PROPS-END Node-path: branches/old/b Node-kind: file Node-action: change Text-content-length: 6 Text-content-md5: cdcfb41554e2d092c13f5e6839e63577 Text-content-sha1: 17ac58cabedebea235d1b5605531d5b1559797e9 Content-length: 6 b b b Revision-number: 8 Prop-content-length: 114 Content-length: 114 K 7 svn:log V 12 move to old2 K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-19T20:16:20.063098Z PROPS-END Node-path: branches/old2 Node-kind: dir Node-action: add Node-copyfrom-rev: 7 Node-copyfrom-path: branches/old Node-path: branches/old Node-action: delete Revision-number: 9 Prop-content-length: 118 Content-length: 118 K 7 svn:log V 16 move back to old K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-19T20:16:22.062931Z PROPS-END Node-path: branches/old Node-kind: dir Node-action: add Node-copyfrom-rev: 8 
Node-copyfrom-path: branches/old2 Node-path: branches/old2 Node-action: delete Revision-number: 10 Prop-content-length: 118 Content-length: 118 K 7 svn:log V 16 last change to a K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-19T20:16:23.075562Z PROPS-END Node-path: trunk/a Node-kind: file Node-action: change Text-content-length: 2 Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3 Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b Content-length: 2 a Revision-number: 11 Prop-content-length: 126 Content-length: 126 K 7 svn:log V 24 branch trunk@1 into old3 K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-19T20:16:25.107655Z PROPS-END Node-path: branches/old3 Node-kind: dir Node-action: add Node-copyfrom-rev: 1 Node-copyfrom-path: trunk mercurial-3.7.3/tests/svn/move.svndump0000644000175000017500000001704312676531525017470 0ustar mpmmpm00000000000000SVN-fs-dump-format-version: 2 UUID: 7d15f7c2-5863-4c16-aa2a-3418b1721d3a Revision-number: 0 Prop-content-length: 56 Content-length: 56 K 8 svn:date V 27 2010-05-09T13:02:37.336239Z PROPS-END Revision-number: 1 Prop-content-length: 112 Content-length: 112 K 7 svn:log V 10 init projA K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-09T13:02:37.372834Z PROPS-END Node-path: trunk Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END Node-path: trunk/a Node-kind: file Node-action: add Prop-content-length: 10 Text-content-length: 2 Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3 Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b Content-length: 12 PROPS-END a Node-path: trunk/d1 Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END Node-path: trunk/d1/b Node-kind: file Node-action: add Prop-content-length: 10 Text-content-length: 2 Text-content-md5: 3b5d5c3712955042212316173ccf37be Text-content-sha1: 89e6c98d92887913cadf06b2adb97f26cde4849b Content-length: 12 PROPS-END b Node-path: trunk/d1/c Node-kind: file Node-action: add 
Prop-content-length: 10 Text-content-length: 2 Text-content-md5: 2cd6ee2c70b0bde53fbe6cac3c8b8bb1 Text-content-sha1: 2b66fd261ee5c6cfc8de7fa466bab600bcfe4f69 Content-length: 12 PROPS-END c Node-path: trunk/d2 Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END Node-path: trunk/d2/d Node-kind: file Node-action: add Prop-content-length: 10 Text-content-length: 2 Text-content-md5: e29311f6f1bf1af907f9ef9f44b8328b Text-content-sha1: e983f374794de9c64e3d1c1de1d490c0756eeeff Content-length: 12 PROPS-END d Revision-number: 2 Prop-content-length: 118 Content-length: 118 K 7 svn:log V 16 commitbeforemove K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-09T13:02:38.049068Z PROPS-END Node-path: trunk/a Node-kind: file Node-action: change Text-content-length: 4 Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29 Content-length: 4 a a Node-path: trunk/d1/c Node-kind: file Node-action: change Text-content-length: 4 Text-content-md5: 63fad9092ad37713ebe26b3193f89c41 Text-content-sha1: ccfb93b7bac6f1520f0adc0eebc2cafe9da80f42 Content-length: 4 c c Revision-number: 3 Prop-content-length: 112 Content-length: 112 K 7 svn:log V 10 movedtrunk K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-09T13:02:39.044479Z PROPS-END Node-path: subproject Node-kind: dir Node-action: add Node-copyfrom-rev: 2 Node-copyfrom-path: trunk Node-path: trunk Node-action: delete Revision-number: 4 Prop-content-length: 113 Content-length: 113 K 7 svn:log V 11 createtrunk K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-09T13:02:40.057804Z PROPS-END Node-path: subproject/trunk Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END Revision-number: 5 Prop-content-length: 116 Content-length: 116 K 7 svn:log V 14 createbranches K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-09T13:02:41.058871Z PROPS-END Node-path: subproject/branches Node-kind: dir Node-action: add 
Prop-content-length: 10 Content-length: 10 PROPS-END Revision-number: 6 Prop-content-length: 107 Content-length: 107 K 7 svn:log V 6 moved1 K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-09T13:02:42.046689Z PROPS-END Node-path: subproject/trunk/d1 Node-kind: dir Node-action: add Node-copyfrom-rev: 5 Node-copyfrom-path: subproject/d1 Node-path: subproject/d1 Node-action: delete Revision-number: 7 Prop-content-length: 107 Content-length: 107 K 7 svn:log V 6 moved2 K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-09T13:02:42.071413Z PROPS-END Node-path: subproject/trunk/d2 Node-kind: dir Node-action: add Node-copyfrom-rev: 6 Node-copyfrom-path: subproject/d2 Node-path: subproject/d2 Node-action: delete Revision-number: 8 Prop-content-length: 119 Content-length: 119 K 7 svn:log V 17 changeb and rm d2 K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-09T13:02:43.062018Z PROPS-END Node-path: subproject/trunk/d1/b Node-kind: file Node-action: change Text-content-length: 4 Text-content-md5: 06ac26ed8b614fc0b141e4542aa067c2 Text-content-sha1: f6980469e74f7125178e88ec571e06fe6ce86e95 Content-length: 4 b b Node-path: subproject/trunk/d2 Node-action: delete Revision-number: 9 Prop-content-length: 113 Content-length: 113 K 7 svn:log V 11 moved1again K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-09T13:02:44.047997Z PROPS-END Node-path: subproject/branches/d1 Node-kind: dir Node-action: add Node-copyfrom-rev: 8 Node-copyfrom-path: subproject/trunk/d1 Node-path: subproject/trunk/d1 Node-action: delete Revision-number: 10 Prop-content-length: 118 Content-length: 118 K 7 svn:log V 16 copyfilefrompast K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-09T13:02:44.086619Z PROPS-END Node-path: subproject/trunk/d Node-kind: file Node-action: add Node-copyfrom-rev: 7 Node-copyfrom-path: subproject/trunk/d2/d Text-copy-source-md5: e29311f6f1bf1af907f9ef9f44b8328b Text-copy-source-sha1: e983f374794de9c64e3d1c1de1d490c0756eeeff Revision-number: 11 
Prop-content-length: 117 Content-length: 117 K 7 svn:log V 15 copydirfrompast K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-09T13:02:44.111550Z PROPS-END Node-path: subproject/trunk/d2 Node-kind: dir Node-action: add Node-copyfrom-rev: 7 Node-copyfrom-path: subproject/trunk/d2 Revision-number: 12 Prop-content-length: 107 Content-length: 107 K 7 svn:log V 6 add d3 K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-09T13:02:45.067982Z PROPS-END Node-path: subproject/trunk/d3 Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END Node-path: subproject/trunk/d3/d31 Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END Node-path: subproject/trunk/d3/d31/e Node-kind: file Node-action: add Prop-content-length: 10 Text-content-length: 2 Text-content-md5: 9ffbf43126e33be52cd2bf7e01d627f9 Text-content-sha1: 094e3afb2fe8dfe82f63731cdcd3b999f4856cff Content-length: 12 PROPS-END e Node-path: subproject/trunk/d3/f Node-kind: file Node-action: add Prop-content-length: 10 Text-content-length: 2 Text-content-md5: 9a8ad92c50cae39aa2c5604fd0ab6d8c Text-content-sha1: a9fcd54b25e7e863d72cd47c08af46e61b74b561 Content-length: 12 PROPS-END f Revision-number: 13 Prop-content-length: 128 Content-length: 128 K 7 svn:log V 26 copy dir and remove subdir K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-09T13:02:47.061259Z PROPS-END Node-path: subproject/trunk/d3/d31 Node-action: delete Node-path: subproject/trunk/d4 Node-kind: dir Node-action: add Node-copyfrom-rev: 12 Node-copyfrom-path: subproject/trunk/d3 Revision-number: 14 Prop-content-length: 110 Content-length: 110 K 7 svn:log V 9 add d4old K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-09T13:02:49.063363Z PROPS-END Node-path: subproject/trunk/d4old Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END Node-path: subproject/trunk/d4old/g Node-kind: file Node-action: add Prop-content-length: 10 Text-content-length: 2 
Text-content-md5: f5302386464f953ed581edac03556e55 Text-content-sha1: a5938ace3f424be1a26904781cdb06d55b614e6b Content-length: 12 PROPS-END g Revision-number: 15 Prop-content-length: 125 Content-length: 125 K 7 svn:log V 23 rename d4old into d4new K 10 svn:author V 7 pmezard K 8 svn:date V 27 2010-05-09T13:02:51.047304Z PROPS-END Node-path: subproject/trunk/d4new Node-kind: dir Node-action: add Node-copyfrom-rev: 14 Node-copyfrom-path: subproject/trunk/d4old Node-path: subproject/trunk/d4old Node-action: delete mercurial-3.7.3/tests/svn/startrev.svndump0000644000175000017500000000545712676531525020402 0ustar mpmmpm00000000000000SVN-fs-dump-format-version: 2 UUID: c731c652-65e9-4325-a17e-fed96a319f22 Revision-number: 0 Prop-content-length: 56 Content-length: 56 K 8 svn:date V 27 2008-12-06T13:44:21.642421Z PROPS-END Revision-number: 1 Prop-content-length: 112 Content-length: 112 K 7 svn:log V 10 init projA K 10 svn:author V 7 pmezard K 8 svn:date V 27 2008-12-06T13:44:21.759281Z PROPS-END Node-path: branches Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END Node-path: tags Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END Node-path: trunk Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END Revision-number: 2 Prop-content-length: 109 Content-length: 109 K 7 svn:log V 8 createab K 10 svn:author V 7 pmezard K 8 svn:date V 27 2008-12-06T13:44:22.179257Z PROPS-END Node-path: trunk/a Node-kind: file Node-action: add Prop-content-length: 10 Text-content-length: 2 Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3 Content-length: 12 PROPS-END a Node-path: trunk/b Node-kind: file Node-action: add Prop-content-length: 10 Text-content-length: 2 Text-content-md5: 3b5d5c3712955042212316173ccf37be Content-length: 12 PROPS-END b Revision-number: 3 Prop-content-length: 108 Content-length: 108 K 7 svn:log V 7 removeb K 10 svn:author V 7 pmezard K 8 svn:date V 27 
2008-12-06T13:44:23.176546Z PROPS-END Node-path: trunk/b Node-action: delete Revision-number: 4 Prop-content-length: 109 Content-length: 109 K 7 svn:log V 8 changeaa K 10 svn:author V 7 pmezard K 8 svn:date V 27 2008-12-06T13:44:25.147151Z PROPS-END Node-path: trunk/a Node-kind: file Node-action: change Text-content-length: 4 Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb Content-length: 4 a a Revision-number: 5 Prop-content-length: 119 Content-length: 119 K 7 svn:log V 17 branch, changeaaa K 10 svn:author V 7 pmezard K 8 svn:date V 27 2008-12-06T13:44:28.158475Z PROPS-END Node-path: branches/branch1 Node-kind: dir Node-action: add Node-copyfrom-rev: 4 Node-copyfrom-path: trunk Prop-content-length: 34 Content-length: 34 K 13 svn:mergeinfo V 0 PROPS-END Node-path: branches/branch1/a Node-kind: file Node-action: change Text-content-length: 6 Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c Content-length: 6 a a a Revision-number: 6 Prop-content-length: 117 Content-length: 117 K 7 svn:log V 15 addc,changeaaaa K 10 svn:author V 7 pmezard K 8 svn:date V 27 2008-12-06T13:44:29.180655Z PROPS-END Node-path: branches/branch1/a Node-kind: file Node-action: change Text-content-length: 8 Text-content-md5: d12178e74d8774e34361e0a08d1fd2b7 Content-length: 8 a a a a Node-path: branches/branch1/c Node-kind: file Node-action: add Prop-content-length: 10 Text-content-length: 2 Text-content-md5: 2cd6ee2c70b0bde53fbe6cac3c8b8bb1 Content-length: 12 PROPS-END c mercurial-3.7.3/tests/svn/tags.svndump0000644000175000017500000001064612676531525017462 0ustar mpmmpm00000000000000SVN-fs-dump-format-version: 2 UUID: a9c3b03d-cffa-4248-8023-ecf4b2bdf5d5 Revision-number: 0 Prop-content-length: 56 Content-length: 56 K 8 svn:date V 27 2009-04-29T19:26:51.708679Z PROPS-END Revision-number: 1 Prop-content-length: 112 Content-length: 112 K 7 svn:log V 10 init projA K 10 svn:author V 7 pmezard K 8 svn:date V 27 2009-04-29T19:26:52.115023Z PROPS-END Node-path: branches Node-kind: dir Node-action: 
add Prop-content-length: 10 Content-length: 10 PROPS-END Node-path: tags Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END Node-path: trunk Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END Node-path: unrelated Node-kind: dir Node-action: add Prop-content-length: 10 Content-length: 10 PROPS-END Revision-number: 2 Prop-content-length: 105 Content-length: 105 K 7 svn:log V 4 adda K 10 svn:author V 7 pmezard K 8 svn:date V 27 2009-04-29T19:26:53.109819Z PROPS-END Node-path: trunk/a Node-kind: file Node-action: add Prop-content-length: 10 Text-content-length: 2 Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3 Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b Content-length: 12 PROPS-END a Revision-number: 3 Prop-content-length: 108 Content-length: 108 K 7 svn:log V 7 changea K 10 svn:author V 7 pmezard K 8 svn:date V 27 2009-04-29T19:26:54.073017Z PROPS-END Node-path: trunk/a Node-kind: file Node-action: change Text-content-length: 4 Text-content-md5: 0d227f1abf8c2932d342e9b99cc957eb Text-content-sha1: d7c8127a20a396cff08af086a1c695b0636f0c29 Content-length: 4 a a Revision-number: 4 Prop-content-length: 109 Content-length: 109 K 7 svn:log V 8 changea2 K 10 svn:author V 7 pmezard K 8 svn:date V 27 2009-04-29T19:26:55.076032Z PROPS-END Node-path: trunk/a Node-kind: file Node-action: change Text-content-length: 6 Text-content-md5: 7d4ebf8f298d22fc349a91725b00af1c Text-content-sha1: 92f31bc48f52339253fce6cad9f2f0c95b302f7e Content-length: 6 a a a Revision-number: 5 Prop-content-length: 117 Content-length: 117 K 7 svn:log V 15 unrelatedchange K 10 svn:author V 7 pmezard K 8 svn:date V 27 2009-04-29T19:26:56.095784Z PROPS-END Node-path: unrelated/dummy Node-kind: file Node-action: add Prop-content-length: 10 Text-content-length: 2 Text-content-md5: 60b725f10c9c85c70d97880dfe8191b3 Text-content-sha1: 3f786850e387550fdab836ed7e6dc881de23001b Content-length: 12 PROPS-END a Revision-number: 6 
Prop-content-length: 131 Content-length: 131 K 7 svn:log V 29 tagging trunk.v1 trunk.badtag K 10 svn:author V 7 pmezard K 8 svn:date V 27 2009-04-29T19:27:00.074864Z PROPS-END Node-path: tags/trunk.badtag Node-kind: dir Node-action: add Node-copyfrom-rev: 5 Node-copyfrom-path: trunk Node-path: tags/trunk.v1 Node-kind: dir Node-action: add Node-copyfrom-rev: 5 Node-copyfrom-path: trunk Revision-number: 7 Prop-content-length: 109 Content-length: 109 K 7 svn:log V 8 changea3 K 10 svn:author V 7 pmezard K 8 svn:date V 27 2009-04-29T19:27:01.073910Z PROPS-END Node-path: trunk/a Node-kind: file Node-action: change Text-content-length: 8 Text-content-md5: d12178e74d8774e34361e0a08d1fd2b7 Text-content-sha1: cce0b2a263066e26610df9082b7b3c810f71262e Content-length: 8 a a a a Revision-number: 8 Prop-content-length: 118 Content-length: 118 K 7 svn:log V 16 fix trunk.badtag K 10 svn:author V 7 pmezard K 8 svn:date V 27 2009-04-29T19:27:04.073542Z PROPS-END Node-path: tags/trunk.goodtag Node-kind: dir Node-action: add Node-copyfrom-rev: 7 Node-copyfrom-path: tags/trunk.badtag Node-path: tags/trunk.badtag Node-action: delete Revision-number: 9 Prop-content-length: 108 Content-length: 108 K 7 svn:log V 7 changea K 10 svn:author V 7 pmezard K 8 svn:date V 27 2009-04-29T19:27:05.095204Z PROPS-END Node-path: trunk/a Node-kind: file Node-action: change Text-content-length: 10 Text-content-md5: 3f65cbdca1b64c2f8f574fccae24f3a4 Text-content-sha1: 5c077263421de2abff9dbe867921bc6810811aa2 Content-length: 10 a a a a a Revision-number: 10 Prop-content-length: 115 Content-length: 115 K 7 svn:log V 13 removegoodtag K 10 svn:author V 7 pmezard K 8 svn:date V 27 2009-04-29T19:27:06.089193Z PROPS-END Node-path: tags/trunk.goodtag Node-action: delete Revision-number: 11 Prop-content-length: 117 Content-length: 117 K 7 svn:log V 15 recreategoodtag K 10 svn:author V 7 pmezard K 8 svn:date V 27 2009-04-29T19:27:09.070471Z PROPS-END Node-path: tags/trunk.goodtag Node-kind: dir Node-action: add 
Node-copyfrom-rev: 10 Node-copyfrom-path: trunk mercurial-3.7.3/tests/svn/svndump-startrev.sh0000755000175000017500000000133412676531525021003 0ustar mpmmpm00000000000000#!/bin/sh # # Use this script to generate startrev.svndump # mkdir temp cd temp mkdir project-orig cd project-orig mkdir trunk mkdir branches mkdir tags cd .. svnadmin create svn-repo svnurl=file://`pwd`/svn-repo svn import project-orig $svnurl -m "init projA" svn co $svnurl project cd project echo a > trunk/a echo b > trunk/b svn add trunk/a trunk/b svn ci -m createab svn rm trunk/b svn ci -m removeb svn up echo a >> trunk/a svn ci -m changeaa # Branch svn up svn copy trunk branches/branch1 echo a >> branches/branch1/a svn ci -m "branch, changeaaa" echo a >> branches/branch1/a echo c > branches/branch1/c svn add branches/branch1/c svn ci -m "addc,changeaaaa" svn up cd .. svnadmin dump svn-repo > ../startrev.svndumpmercurial-3.7.3/tests/svn/svndump-encoding.sh0000755000175000017500000000203712676531525020720 0ustar mpmmpm00000000000000#!/bin/sh # -*- coding: utf-8 -*- # # Use this script to generate encoding.svndump # mkdir temp cd temp mkdir project-orig cd project-orig mkdir trunk mkdir branches mkdir tags cd .. svnadmin create svn-repo svnurl=file://`pwd`/svn-repo svn import project-orig $svnurl -m "init projA" svn co $svnurl project cd project echo e > trunk/é mkdir trunk/à echo d > trunk/à/é svn add trunk/é trunk/à svn ci -m hello # Copy files and directories svn mv trunk/é trunk/è svn mv trunk/à trunk/ù svn ci -m "copy files" # Remove files svn rm trunk/è svn rm trunk/ù svn ci -m 'remove files' # Create branches with and from weird names svn up svn cp trunk branches/branché echo a > branches/branché/a svn ci -m 'branch to branché' svn up svn cp branches/branché branches/branchée echo a >> branches/branché/a svn ci -m 'branch to branchée' # Create tag with weird name svn up svn cp trunk tags/branché svn ci -m 'tag trunk' svn cp branches/branchée tags/branchée svn ci -m 'tag branché' cd .. 
svnadmin dump svn-repo > ../encoding.svndump mercurial-3.7.3/tests/svn/svndump-empty.sh0000755000175000017500000000154212676531525020270 0ustar mpmmpm00000000000000#!/bin/sh # # Use this script to generate empty.svndump # mkdir temp cd temp mkdir project-orig cd project-orig mkdir trunk mkdir branches mkdir tags cd .. svnadmin create svn-repo svnurl=file://`pwd`/svn-repo svn import project-orig $svnurl -m "init projA" svn co $svnurl project cd project mkdir trunk/dir echo a > trunk/dir/a svn add trunk/dir svn ci -m adddir echo b > trunk/b svn add trunk/b svn ci -m addb echo c > c svn add c svn ci -m addc cd .. # svnsync repo/trunk/dir only so the last two revisions are empty svnadmin create svn-empty cat > svn-empty/hooks/pre-revprop-change < ../empty.svndump mercurial-3.7.3/tests/svn/svndump-move.sh0000755000175000017500000000441412676531525020101 0ustar mpmmpm00000000000000#!/bin/sh # # Use this script to generate move.svndump # mkdir temp cd temp mkdir project-orig cd project-orig mkdir trunk echo a > trunk/a mkdir trunk/d1 mkdir trunk/d2 echo b > trunk/d1/b echo c > trunk/d1/c echo d > trunk/d2/d cd .. svnadmin create svn-repo svnurl=file://`pwd`/svn-repo svn import project-orig $svnurl -m "init projA" svn co $svnurl project cd project # Build a module renaming chain which used to confuse the converter. 
# Update svn repository echo a >> trunk/a echo c >> trunk/d1/c svn ci -m commitbeforemove svn mv $svnurl/trunk $svnurl/subproject -m movedtrunk svn up mkdir subproject/trunk svn add subproject/trunk svn ci -m createtrunk mkdir subproject/branches svn add subproject/branches svn ci -m createbranches svn mv $svnurl/subproject/d1 $svnurl/subproject/trunk/d1 -m moved1 svn mv $svnurl/subproject/d2 $svnurl/subproject/trunk/d2 -m moved2 svn up echo b >> subproject/trunk/d1/b svn rm subproject/trunk/d2 svn ci -m "changeb and rm d2" svn mv $svnurl/subproject/trunk/d1 $svnurl/subproject/branches/d1 -m moved1again if svn help copy | grep 'SRC\[@REV\]' > /dev/null 2>&1; then # SVN >= 1.5 replaced the -r REV syntax with @REV # Copy a file from a past revision svn copy $svnurl/subproject/trunk/d2/d@7 $svnurl/subproject/trunk -m copyfilefrompast # Copy a directory from a past revision svn copy $svnurl/subproject/trunk/d2@7 $svnurl/subproject/trunk -m copydirfrompast else # Copy a file from a past revision svn copy -r 7 $svnurl/subproject/trunk/d2/d $svnurl/subproject/trunk -m copyfilefrompast # Copy a directory from a past revision svn copy -r 7 $svnurl/subproject/trunk/d2 $svnurl/subproject/trunk -m copydirfrompast fi # Copy a directory while removing a subdirectory svn up mkdir -p subproject/trunk/d3/d31 echo e > subproject/trunk/d3/d31/e echo f > subproject/trunk/d3/f svn add subproject/trunk/d3 svn ci -m "add d3" svn copy subproject/trunk/d3 subproject/trunk/d4 svn rm subproject/trunk/d3/d31 svn ci -m "copy dir and remove subdir" # Test directory moves svn up mkdir -p subproject/trunk/d4old echo g > subproject/trunk/d4old/g svn add subproject/trunk/d4old svn ci -m "add d4old" svn mv subproject/trunk/d4old subproject/trunk/d4new svn ci -m "rename d4old into d4new" cd .. 
svnadmin dump svn-repo > ../move.svndumpmercurial-3.7.3/tests/test-tools.t0000644000175000017500000000542112676531525016575 0ustar mpmmpm00000000000000Tests of the file helper tool $ f -h ?sage: f [options] [filenames] (glob) ?ptions: (glob) -h, --help show this help message and exit -t, --type show file type (file or directory) -m, --mode show file mode -l, --links show number of links -s, --size show size of file -n NEWER, --newer=NEWER check if file is newer (or same) -r, --recurse recurse into directories -S, --sha1 show sha1 hash of the content -M, --md5 show md5 hash of the content -D, --dump dump file content -H, --hexdump hexdump file content -B BYTES, --bytes=BYTES number of characters to dump -L LINES, --lines=LINES number of lines to dump -q, --quiet no default output $ mkdir dir $ cd dir $ f --size size=0 $ echo hello | f --md5 --size size=6, md5=b1946ac92492d2347c6235b4d2611184 $ f foo foo: file not found $ echo foo > foo $ f foo foo: #if symlink $ f foo --mode foo: mode=644 #endif #if no-windows $ python $TESTDIR/seq.py 10 > bar #else Convert CRLF -> LF for consistency $ python $TESTDIR/seq.py 10 | sed "s/$//" > bar #endif #if unix-permissions symlink $ chmod +x bar $ f bar --newer foo --mode --type --size --dump --links --bytes 7 bar: file, size=21, mode=755, links=1, newer than foo >>> 1 2 3 4 <<< no trailing newline #endif #if unix-permissions $ ln bar baz $ f bar -n baz -l --hexdump -t --sha1 --lines=9 -B 20 bar: file, links=2, newer than baz, sha1=612ca68d0305c821750a 0000: 31 0a 32 0a 33 0a 34 0a 35 0a 36 0a 37 0a 38 0a |1.2.3.4.5.6.7.8.| 0010: 39 0a |9.| $ rm baz #endif #if unix-permissions symlink $ ln -s yadda l $ f . --recurse -MStmsB4 .: directory with 3 files, mode=755 ./bar: file, size=21, mode=755, md5=3b03, sha1=612c ./foo: file, size=4, mode=644, md5=d3b0, sha1=f1d2 ./l: link, size=5, md5=2faa, sha1=af93 #endif $ f --quiet bar -DL 3 1 2 3 $ cd .. 
Yadda is a symlink #if symlink $ f -qr dir -HB 17 dir: directory with 3 files dir/bar: 0000: 31 0a 32 0a 33 0a 34 0a 35 0a 36 0a 37 0a 38 0a |1.2.3.4.5.6.7.8.| 0010: 39 |9| dir/foo: 0000: 66 6f 6f 0a |foo.| dir/l: 0000: 79 61 64 64 61 |yadda| #else $ f -qr dir -HB 17 dir: directory with 2 files (esc) dir/bar: (glob) 0000: 31 0a 32 0a 33 0a 34 0a 35 0a 36 0a 37 0a 38 0a |1.2.3.4.5.6.7.8.| 0010: 39 |9| dir/foo: (glob) 0000: 66 6f 6f 0a |foo.| #endif mercurial-3.7.3/tests/test-rename-merge1.t0000644000175000017500000001053712676531525020066 0ustar mpmmpm00000000000000 $ hg init $ echo "[merge]" >> .hg/hgrc $ echo "followcopies = 1" >> .hg/hgrc $ echo foo > a $ echo foo > a2 $ hg add a a2 $ hg ci -m "start" $ hg mv a b $ hg mv a2 b2 $ hg ci -m "rename" $ hg co 0 2 files updated, 0 files merged, 2 files removed, 0 files unresolved $ echo blahblah > a $ echo blahblah > a2 $ hg mv a2 c2 $ hg ci -m "modify" created new head $ hg merge -y --debug searching for copies back to rev 1 unmatched files in local: c2 unmatched files in other: b b2 all copies found (* = to merge, ! = divergent, % = renamed and deleted): src: 'a' -> dst: 'b' * src: 'a2' -> dst: 'b2' ! src: 'a2' -> dst: 'c2' ! checking for directory renames resolving manifests branchmerge: True, force: False, partial: False ancestor: af1939970a1c, local: 044f8520aeeb+, remote: 85c198ef2f6c note: possible conflict - a2 was renamed multiple times to: c2 b2 preserving a for resolve of b removing a b2: remote created -> g getting b2 b: remote moved from a -> m (premerge) picked tool ':merge' for b (binary False symlink False changedelete False) merging a and b to b my b@044f8520aeeb+ other b@85c198ef2f6c ancestor a@af1939970a1c premerge successful 1 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg status -AC M b a M b2 R a C c2 $ cat b blahblah $ hg ci -m "merge" $ hg debugindex b rev offset length ..... linkrev nodeid p1 p2 (re) 0 0 67 ..... 
1 57eacc201a7f 000000000000 000000000000 (re) 1 67 72 ..... 3 4727ba907962 000000000000 57eacc201a7f (re) $ hg debugrename b b renamed from a:dd03b83622e78778b403775d0d074b9ac7387a66 This used to trigger a "divergent renames" warning, despite no renames $ hg cp b b3 $ hg cp b b4 $ hg ci -A -m 'copy b twice' $ hg up eb92d88a9712 0 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg up 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg rm b3 b4 $ hg ci -m 'clean up a bit of our mess' We'd rather not warn on divergent renames done in the same changeset (issue2113) $ hg cp b b3 $ hg mv b b4 $ hg ci -A -m 'divergent renames in same changeset' $ hg up c761c6948de0 1 files updated, 0 files merged, 2 files removed, 0 files unresolved $ hg up 2 files updated, 0 files merged, 1 files removed, 0 files unresolved Check for issue2642 $ hg init t $ cd t $ echo c0 > f1 $ hg ci -Aqm0 $ hg up null -q $ echo c1 > f1 # backport $ hg ci -Aqm1 $ hg mv f1 f2 $ hg ci -qm2 $ hg up 0 -q $ hg merge 1 -q --tool internal:local $ hg ci -qm3 $ hg merge 2 merging f1 and f2 to f2 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat f2 c0 $ cd .. Check for issue2089 $ hg init repo2089 $ cd repo2089 $ echo c0 > f1 $ hg ci -Aqm0 $ hg up null -q $ echo c1 > f1 $ hg ci -Aqm1 $ hg up 0 -q $ hg merge 1 -q --tool internal:local $ echo c2 > f1 $ hg ci -qm2 $ hg up 1 -q $ hg mv f1 f2 $ hg ci -Aqm3 $ hg up 2 -q $ hg merge 3 merging f1 and f2 to f2 0 files updated, 1 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ cat f2 c2 $ cd .. 
Check for issue3074 $ hg init repo3074 $ cd repo3074 $ echo foo > file $ hg add file $ hg commit -m "added file" $ hg mv file newfile $ hg commit -m "renamed file" $ hg update 0 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg rm file $ hg commit -m "deleted file" created new head $ hg merge --debug searching for copies back to rev 1 unmatched files in other: newfile all copies found (* = to merge, ! = divergent, % = renamed and deleted): src: 'file' -> dst: 'newfile' % checking for directory renames resolving manifests branchmerge: True, force: False, partial: False ancestor: 19d7f95df299, local: 0084274f6b67+, remote: 5d32493049f0 note: possible conflict - file was deleted and renamed to: newfile newfile: remote created -> g getting newfile 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg status M newfile $ cd .. mercurial-3.7.3/tests/test-fncache.t0000644000175000017500000002710612676531525017030 0ustar mpmmpm00000000000000Init repo1: $ hg init repo1 $ cd repo1 $ echo "some text" > a $ hg add adding a $ hg ci -m first $ cat .hg/store/fncache | sort data/a.i Testing a.i/b: $ mkdir a.i $ echo "some other text" > a.i/b $ hg add adding a.i/b (glob) $ hg ci -m second $ cat .hg/store/fncache | sort data/a.i data/a.i.hg/b.i Testing a.i.hg/c: $ mkdir a.i.hg $ echo "yet another text" > a.i.hg/c $ hg add adding a.i.hg/c (glob) $ hg ci -m third $ cat .hg/store/fncache | sort data/a.i data/a.i.hg.hg/c.i data/a.i.hg/b.i Testing verify: $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 3 files, 3 changesets, 3 total revisions $ rm .hg/store/fncache $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files warning: revlog 'data/a.i' not in fncache! warning: revlog 'data/a.i.hg/c.i' not in fncache! warning: revlog 'data/a.i/b.i' not in fncache! 
3 files, 3 changesets, 3 total revisions 3 warnings encountered! hint: run "hg debugrebuildfncache" to recover from corrupt fncache Follow the hint to make sure it works $ hg debugrebuildfncache adding data/a.i adding data/a.i.hg/c.i adding data/a.i/b.i 3 items added, 0 removed from fncache $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 3 files, 3 changesets, 3 total revisions $ cd .. Non store repo: $ hg --config format.usestore=False init foo $ cd foo $ mkdir tst.d $ echo foo > tst.d/foo $ hg ci -Amfoo adding tst.d/foo $ find .hg | sort .hg .hg/00changelog.i .hg/00manifest.i .hg/cache .hg/cache/branch2-served .hg/cache/rbc-names-v1 .hg/cache/rbc-revs-v1 .hg/data .hg/data/tst.d.hg .hg/data/tst.d.hg/foo.i .hg/dirstate .hg/last-message.txt .hg/phaseroots .hg/requires .hg/undo .hg/undo.backup.dirstate .hg/undo.backupfiles .hg/undo.bookmarks .hg/undo.branch .hg/undo.desc .hg/undo.dirstate .hg/undo.phaseroots $ cd .. Non fncache repo: $ hg --config format.usefncache=False init bar $ cd bar $ mkdir tst.d $ echo foo > tst.d/Foo $ hg ci -Amfoo adding tst.d/Foo $ find .hg | sort .hg .hg/00changelog.i .hg/cache .hg/cache/branch2-served .hg/cache/rbc-names-v1 .hg/cache/rbc-revs-v1 .hg/dirstate .hg/last-message.txt .hg/requires .hg/store .hg/store/00changelog.i .hg/store/00manifest.i .hg/store/data .hg/store/data/tst.d.hg .hg/store/data/tst.d.hg/_foo.i .hg/store/phaseroots .hg/store/undo .hg/store/undo.backupfiles .hg/store/undo.phaseroots .hg/undo.backup.dirstate .hg/undo.bookmarks .hg/undo.branch .hg/undo.desc .hg/undo.dirstate $ cd .. 
Encoding of reserved / long paths in the store $ hg init r2 $ cd r2 $ cat < .hg/hgrc > [ui] > portablefilenames = ignore > EOF $ hg import -q --bypass - < # HG changeset patch > # User test > # Date 0 0 > # Node ID 1c7a2f7cb77be1a0def34e4c7cabc562ad98fbd7 > # Parent 0000000000000000000000000000000000000000 > 1 > > diff --git a/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz > new file mode 100644 > --- /dev/null > +++ b/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxxxxx-xxxxxxxxx-xxxxxxxxx-123456789-12.3456789-12345-ABCDEFGHIJKLMNOPRSTUVWXYZ-abcdefghjiklmnopqrstuvwxyz > @@ -0,0 +1,1 @@ > +foo > diff --git a/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT > new file mode 100644 > --- /dev/null > +++ b/AUX/SECOND/X.PRN/FOURTH/FI:FTH/SIXTH/SEVENTH/EIGHTH/NINETH/TENTH/ELEVENTH/LOREMIPSUM.TXT > @@ -0,0 +1,1 @@ > +foo > diff --git a/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt > new file mode 100644 > --- /dev/null > +++ b/Project Planning/Resources/AnotherLongDirectoryName/Followedbyanother/AndAnother/AndThenAnExtremelyLongFileName.txt > @@ -0,0 +1,1 @@ > +foo > diff --git a/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c > new file mode 100644 > --- /dev/null > +++ b/bla.aux/prn/PRN/lpt/com3/nul/coma/foo.NUL/normal.c > @@ -0,0 +1,1 @@ > +foo > diff --git 
a/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider > new file mode 100644 > --- /dev/null > +++ b/enterprise/openesbaddons/contrib-imola/corba-bc/netbeansplugin/wsdlExtension/src/main/java/META-INF/services/org.netbeans.modules.xml.wsdl.bindingsupport.spi.ExtensibilityElementTemplateProvider > @@ -0,0 +1,1 @@ > +foo > EOF $ find .hg/store -name *.i | sort .hg/store/00changelog.i .hg/store/00manifest.i .hg/store/data/bla.aux/pr~6e/_p_r_n/lpt/co~6d3/nu~6c/coma/foo._n_u_l/normal.c.i .hg/store/dh/12345678/12345678/12345678/12345678/12345678/12345678/12345678/12345/xxxxxx168e07b38e65eff86ab579afaaa8e30bfbe0f35f.i .hg/store/dh/au~78/second/x.prn/fourth/fi~3afth/sixth/seventh/eighth/nineth/tenth/loremia20419e358ddff1bf8751e38288aff1d7c32ec05.i .hg/store/dh/enterpri/openesba/contrib-/corba-bc/netbeans/wsdlexte/src/main/java/org.net7018f27961fdf338a598a40c4683429e7ffb9743.i .hg/store/dh/project_/resource/anotherl/followed/andanoth/andthenanextremelylongfilename0d8e1f4187c650e2f1fdca9fd90f786bc0976b6b.i $ cd .. 
Aborting lock does not prevent fncache writes $ cat > exceptionext.py < import os > from mercurial import commands, error > from mercurial.extensions import wrapcommand, wrapfunction > > def lockexception(orig, vfs, lockname, wait, releasefn, *args, **kwargs): > def releasewrap(): > l.held = False # ensure __del__ is a noop > raise error.Abort("forced lock failure") > l = orig(vfs, lockname, wait, releasewrap, *args, **kwargs) > return l > > def reposetup(ui, repo): > wrapfunction(repo, '_lock', lockexception) > > cmdtable = {} > > # wrap "commit" command to prevent wlock from being '__del__()'-ed > # at the end of dispatching (for intentional "forced lcok failure") > def commitwrap(orig, ui, repo, *pats, **opts): > repo = repo.unfiltered() # to use replaced repo._lock certainly > wlock = repo.wlock() > try: > return orig(ui, repo, *pats, **opts) > finally: > # multiple 'relase()' is needed for complete releasing wlock, > # because "forced" abort at last releasing store lock > # prevents wlock from being released at same 'lockmod.release()' > for i in range(wlock.held): > wlock.release() > > def extsetup(ui): > wrapcommand(commands.table, "commit", commitwrap) > EOF $ extpath=`pwd`/exceptionext.py $ hg init fncachetxn $ cd fncachetxn $ printf "[extensions]\nexceptionext=$extpath\n" >> .hg/hgrc $ touch y $ hg ci -qAm y abort: forced lock failure [255] $ cat .hg/store/fncache data/y.i Aborting transaction prevents fncache change $ cat > ../exceptionext.py < import os > from mercurial import commands, error, localrepo > from mercurial.extensions import wrapfunction > > def wrapper(orig, self, *args, **kwargs): > tr = orig(self, *args, **kwargs) > def fail(tr): > raise error.Abort("forced transaction failure") > # zzz prefix to ensure it sorted after store.write > tr.addfinalize('zzz-forcefails', fail) > return tr > > def uisetup(ui): > wrapfunction(localrepo.localrepository, 'transaction', wrapper) > > cmdtable = {} > > EOF $ rm -f "${extpath}c" $ touch z $ hg ci -qAm 
z transaction abort! rollback completed abort: forced transaction failure [255] $ cat .hg/store/fncache data/y.i Aborted transactions can be recovered later $ cat > ../exceptionext.py < import os > from mercurial import commands, error, transaction, localrepo > from mercurial.extensions import wrapfunction > > def trwrapper(orig, self, *args, **kwargs): > tr = orig(self, *args, **kwargs) > def fail(tr): > raise error.Abort("forced transaction failure") > # zzz prefix to ensure it sorted after store.write > tr.addfinalize('zzz-forcefails', fail) > return tr > > def abortwrapper(orig, self, *args, **kwargs): > raise error.Abort("forced transaction failure") > > def uisetup(ui): > wrapfunction(localrepo.localrepository, 'transaction', trwrapper) > wrapfunction(transaction.transaction, '_abort', abortwrapper) > > cmdtable = {} > > EOF $ rm -f "${extpath}c" $ hg up -q 1 $ touch z $ hg ci -qAm z 2>/dev/null [255] $ cat .hg/store/fncache | sort data/y.i data/z.i $ hg recover rolling back interrupted transaction checking changesets checking manifests crosschecking files in changesets and manifests checking files 1 files, 1 changesets, 1 total revisions $ cat .hg/store/fncache data/y.i $ cd .. debugrebuildfncache does nothing unless repo has fncache requirement $ hg --config format.usefncache=false init nofncache $ cd nofncache $ hg debugrebuildfncache (not rebuilding fncache because repository does not support fncache) $ cd .. debugrebuildfncache works on empty repository $ hg init empty $ cd empty $ hg debugrebuildfncache fncache already up to date $ cd .. 
debugrebuildfncache on an up to date repository no-ops $ hg init repo $ cd repo $ echo initial > foo $ echo initial > .bar $ hg commit -A -m initial adding .bar adding foo $ cat .hg/store/fncache | sort data/.bar.i data/foo.i $ hg debugrebuildfncache fncache already up to date debugrebuildfncache restores deleted fncache file $ rm -f .hg/store/fncache $ hg debugrebuildfncache adding data/.bar.i adding data/foo.i 2 items added, 0 removed from fncache $ cat .hg/store/fncache | sort data/.bar.i data/foo.i Rebuild after rebuild should no-op $ hg debugrebuildfncache fncache already up to date A single missing file should get restored, an extra file should be removed $ cat > .hg/store/fncache << EOF > data/foo.i > data/bad-entry.i > EOF $ hg debugrebuildfncache removing data/bad-entry.i adding data/.bar.i 1 items added, 1 removed from fncache $ cat .hg/store/fncache | sort data/.bar.i data/foo.i $ cd .. Try a simple variation without dotencode to ensure fncache is ignorant of encoding $ hg --config format.dotencode=false init nodotencode $ cd nodotencode $ echo initial > foo $ echo initial > .bar $ hg commit -A -m initial adding .bar adding foo $ cat .hg/store/fncache | sort data/.bar.i data/foo.i $ rm .hg/store/fncache $ hg debugrebuildfncache adding data/.bar.i adding data/foo.i 2 items added, 0 removed from fncache $ cat .hg/store/fncache | sort data/.bar.i data/foo.i mercurial-3.7.3/tests/run-tests.py0000755000175000017500000025420012676531525016615 0ustar mpmmpm00000000000000#!/usr/bin/env python # # run-tests.py - Run a set of tests on Mercurial # # Copyright 2006 Matt Mackall # # This software may be used and distributed according to the terms of the # GNU General Public License version 2 or any later version. 
# Modifying this script is tricky because it has many modes: # - serial (default) vs parallel (-jN, N > 1) # - no coverage (default) vs coverage (-c, -C, -s) # - temp install (default) vs specific hg script (--with-hg, --local) # - tests are a mix of shell scripts and Python scripts # # If you change this script, it is recommended that you ensure you # haven't broken it by running it in various modes with a representative # sample of test scripts. For example: # # 1) serial, no coverage, temp install: # ./run-tests.py test-s* # 2) serial, no coverage, local hg: # ./run-tests.py --local test-s* # 3) serial, coverage, temp install: # ./run-tests.py -c test-s* # 4) serial, coverage, local hg: # ./run-tests.py -c --local test-s* # unsupported # 5) parallel, no coverage, temp install: # ./run-tests.py -j2 test-s* # 6) parallel, no coverage, local hg: # ./run-tests.py -j2 --local test-s* # 7) parallel, coverage, temp install: # ./run-tests.py -j2 -c test-s* # currently broken # 8) parallel, coverage, local install: # ./run-tests.py -j2 -c --local test-s* # unsupported (and broken) # 9) parallel, custom tmp dir: # ./run-tests.py -j2 --tmpdir /tmp/myhgtests # 10) parallel, pure, tests that call run-tests: # ./run-tests.py --pure `grep -l run-tests.py *.t` # # (You could use any subset of the tests: test-s* happens to match # enough that it's worth doing parallel runs, few enough that it # completes fairly quickly, includes both shell and Python scripts, and # includes some scripts that run daemon processes.) 
from __future__ import print_function from distutils import version import difflib import errno import optparse import os import shutil import subprocess import signal import socket import sys import tempfile import time import random import re import threading import killdaemons as killmod try: import Queue as queue except ImportError: import queue from xml.dom import minidom import unittest osenvironb = getattr(os, 'environb', os.environ) try: import json except ImportError: try: import simplejson as json except ImportError: json = None processlock = threading.Lock() if sys.version_info > (3, 5, 0): PYTHON3 = True xrange = range # we use xrange in one place, and we'd rather not use range def _bytespath(p): return p.encode('utf-8') def _strpath(p): return p.decode('utf-8') elif sys.version_info >= (3, 0, 0): print('%s is only supported on Python 3.5+ and 2.6-2.7, not %s' % (sys.argv[0], '.'.join(str(v) for v in sys.version_info[:3]))) sys.exit(70) # EX_SOFTWARE from `man 3 sysexit` else: PYTHON3 = False # In python 2.x, path operations are generally done using # bytestrings by default, so we don't have to do any extra # fiddling there. We define the wrapper functions anyway just to # help keep code consistent between platforms. 
def _bytespath(p): return p _strpath = _bytespath # For Windows support wifexited = getattr(os, "WIFEXITED", lambda x: False) def checkportisavailable(port): """return true if a port seems free to bind on localhost""" try: s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.bind(('localhost', port)) s.close() return True except socket.error as exc: if not exc.errno == errno.EADDRINUSE: raise return False closefds = os.name == 'posix' def Popen4(cmd, wd, timeout, env=None): processlock.acquire() p = subprocess.Popen(cmd, shell=True, bufsize=-1, cwd=wd, env=env, close_fds=closefds, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) processlock.release() p.fromchild = p.stdout p.tochild = p.stdin p.childerr = p.stderr p.timeout = False if timeout: def t(): start = time.time() while time.time() - start < timeout and p.returncode is None: time.sleep(.1) p.timeout = True if p.returncode is None: terminate(p) threading.Thread(target=t).start() return p PYTHON = _bytespath(sys.executable.replace('\\', '/')) IMPL_PATH = b'PYTHONPATH' if 'java' in sys.platform: IMPL_PATH = b'JYTHONPATH' defaults = { 'jobs': ('HGTEST_JOBS', 1), 'timeout': ('HGTEST_TIMEOUT', 180), 'slowtimeout': ('HGTEST_SLOWTIMEOUT', 500), 'port': ('HGTEST_PORT', 20059), 'shell': ('HGTEST_SHELL', 'sh'), } def parselistfiles(files, listtype, warn=True): entries = dict() for filename in files: try: path = os.path.expanduser(os.path.expandvars(filename)) f = open(path, "rb") except IOError as err: if err.errno != errno.ENOENT: raise if warn: print("warning: no such %s file: %s" % (listtype, filename)) continue for line in f.readlines(): line = line.split(b'#', 1)[0].strip() if line: entries[line] = filename f.close() return entries def getparser(): """Obtain the OptionParser used by the CLI.""" parser = optparse.OptionParser("%prog [options] [tests]") # keep these sorted parser.add_option("--blacklist", action="append", help="skip tests listed in the specified blacklist file") 
parser.add_option("--whitelist", action="append", help="always run tests listed in the specified whitelist file") parser.add_option("--changed", type="string", help="run tests that are changed in parent rev or working directory") parser.add_option("-C", "--annotate", action="store_true", help="output files annotated with coverage") parser.add_option("-c", "--cover", action="store_true", help="print a test coverage report") parser.add_option("-d", "--debug", action="store_true", help="debug mode: write output of test scripts to console" " rather than capturing and diffing it (disables timeout)") parser.add_option("-f", "--first", action="store_true", help="exit on the first test failure") parser.add_option("-H", "--htmlcov", action="store_true", help="create an HTML report of the coverage of the files") parser.add_option("-i", "--interactive", action="store_true", help="prompt to accept changed output") parser.add_option("-j", "--jobs", type="int", help="number of jobs to run in parallel" " (default: $%s or %d)" % defaults['jobs']) parser.add_option("--keep-tmpdir", action="store_true", help="keep temporary directory after running tests") parser.add_option("-k", "--keywords", help="run tests matching keywords") parser.add_option("-l", "--local", action="store_true", help="shortcut for --with-hg=/../hg") parser.add_option("--loop", action="store_true", help="loop tests repeatedly") parser.add_option("--runs-per-test", type="int", dest="runs_per_test", help="run each test N times (default=1)", default=1) parser.add_option("-n", "--nodiff", action="store_true", help="skip showing test changes") parser.add_option("-p", "--port", type="int", help="port on which servers should listen" " (default: $%s or %d)" % defaults['port']) parser.add_option("--compiler", type="string", help="compiler to build with") parser.add_option("--pure", action="store_true", help="use pure Python code instead of C extensions") parser.add_option("-R", "--restart", action="store_true", 
help="restart at last error") parser.add_option("-r", "--retest", action="store_true", help="retest failed tests") parser.add_option("-S", "--noskips", action="store_true", help="don't report skip tests verbosely") parser.add_option("--shell", type="string", help="shell to use (default: $%s or %s)" % defaults['shell']) parser.add_option("-t", "--timeout", type="int", help="kill errant tests after TIMEOUT seconds" " (default: $%s or %d)" % defaults['timeout']) parser.add_option("--slowtimeout", type="int", help="kill errant slow tests after SLOWTIMEOUT seconds" " (default: $%s or %d)" % defaults['slowtimeout']) parser.add_option("--time", action="store_true", help="time how long each test takes") parser.add_option("--json", action="store_true", help="store test result data in 'report.json' file") parser.add_option("--tmpdir", type="string", help="run tests in the given temporary directory" " (implies --keep-tmpdir)") parser.add_option("-v", "--verbose", action="store_true", help="output verbose messages") parser.add_option("--xunit", type="string", help="record xunit results at specified path") parser.add_option("--view", type="string", help="external diff viewer") parser.add_option("--with-hg", type="string", metavar="HG", help="test using specified hg script rather than a " "temporary installation") parser.add_option("-3", "--py3k-warnings", action="store_true", help="enable Py3k warnings on Python 2.6+") parser.add_option('--extra-config-opt', action="append", help='set the given config opt in the test hgrc') parser.add_option('--random', action="store_true", help='run tests in random order') parser.add_option('--profile-runner', action='store_true', help='run statprof on run-tests') parser.add_option('--allow-slow-tests', action='store_true', help='allow extremely slow tests') parser.add_option('--showchannels', action='store_true', help='show scheduling channels') for option, (envvar, default) in defaults.items(): defaults[option] = 
type(default)(os.environ.get(envvar, default)) parser.set_defaults(**defaults) return parser def parseargs(args, parser): """Parse arguments with our OptionParser and validate results.""" (options, args) = parser.parse_args(args) # jython is always pure if 'java' in sys.platform or '__pypy__' in sys.modules: options.pure = True if options.with_hg: options.with_hg = os.path.expanduser(options.with_hg) if not (os.path.isfile(options.with_hg) and os.access(options.with_hg, os.X_OK)): parser.error('--with-hg must specify an executable hg script') if not os.path.basename(options.with_hg) == 'hg': sys.stderr.write('warning: --with-hg should specify an hg script\n') if options.local: testdir = os.path.dirname(_bytespath(os.path.realpath(sys.argv[0]))) hgbin = os.path.join(os.path.dirname(testdir), b'hg') if os.name != 'nt' and not os.access(hgbin, os.X_OK): parser.error('--local specified, but %r not found or not executable' % hgbin) options.with_hg = hgbin options.anycoverage = options.cover or options.annotate or options.htmlcov if options.anycoverage: try: import coverage covver = version.StrictVersion(coverage.__version__).version if covver < (3, 3): parser.error('coverage options require coverage 3.3 or later') except ImportError: parser.error('coverage options now require the coverage package') if options.anycoverage and options.local: # this needs some path mangling somewhere, I guess parser.error("sorry, coverage options do not work when --local " "is specified") if options.anycoverage and options.with_hg: parser.error("sorry, coverage options do not work when --with-hg " "is specified") global verbose if options.verbose: verbose = '' if options.tmpdir: options.tmpdir = os.path.expanduser(options.tmpdir) if options.jobs < 1: parser.error('--jobs must be positive') if options.interactive and options.debug: parser.error("-i/--interactive and -d/--debug are incompatible") if options.debug: if options.timeout != defaults['timeout']: sys.stderr.write( 'warning: 
--timeout option ignored with --debug\n') if options.slowtimeout != defaults['slowtimeout']: sys.stderr.write( 'warning: --slowtimeout option ignored with --debug\n') options.timeout = 0 options.slowtimeout = 0 if options.py3k_warnings: if PYTHON3: parser.error( '--py3k-warnings can only be used on Python 2.6 and 2.7') if options.blacklist: options.blacklist = parselistfiles(options.blacklist, 'blacklist') if options.whitelist: options.whitelisted = parselistfiles(options.whitelist, 'whitelist') else: options.whitelisted = {} if options.showchannels: options.nodiff = True return (options, args) def rename(src, dst): """Like os.rename(), trade atomicity and opened files friendliness for existing destination support. """ shutil.copy(src, dst) os.remove(src) _unified_diff = difflib.unified_diff if PYTHON3: import functools _unified_diff = functools.partial(difflib.diff_bytes, difflib.unified_diff) def getdiff(expected, output, ref, err): servefail = False lines = [] for line in _unified_diff(expected, output, ref, err): if line.startswith(b'+++') or line.startswith(b'---'): line = line.replace(b'\\', b'/') if line.endswith(b' \n'): line = line[:-2] + b'\n' lines.append(line) if not servefail and line.startswith( b'+ abort: child process failed to start'): servefail = True return servefail, lines verbose = False def vlog(*msg): """Log only when in verbose mode.""" if verbose is False: return return log(*msg) # Bytes that break XML even in a CDATA block: control characters 0-31 # sans \t, \n and \r CDATA_EVIL = re.compile(br"[\000-\010\013\014\016-\037]") def cdatasafe(data): """Make a string safe to include in a CDATA block. Certain control characters are illegal in a CDATA block, and there's no way to include a ]]> in a CDATA either. This function replaces illegal bytes with ? and adds a space between the ]] so that it won't break the CDATA block. """ return CDATA_EVIL.sub(b'?', data).replace(b']]>', b'] ]>') def log(*msg): """Log something to stdout. 
Arguments are strings to print. """ with iolock: if verbose: print(verbose, end=' ') for m in msg: print(m, end=' ') print() sys.stdout.flush() def terminate(proc): """Terminate subprocess (with fallback for Python versions < 2.6)""" vlog('# Terminating process %d' % proc.pid) try: getattr(proc, 'terminate', lambda : os.kill(proc.pid, signal.SIGTERM))() except OSError: pass def killdaemons(pidfile): return killmod.killdaemons(pidfile, tryhard=False, remove=True, logfn=vlog) class Test(unittest.TestCase): """Encapsulates a single, runnable test. While this class conforms to the unittest.TestCase API, it differs in that instances need to be instantiated manually. (Typically, unittest.TestCase classes are instantiated automatically by scanning modules.) """ # Status code reserved for skipped tests (used by hghave). SKIPPED_STATUS = 80 def __init__(self, path, tmpdir, keeptmpdir=False, debug=False, timeout=defaults['timeout'], startport=defaults['port'], extraconfigopts=None, py3kwarnings=False, shell=None, slowtimeout=defaults['slowtimeout']): """Create a test from parameters. path is the full path to the file defining the test. tmpdir is the main temporary directory to use for this test. keeptmpdir determines whether to keep the test's temporary directory after execution. It defaults to removal (False). debug mode will make the test execute verbosely, with unfiltered output. timeout controls the maximum run time of the test. It is ignored when debug is True. See slowtimeout for tests with #require slow. slowtimeout overrides timeout if the test has #require slow. startport controls the starting port number to use for this test. Each test will reserve 3 port numbers for execution. It is the caller's responsibility to allocate a non-overlapping port range to Test instances. extraconfigopts is an iterable of extra hgrc config options. Values must have the form "key=value" (something understood by hgrc). 
Values of the form "foo.key=value" will result in "[foo] key=value". py3kwarnings enables Py3k warnings. shell is the shell to execute tests in. """ self.path = path self.bname = os.path.basename(path) self.name = _strpath(self.bname) self._testdir = os.path.dirname(path) self.errpath = os.path.join(self._testdir, b'%s.err' % self.bname) self._threadtmp = tmpdir self._keeptmpdir = keeptmpdir self._debug = debug self._timeout = timeout self._slowtimeout = slowtimeout self._startport = startport self._extraconfigopts = extraconfigopts or [] self._py3kwarnings = py3kwarnings self._shell = _bytespath(shell) self._aborted = False self._daemonpids = [] self._finished = None self._ret = None self._out = None self._skipped = None self._testtmp = None # If we're not in --debug mode and reference output file exists, # check test output against it. if debug: self._refout = None # to match "out is None" elif os.path.exists(self.refpath): f = open(self.refpath, 'rb') self._refout = f.read().splitlines(True) f.close() else: self._refout = [] # needed to get base class __repr__ running @property def _testMethodName(self): return self.name def __str__(self): return self.name def shortDescription(self): return self.name def setUp(self): """Tasks to perform before run().""" self._finished = False self._ret = None self._out = None self._skipped = None try: os.mkdir(self._threadtmp) except OSError as e: if e.errno != errno.EEXIST: raise self._testtmp = os.path.join(self._threadtmp, os.path.basename(self.path)) os.mkdir(self._testtmp) # Remove any previous output files. if os.path.exists(self.errpath): try: os.remove(self.errpath) except OSError as e: # We might have raced another test to clean up a .err # file, so ignore ENOENT when removing a previous .err # file. if e.errno != errno.ENOENT: raise def run(self, result): """Run this test and report results against a TestResult instance.""" # This function is extremely similar to unittest.TestCase.run(). 
Once # we require Python 2.7 (or at least its version of unittest), this # function can largely go away. self._result = result result.startTest(self) try: try: self.setUp() except (KeyboardInterrupt, SystemExit): self._aborted = True raise except Exception: result.addError(self, sys.exc_info()) return success = False try: self.runTest() except KeyboardInterrupt: self._aborted = True raise except SkipTest as e: result.addSkip(self, str(e)) # The base class will have already counted this as a # test we "ran", but we want to exclude skipped tests # from those we count towards those run. result.testsRun -= 1 except IgnoreTest as e: result.addIgnore(self, str(e)) # As with skips, ignores also should be excluded from # the number of tests executed. result.testsRun -= 1 except WarnTest as e: result.addWarn(self, str(e)) except ReportedTest as e: pass except self.failureException as e: # This differs from unittest in that we don't capture # the stack trace. This is for historical reasons and # this decision could be revisited in the future, # especially for PythonTest instances. if result.addFailure(self, str(e)): success = True except Exception: result.addError(self, sys.exc_info()) else: success = True try: self.tearDown() except (KeyboardInterrupt, SystemExit): self._aborted = True raise except Exception: result.addError(self, sys.exc_info()) success = False if success: result.addSuccess(self) finally: result.stopTest(self, interrupted=self._aborted) def runTest(self): """Run this test instance. This will return a tuple describing the result of the test. """ env = self._getenv() self._daemonpids.append(env['DAEMON_PIDS']) self._createhgrc(env['HGRCPATH']) vlog('# Test', self.name) ret, out = self._run(env) self._finished = True self._ret = ret self._out = out def describe(ret): if ret < 0: return 'killed by signal: %d' % -ret return 'returned error code %d' % ret self._skipped = False if ret == self.SKIPPED_STATUS: if out is None: # Debug mode, nothing to parse. 
missing = ['unknown'] failed = None else: missing, failed = TTest.parsehghaveoutput(out) if not missing: missing = ['skipped'] if failed: self.fail('hg have failed checking for %s' % failed[-1]) else: self._skipped = True raise SkipTest(missing[-1]) elif ret == 'timeout': self.fail('timed out') elif ret is False: raise WarnTest('no result code from test') elif out != self._refout: # Diff generation may rely on written .err file. if (ret != 0 or out != self._refout) and not self._skipped \ and not self._debug: f = open(self.errpath, 'wb') for line in out: f.write(line) f.close() # The result object handles diff calculation for us. if self._result.addOutputMismatch(self, ret, out, self._refout): # change was accepted, skip failing return if ret: msg = 'output changed and ' + describe(ret) else: msg = 'output changed' self.fail(msg) elif ret: self.fail(describe(ret)) def tearDown(self): """Tasks to perform after run().""" for entry in self._daemonpids: killdaemons(entry) self._daemonpids = [] if self._keeptmpdir: log('\nKeeping testtmp dir: %s\nKeeping threadtmp dir: %s' % (self._testtmp, self._threadtmp)) else: shutil.rmtree(self._testtmp, True) shutil.rmtree(self._threadtmp, True) if (self._ret != 0 or self._out != self._refout) and not self._skipped \ and not self._debug and self._out: f = open(self.errpath, 'wb') for line in self._out: f.write(line) f.close() vlog("# Ret was:", self._ret, '(%s)' % self.name) def _run(self, env): # This should be implemented in child classes to run tests. raise SkipTest('unknown test type') def abort(self): """Terminate execution of this test.""" self._aborted = True def _getreplacements(self): """Obtain a mapping of text replacements to apply to test output. Test output needs to be normalized so it can be compared to expected output. This function defines how some of that normalization will occur. 
""" r = [ (br':%d\b' % self._startport, b':$HGPORT'), (br':%d\b' % (self._startport + 1), b':$HGPORT1'), (br':%d\b' % (self._startport + 2), b':$HGPORT2'), (br'(?m)^(saved backup bundle to .*\.hg)( \(glob\))?$', br'\1 (glob)'), ] if os.name == 'nt': r.append( (b''.join(c.isalpha() and b'[%s%s]' % (c.lower(), c.upper()) or c in b'/\\' and br'[/\\]' or c.isdigit() and c or b'\\' + c for c in self._testtmp), b'$TESTTMP')) else: r.append((re.escape(self._testtmp), b'$TESTTMP')) return r def _getenv(self): """Obtain environment variables to use during test execution.""" env = os.environ.copy() env['TESTTMP'] = self._testtmp env['HOME'] = self._testtmp env["HGPORT"] = str(self._startport) env["HGPORT1"] = str(self._startport + 1) env["HGPORT2"] = str(self._startport + 2) env["HGRCPATH"] = os.path.join(self._threadtmp, b'.hgrc') env["DAEMON_PIDS"] = os.path.join(self._threadtmp, b'daemon.pids') env["HGEDITOR"] = ('"' + sys.executable + '"' + ' -c "import sys; sys.exit(0)"') env["HGMERGE"] = "internal:merge" env["HGUSER"] = "test" env["HGENCODING"] = "ascii" env["HGENCODINGMODE"] = "strict" # Reset some environment variables to well-known values so that # the tests produce repeatable output. 
env['LANG'] = env['LC_ALL'] = env['LANGUAGE'] = 'C' env['TZ'] = 'GMT' env["EMAIL"] = "Foo Bar " env['COLUMNS'] = '80' env['TERM'] = 'xterm' for k in ('HG HGPROF CDPATH GREP_OPTIONS http_proxy no_proxy ' + 'NO_PROXY').split(): if k in env: del env[k] # unset env related to hooks for k in env.keys(): if k.startswith('HG_'): del env[k] return env def _createhgrc(self, path): """Create an hgrc file for this test.""" hgrc = open(path, 'wb') hgrc.write(b'[ui]\n') hgrc.write(b'slash = True\n') hgrc.write(b'interactive = False\n') hgrc.write(b'mergemarkers = detailed\n') hgrc.write(b'promptecho = True\n') hgrc.write(b'[defaults]\n') hgrc.write(b'backout = -d "0 0"\n') hgrc.write(b'commit = -d "0 0"\n') hgrc.write(b'shelve = --date "0 0"\n') hgrc.write(b'tag = -d "0 0"\n') hgrc.write(b'[devel]\n') hgrc.write(b'all-warnings = true\n') hgrc.write(b'[largefiles]\n') hgrc.write(b'usercache = %s\n' % (os.path.join(self._testtmp, b'.cache/largefiles'))) for opt in self._extraconfigopts: section, key = opt.split('.', 1) assert '=' in key, ('extra config opt %s must ' 'have an = for assignment' % opt) hgrc.write(b'[%s]\n%s\n' % (section, key)) hgrc.close() def fail(self, msg): # unittest differentiates between errored and failed. # Failed is denoted by AssertionError (by default at least). raise AssertionError(msg) def _runcommand(self, cmd, env, normalizenewlines=False): """Run command in a sub-process, capturing the output (stdout and stderr). Return a tuple (exitcode, output). output is None in debug mode. 
""" if self._debug: proc = subprocess.Popen(cmd, shell=True, cwd=self._testtmp, env=env) ret = proc.wait() return (ret, None) proc = Popen4(cmd, self._testtmp, self._timeout, env) def cleanup(): terminate(proc) ret = proc.wait() if ret == 0: ret = signal.SIGTERM << 8 killdaemons(env['DAEMON_PIDS']) return ret output = '' proc.tochild.close() try: output = proc.fromchild.read() except KeyboardInterrupt: vlog('# Handling keyboard interrupt') cleanup() raise ret = proc.wait() if wifexited(ret): ret = os.WEXITSTATUS(ret) if proc.timeout: ret = 'timeout' if ret: killdaemons(env['DAEMON_PIDS']) for s, r in self._getreplacements(): output = re.sub(s, r, output) if normalizenewlines: output = output.replace('\r\n', '\n') return ret, output.splitlines(True) class PythonTest(Test): """A Python-based test.""" @property def refpath(self): return os.path.join(self._testdir, b'%s.out' % self.bname) def _run(self, env): py3kswitch = self._py3kwarnings and b' -3' or b'' cmd = b'%s%s "%s"' % (PYTHON, py3kswitch, self.path) vlog("# Running", cmd) normalizenewlines = os.name == 'nt' result = self._runcommand(cmd, env, normalizenewlines=normalizenewlines) if self._aborted: raise KeyboardInterrupt() return result # This script may want to drop globs from lines matching these patterns on # Windows, but check-code.py wants a glob on these lines unconditionally. Don't # warn if that is the case for anything matching these lines. 
checkcodeglobpats = [ re.compile(br'^pushing to \$TESTTMP/.*[^)]$'), re.compile(br'^moving \S+/.*[^)]$'), re.compile(br'^pulling from \$TESTTMP/.*[^)]$') ] bchr = chr if PYTHON3: bchr = lambda x: bytes([x]) class TTest(Test): """A "t test" is a test backed by a .t file.""" SKIPPED_PREFIX = 'skipped: ' FAILED_PREFIX = 'hghave check failed: ' NEEDESCAPE = re.compile(br'[\x00-\x08\x0b-\x1f\x7f-\xff]').search ESCAPESUB = re.compile(br'[\x00-\x08\x0b-\x1f\\\x7f-\xff]').sub ESCAPEMAP = dict((bchr(i), br'\x%02x' % i) for i in range(256)) ESCAPEMAP.update({b'\\': b'\\\\', b'\r': br'\r'}) @property def refpath(self): return os.path.join(self._testdir, self.bname) def _run(self, env): f = open(self.path, 'rb') lines = f.readlines() f.close() salt, script, after, expected = self._parsetest(lines) # Write out the generated script. fname = b'%s.sh' % self._testtmp f = open(fname, 'wb') for l in script: f.write(l) f.close() cmd = b'%s "%s"' % (self._shell, fname) vlog("# Running", cmd) exitcode, output = self._runcommand(cmd, env) if self._aborted: raise KeyboardInterrupt() # Do not merge output if skipped. Return hghave message instead. # Similarly, with --debug, output is None. if exitcode == self.SKIPPED_STATUS or output is None: return exitcode, output return self._processoutput(exitcode, output, salt, after, expected) def _hghave(self, reqs): # TODO do something smarter when all other uses of hghave are gone. 
runtestdir = os.path.abspath(os.path.dirname(_bytespath(__file__))) tdir = runtestdir.replace(b'\\', b'/') proc = Popen4(b'%s -c "%s/hghave %s"' % (self._shell, tdir, b' '.join(reqs)), self._testtmp, 0, self._getenv()) stdout, stderr = proc.communicate() ret = proc.wait() if wifexited(ret): ret = os.WEXITSTATUS(ret) if ret == 2: print(stdout) sys.exit(1) if ret != 0: return False, stdout if 'slow' in reqs: self._timeout = self._slowtimeout return True, None def _parsetest(self, lines): # We generate a shell script which outputs unique markers to line # up script results with our source. These markers include input # line number and the last return code. salt = b"SALT%d" % time.time() def addsalt(line, inpython): if inpython: script.append(b'%s %d 0\n' % (salt, line)) else: script.append(b'echo %s %d $?\n' % (salt, line)) script = [] # After we run the shell script, we re-unify the script output # with non-active parts of the source, with synchronization by our # SALT line number markers. The after table contains the non-active # components, ordered by line number. after = {} # Expected shell script output. expected = {} pos = prepos = -1 # True or False when in a true or false conditional section skipping = None # We keep track of whether or not we're in a Python block so we # can generate the surrounding doctest magic. inpython = False if self._debug: script.append(b'set -x\n') if os.getenv('MSYSTEM'): script.append(b'alias pwd="pwd -W"\n') for n, l in enumerate(lines): if not l.endswith(b'\n'): l += b'\n' if l.startswith(b'#require'): lsplit = l.split() if len(lsplit) < 2 or lsplit[0] != b'#require': after.setdefault(pos, []).append(' !!! invalid #require\n') haveresult, message = self._hghave(lsplit[1:]) if not haveresult: script = [b'echo "%s"\nexit 80\n' % message] break after.setdefault(pos, []).append(l) elif l.startswith(b'#if'): lsplit = l.split() if len(lsplit) < 2 or lsplit[0] != b'#if': after.setdefault(pos, []).append(' !!! 
invalid #if\n') if skipping is not None: after.setdefault(pos, []).append(' !!! nested #if\n') skipping = not self._hghave(lsplit[1:])[0] after.setdefault(pos, []).append(l) elif l.startswith(b'#else'): if skipping is None: after.setdefault(pos, []).append(' !!! missing #if\n') skipping = not skipping after.setdefault(pos, []).append(l) elif l.startswith(b'#endif'): if skipping is None: after.setdefault(pos, []).append(' !!! missing #if\n') skipping = None after.setdefault(pos, []).append(l) elif skipping: after.setdefault(pos, []).append(l) elif l.startswith(b' >>> '): # python inlines after.setdefault(pos, []).append(l) prepos = pos pos = n if not inpython: # We've just entered a Python block. Add the header. inpython = True addsalt(prepos, False) # Make sure we report the exit code. script.append(b'%s -m heredoctest < '): # continuations after.setdefault(prepos, []).append(l) script.append(l[4:]) elif l.startswith(b' '): # results # Queue up a list of expected results. expected.setdefault(pos, []).append(l[2:]) else: if inpython: script.append(b'EOF\n') inpython = False # Non-command/result. Queue up for merged output. after.setdefault(pos, []).append(l) if inpython: script.append(b'EOF\n') if skipping is not None: after.setdefault(pos, []).append(' !!! missing #endif\n') addsalt(n + 1, False) return salt, script, after, expected def _processoutput(self, exitcode, output, salt, after, expected): # Merge the script output back into a unified test. warnonly = 1 # 1: not yet; 2: yes; 3: for sure not if exitcode != 0: warnonly = 3 pos = -1 postout = [] for l in output: lout, lcmd = l, None if salt in l: lout, lcmd = l.split(salt, 1) while lout: if not lout.endswith(b'\n'): lout += b' (no-eol)\n' # Find the expected output at the current position. el = None if expected.get(pos, None): el = expected[pos].pop(0) r = TTest.linematch(el, lout) if isinstance(r, str): if r == '+glob': lout = el[:-1] + ' (glob)\n' r = '' # Warn only this line. 
elif r == '-glob': lout = ''.join(el.rsplit(' (glob)', 1)) r = '' # Warn only this line. elif r == "retry": postout.append(b' ' + el) continue else: log('\ninfo, unknown linematch result: %r\n' % r) r = False if r: postout.append(b' ' + el) else: if self.NEEDESCAPE(lout): lout = TTest._stringescape(b'%s (esc)\n' % lout.rstrip(b'\n')) postout.append(b' ' + lout) # Let diff deal with it. if r != '': # If line failed. warnonly = 3 # for sure not elif warnonly == 1: # Is "not yet" and line is warn only. warnonly = 2 # Yes do warn. break # clean up any optional leftovers while expected.get(pos, None): el = expected[pos].pop(0) if not el.endswith(b" (?)\n"): expected[pos].insert(0, el) break postout.append(b' ' + el) if lcmd: # Add on last return code. ret = int(lcmd.split()[1]) if ret != 0: postout.append(b' [%d]\n' % ret) if pos in after: # Merge in non-active test bits. postout += after.pop(pos) pos = int(lcmd.split()[0]) if pos in after: postout += after.pop(pos) if warnonly == 2: exitcode = False # Set exitcode to warned. return exitcode, postout @staticmethod def rematch(el, l): try: # use \Z to ensure that the regex matches to the end of the string if os.name == 'nt': return re.match(el + br'\r?\n\Z', l) return re.match(el + br'\n\Z', l) except re.error: # el is an invalid regex return False @staticmethod def globmatch(el, l): # The only supported special characters are * and ? plus / which also # matches \ on windows. Escaping of these characters is supported. if el + b'\n' == l: if os.altsep: # matching on "/" is not needed for this line for pat in checkcodeglobpats: if pat.match(el): return True return b'-glob' return True i, n = 0, len(el) res = b'' while i < n: c = el[i:i + 1] i += 1 if c == b'\\' and i < n and el[i:i + 1] in b'*?\\/': res += el[i - 1:i + 1] i += 1 elif c == b'*': res += b'.*' elif c == b'?': res += b'.' 
elif c == b'/' and os.altsep: res += b'[/\\\\]' else: res += re.escape(c) return TTest.rematch(res, l) @staticmethod def linematch(el, l): retry = False if el == l: # perfect match (fast) return True if el: if el.endswith(b" (?)\n"): retry = "retry" el = el[:-5] + "\n" if el.endswith(b" (esc)\n"): if PYTHON3: el = el[:-7].decode('unicode_escape') + '\n' el = el.encode('utf-8') else: el = el[:-7].decode('string-escape') + '\n' if el == l or os.name == 'nt' and el[:-1] + b'\r\n' == l: return True if el.endswith(b" (re)\n"): return TTest.rematch(el[:-6], l) or retry if el.endswith(b" (glob)\n"): # ignore '(glob)' added to l by 'replacements' if l.endswith(b" (glob)\n"): l = l[:-8] + b"\n" return TTest.globmatch(el[:-8], l) if os.altsep and l.replace(b'\\', b'/') == el: return b'+glob' return retry @staticmethod def parsehghaveoutput(lines): '''Parse hghave log lines. Return tuple of lists (missing, failed): * the missing/unknown features * the features for which existence check failed''' missing = [] failed = [] for line in lines: if line.startswith(TTest.SKIPPED_PREFIX): line = line.splitlines()[0] missing.append(line[len(TTest.SKIPPED_PREFIX):]) elif line.startswith(TTest.FAILED_PREFIX): line = line.splitlines()[0] failed.append(line[len(TTest.FAILED_PREFIX):]) return missing, failed @staticmethod def _escapef(m): return TTest.ESCAPEMAP[m.group(0)] @staticmethod def _stringescape(s): return TTest.ESCAPESUB(TTest._escapef, s) iolock = threading.RLock() class SkipTest(Exception): """Raised to indicate that a test is to be skipped.""" class IgnoreTest(Exception): """Raised to indicate that a test is to be ignored.""" class WarnTest(Exception): """Raised to indicate that a test warned.""" class ReportedTest(Exception): """Raised to indicate that a test already reported.""" class TestResult(unittest._TextTestResult): """Holds results when executing via unittest.""" # Don't worry too much about accessing the non-public _TextTestResult. 
# It is relatively common in Python testing tools. def __init__(self, options, *args, **kwargs): super(TestResult, self).__init__(*args, **kwargs) self._options = options # unittest.TestResult didn't have skipped until 2.7. We need to # polyfill it. self.skipped = [] # We have a custom "ignored" result that isn't present in any Python # unittest implementation. It is very similar to skipped. It may make # sense to map it into skip some day. self.ignored = [] # We have a custom "warned" result that isn't present in any Python # unittest implementation. It is very similar to failed. It may make # sense to map it into fail some day. self.warned = [] self.times = [] self._firststarttime = None # Data stored for the benefit of generating xunit reports. self.successes = [] self.faildata = {} def addFailure(self, test, reason): self.failures.append((test, reason)) if self._options.first: self.stop() else: with iolock: if reason == "timed out": self.stream.write('t') else: if not self._options.nodiff: self.stream.write('\nERROR: %s output changed\n' % test) self.stream.write('!') self.stream.flush() def addSuccess(self, test): with iolock: super(TestResult, self).addSuccess(test) self.successes.append(test) def addError(self, test, err): super(TestResult, self).addError(test, err) if self._options.first: self.stop() # Polyfill. 
def addSkip(self, test, reason): self.skipped.append((test, reason)) with iolock: if self.showAll: self.stream.writeln('skipped %s' % reason) else: self.stream.write('s') self.stream.flush() def addIgnore(self, test, reason): self.ignored.append((test, reason)) with iolock: if self.showAll: self.stream.writeln('ignored %s' % reason) else: if reason not in ('not retesting', "doesn't match keyword"): self.stream.write('i') else: self.testsRun += 1 self.stream.flush() def addWarn(self, test, reason): self.warned.append((test, reason)) if self._options.first: self.stop() with iolock: if self.showAll: self.stream.writeln('warned %s' % reason) else: self.stream.write('~') self.stream.flush() def addOutputMismatch(self, test, ret, got, expected): """Record a mismatch in test output for a particular test.""" if self.shouldStop: # don't print, some other test case already failed and # printed, we're just stale and probably failed due to our # temp dir getting cleaned up. return accepted = False failed = False lines = [] with iolock: if self._options.nodiff: pass elif self._options.view: v = self._options.view if PYTHON3: v = _bytespath(v) os.system(b"%s %s %s" % (v, test.refpath, test.errpath)) else: servefail, lines = getdiff(expected, got, test.refpath, test.errpath) if servefail: self.addFailure( test, 'server failed to start (HGPORT=%s)' % test._startport) raise ReportedTest('server failed to start') else: self.stream.write('\n') for line in lines: if PYTHON3: self.stream.flush() self.stream.buffer.write(line) self.stream.buffer.flush() else: self.stream.write(line) self.stream.flush() # handle interactive prompt without releasing iolock if self._options.interactive: self.stream.write('Accept this change? 
[n] ') answer = sys.stdin.readline().strip() if answer.lower() in ('y', 'yes'): if test.name.endswith('.t'): rename(test.errpath, test.path) else: rename(test.errpath, '%s.out' % test.path) accepted = True if not accepted and not failed: self.faildata[test.name] = b''.join(lines) return accepted def startTest(self, test): super(TestResult, self).startTest(test) # os.times module computes the user time and system time spent by # child's processes along with real elapsed time taken by a process. # This module has one limitation. It can only work for Linux user # and not for Windows. test.started = os.times() if self._firststarttime is None: # thread racy but irrelevant self._firststarttime = test.started[4] def stopTest(self, test, interrupted=False): super(TestResult, self).stopTest(test) test.stopped = os.times() starttime = test.started endtime = test.stopped origin = self._firststarttime self.times.append((test.name, endtime[2] - starttime[2], # user space CPU time endtime[3] - starttime[3], # sys space CPU time endtime[4] - starttime[4], # real time starttime[4] - origin, # start date in run context endtime[4] - origin, # end date in run context )) if interrupted: with iolock: self.stream.writeln('INTERRUPTED: %s (after %d seconds)' % ( test.name, self.times[-1][3])) class TestSuite(unittest.TestSuite): """Custom unittest TestSuite that knows how to execute Mercurial tests.""" def __init__(self, testdir, jobs=1, whitelist=None, blacklist=None, retest=False, keywords=None, loop=False, runs_per_test=1, loadtest=None, showchannels=False, *args, **kwargs): """Create a new instance that can run tests with a configuration. testdir specifies the directory where tests are executed from. This is typically the ``tests`` directory from Mercurial's source repository. jobs specifies the number of jobs to run concurrently. Each test executes on its own thread. Tests actually spawn new processes, so state mutation should not be an issue. 
If there is only one job, it will use the main thread. whitelist and blacklist denote tests that have been whitelisted and blacklisted, respectively. These arguments don't belong in TestSuite. Instead, whitelist and blacklist should be handled by the thing that populates the TestSuite with tests. They are present to preserve backwards compatible behavior which reports skipped tests as part of the results. retest denotes whether to retest failed tests. This arguably belongs outside of TestSuite. keywords denotes key words that will be used to filter which tests to execute. This arguably belongs outside of TestSuite. loop denotes whether to loop over tests forever. """ super(TestSuite, self).__init__(*args, **kwargs) self._jobs = jobs self._whitelist = whitelist self._blacklist = blacklist self._retest = retest self._keywords = keywords self._loop = loop self._runs_per_test = runs_per_test self._loadtest = loadtest self._showchannels = showchannels def run(self, result): # We have a number of filters that need to be applied. We do this # here instead of inside Test because it makes the running logic for # Test simpler. 
tests = [] num_tests = [0] for test in self._tests: def get(): num_tests[0] += 1 if getattr(test, 'should_reload', False): return self._loadtest(test.bname, num_tests[0]) return test if not os.path.exists(test.path): result.addSkip(test, "Doesn't exist") continue if not (self._whitelist and test.name in self._whitelist): if self._blacklist and test.bname in self._blacklist: result.addSkip(test, 'blacklisted') continue if self._retest and not os.path.exists(test.errpath): result.addIgnore(test, 'not retesting') continue if self._keywords: f = open(test.path, 'rb') t = f.read().lower() + test.bname.lower() f.close() ignored = False for k in self._keywords.lower().split(): if k not in t: result.addIgnore(test, "doesn't match keyword") ignored = True break if ignored: continue for _ in xrange(self._runs_per_test): tests.append(get()) runtests = list(tests) done = queue.Queue() running = 0 channels = [""] * self._jobs def job(test, result): for n, v in enumerate(channels): if not v: channel = n break channels[channel] = "=" + test.name[5:].split(".")[0] try: test(result) done.put(None) except KeyboardInterrupt: pass except: # re-raises done.put(('!', test, 'run-test raised an error, see traceback')) raise try: channels[channel] = '' except IndexError: pass def stat(): count = 0 while channels: d = '\n%03s ' % count for n, v in enumerate(channels): if v: d += v[0] channels[n] = v[1:] or '.' 
else: d += ' ' d += ' ' with iolock: sys.stdout.write(d + ' ') sys.stdout.flush() for x in xrange(10): if channels: time.sleep(.1) count += 1 stoppedearly = False if self._showchannels: statthread = threading.Thread(target=stat, name="stat") statthread.start() try: while tests or running: if not done.empty() or running == self._jobs or not tests: try: done.get(True, 1) running -= 1 if result and result.shouldStop: stoppedearly = True break except queue.Empty: continue if tests and not running == self._jobs: test = tests.pop(0) if self._loop: if getattr(test, 'should_reload', False): num_tests[0] += 1 tests.append( self._loadtest(test.name, num_tests[0])) else: tests.append(test) if self._jobs == 1: job(test, result) else: t = threading.Thread(target=job, name=test.name, args=(test, result)) t.start() running += 1 # If we stop early we still need to wait on started tests to # finish. Otherwise, there is a race between the test completing # and the test's cleanup code running. This could result in the # test reporting incorrect. if stoppedearly: while running: try: done.get(True, 1) running -= 1 except queue.Empty: continue except KeyboardInterrupt: for test in runtests: test.abort() channels = [] return result # Save the most recent 5 wall-clock runtimes of each test to a # human-readable text file named .testtimes. Tests are sorted # alphabetically, while times for each test are listed from oldest to # newest. 
def loadtimes(testdir): times = [] try: with open(os.path.join(testdir, '.testtimes-')) as fp: for line in fp: ts = line.split() times.append((ts[0], [float(t) for t in ts[1:]])) except IOError as err: if err.errno != errno.ENOENT: raise return times def savetimes(testdir, result): saved = dict(loadtimes(testdir)) maxruns = 5 skipped = set([str(t[0]) for t in result.skipped]) for tdata in result.times: test, real = tdata[0], tdata[3] if test not in skipped: ts = saved.setdefault(test, []) ts.append(real) ts[:] = ts[-maxruns:] fd, tmpname = tempfile.mkstemp(prefix='.testtimes', dir=testdir, text=True) with os.fdopen(fd, 'w') as fp: for name, ts in sorted(saved.iteritems()): fp.write('%s %s\n' % (name, ' '.join(['%.3f' % (t,) for t in ts]))) timepath = os.path.join(testdir, '.testtimes') try: os.unlink(timepath) except OSError: pass try: os.rename(tmpname, timepath) except OSError: pass class TextTestRunner(unittest.TextTestRunner): """Custom unittest test runner that uses appropriate settings.""" def __init__(self, runner, *args, **kwargs): super(TextTestRunner, self).__init__(*args, **kwargs) self._runner = runner def run(self, test): result = TestResult(self._runner.options, self.stream, self.descriptions, self.verbosity) test(result) failed = len(result.failures) warned = len(result.warned) skipped = len(result.skipped) ignored = len(result.ignored) with iolock: self.stream.writeln('') if not self._runner.options.noskips: for test, msg in result.skipped: self.stream.writeln('Skipped %s: %s' % (test.name, msg)) for test, msg in result.warned: self.stream.writeln('Warned %s: %s' % (test.name, msg)) for test, msg in result.failures: self.stream.writeln('Failed %s: %s' % (test.name, msg)) for test, msg in result.errors: self.stream.writeln('Errored %s: %s' % (test.name, msg)) if self._runner.options.xunit: with open(self._runner.options.xunit, 'wb') as xuf: timesd = dict((t[0], t[3]) for t in result.times) doc = minidom.Document() s = doc.createElement('testsuite') 
s.setAttribute('name', 'run-tests') s.setAttribute('tests', str(result.testsRun)) s.setAttribute('errors', "0") # TODO s.setAttribute('failures', str(failed)) s.setAttribute('skipped', str(skipped + ignored)) doc.appendChild(s) for tc in result.successes: t = doc.createElement('testcase') t.setAttribute('name', tc.name) t.setAttribute('time', '%.3f' % timesd[tc.name]) s.appendChild(t) for tc, err in sorted(result.faildata.items()): t = doc.createElement('testcase') t.setAttribute('name', tc) t.setAttribute('time', '%.3f' % timesd[tc]) # createCDATASection expects a unicode or it will # convert using default conversion rules, which will # fail if string isn't ASCII. err = cdatasafe(err).decode('utf-8', 'replace') cd = doc.createCDATASection(err) t.appendChild(cd) s.appendChild(t) xuf.write(doc.toprettyxml(indent=' ', encoding='utf-8')) if self._runner.options.json: if json is None: raise ImportError("json module not installed") jsonpath = os.path.join(self._runner._testdir, 'report.json') with open(jsonpath, 'w') as fp: timesd = {} for tdata in result.times: test = tdata[0] timesd[test] = tdata[1:] outcome = {} groups = [('success', ((tc, None) for tc in result.successes)), ('failure', result.failures), ('skip', result.skipped)] for res, testcases in groups: for tc, __ in testcases: if tc.name in timesd: tres = {'result': res, 'time': ('%0.3f' % timesd[tc.name][2]), 'cuser': ('%0.3f' % timesd[tc.name][0]), 'csys': ('%0.3f' % timesd[tc.name][1]), 'start': ('%0.3f' % timesd[tc.name][3]), 'end': ('%0.3f' % timesd[tc.name][4]), 'diff': result.faildata.get(tc.name, ''), } else: # blacklisted test tres = {'result': res} outcome[tc.name] = tres jsonout = json.dumps(outcome, sort_keys=True, indent=4) fp.writelines(("testreport =", jsonout)) self._runner._checkhglib('Tested') savetimes(self._runner._testdir, result) self.stream.writeln( '# Ran %d tests, %d skipped, %d warned, %d failed.' 
% (result.testsRun, skipped + ignored, warned, failed)) if failed: self.stream.writeln('python hash seed: %s' % os.environ['PYTHONHASHSEED']) if self._runner.options.time: self.printtimes(result.times) return result def printtimes(self, times): # iolock held by run self.stream.writeln('# Producing time report') times.sort(key=lambda t: (t[3])) cols = '%7.3f %7.3f %7.3f %7.3f %7.3f %s' self.stream.writeln('%-7s %-7s %-7s %-7s %-7s %s' % ('start', 'end', 'cuser', 'csys', 'real', 'Test')) for tdata in times: test = tdata[0] cuser, csys, real, start, end = tdata[1:6] self.stream.writeln(cols % (start, end, cuser, csys, real, test)) class TestRunner(object): """Holds context for executing tests. Tests rely on a lot of state. This object holds it for them. """ # Programs required to run tests. REQUIREDTOOLS = [ os.path.basename(_bytespath(sys.executable)), b'diff', b'grep', b'unzip', b'gunzip', b'bunzip2', b'sed', ] # Maps file extensions to test class. TESTTYPES = [ (b'.py', PythonTest), (b'.t', TTest), ] def __init__(self): self.options = None self._hgroot = None self._testdir = None self._hgtmp = None self._installdir = None self._bindir = None self._tmpbinddir = None self._pythondir = None self._coveragefile = None self._createdfiles = [] self._hgpath = None self._portoffset = 0 self._ports = {} def run(self, args, parser=None): """Run the test suite.""" oldmask = os.umask(0o22) try: parser = parser or getparser() options, args = parseargs(args, parser) # positional arguments are paths to test files to run, so # we make sure they're all bytestrings args = [_bytespath(a) for a in args] self.options = options self._checktools() tests = self.findtests(args) if options.profile_runner: import statprof statprof.start() result = self._run(tests) if options.profile_runner: statprof.stop() statprof.display() return result finally: os.umask(oldmask) def _run(self, tests): if self.options.random: random.shuffle(tests) else: # keywords for slow tests slow = {b'svn': 10, b'cvs': 
10, b'hghave': 10, b'largefiles-update': 10, b'run-tests': 10, b'corruption': 10, b'race': 10, b'i18n': 10, b'check': 100, b'gendoc': 100, b'contrib-perf': 200, } perf = {} def sortkey(f): # run largest tests first, as they tend to take the longest try: return perf[f] except KeyError: try: val = -os.stat(f).st_size except OSError as e: if e.errno != errno.ENOENT: raise perf[f] = -1e9 # file does not exist, tell early return -1e9 for kw, mul in slow.items(): if kw in f: val *= mul if f.endswith('.py'): val /= 10.0 perf[f] = val / 1000.0 return perf[f] tests.sort(key=sortkey) self._testdir = osenvironb[b'TESTDIR'] = getattr( os, 'getcwdb', os.getcwd)() if 'PYTHONHASHSEED' not in os.environ: # use a random python hash seed all the time # we do the randomness ourself to know what seed is used os.environ['PYTHONHASHSEED'] = str(random.getrandbits(32)) if self.options.tmpdir: self.options.keep_tmpdir = True tmpdir = _bytespath(self.options.tmpdir) if os.path.exists(tmpdir): # Meaning of tmpdir has changed since 1.3: we used to create # HGTMP inside tmpdir; now HGTMP is tmpdir. So fail if # tmpdir already exists. print("error: temp dir %r already exists" % tmpdir) return 1 # Automatically removing tmpdir sounds convenient, but could # really annoy anyone in the habit of using "--tmpdir=/tmp" # or "--tmpdir=$HOME". #vlog("# Removing temp dir", tmpdir) #shutil.rmtree(tmpdir) os.makedirs(tmpdir) else: d = None if os.name == 'nt': # without this, we get the default temp dir location, but # in all lowercase, which causes troubles with paths (issue3490) d = osenvironb.get(b'TMP', None) tmpdir = tempfile.mkdtemp(b'', b'hgtests.', d) self._hgtmp = osenvironb[b'HGTMP'] = ( os.path.realpath(tmpdir)) if self.options.with_hg: self._installdir = None whg = self.options.with_hg # If --with-hg is not specified, we have bytes already, # but if it was specified in python3 we get a str, so we # have to encode it back into a bytes. 
if PYTHON3: if not isinstance(whg, bytes): whg = _bytespath(whg) self._bindir = os.path.dirname(os.path.realpath(whg)) assert isinstance(self._bindir, bytes) self._tmpbindir = os.path.join(self._hgtmp, b'install', b'bin') os.makedirs(self._tmpbindir) # This looks redundant with how Python initializes sys.path from # the location of the script being executed. Needed because the # "hg" specified by --with-hg is not the only Python script # executed in the test suite that needs to import 'mercurial' # ... which means it's not really redundant at all. self._pythondir = self._bindir else: self._installdir = os.path.join(self._hgtmp, b"install") self._bindir = osenvironb[b"BINDIR"] = \ os.path.join(self._installdir, b"bin") self._tmpbindir = self._bindir self._pythondir = os.path.join(self._installdir, b"lib", b"python") osenvironb[b"BINDIR"] = self._bindir osenvironb[b"PYTHON"] = PYTHON fileb = _bytespath(__file__) runtestdir = os.path.abspath(os.path.dirname(fileb)) osenvironb[b'RUNTESTDIR'] = runtestdir if PYTHON3: sepb = _bytespath(os.pathsep) else: sepb = os.pathsep path = [self._bindir, runtestdir] + osenvironb[b"PATH"].split(sepb) if os.path.islink(__file__): # test helper will likely be at the end of the symlink realfile = os.path.realpath(fileb) realdir = os.path.abspath(os.path.dirname(realfile)) path.insert(2, realdir) if self._testdir != runtestdir: path = [self._testdir] + path if self._tmpbindir != self._bindir: path = [self._tmpbindir] + path osenvironb[b"PATH"] = sepb.join(path) # Include TESTDIR in PYTHONPATH so that out-of-tree extensions # can run .../tests/run-tests.py test-foo where test-foo # adds an extension to HGRC. Also include run-test.py directory to # import modules like heredoctest. pypath = [self._pythondir, self._testdir, runtestdir] # We have to augment PYTHONPATH, rather than simply replacing # it, in case external libraries are only available via current # PYTHONPATH. 
(In particular, the Subversion bindings on OS X # are in /opt/subversion.) oldpypath = osenvironb.get(IMPL_PATH) if oldpypath: pypath.append(oldpypath) osenvironb[IMPL_PATH] = sepb.join(pypath) if self.options.pure: os.environ["HGTEST_RUN_TESTS_PURE"] = "--pure" if self.options.allow_slow_tests: os.environ["HGTEST_SLOW"] = "slow" elif 'HGTEST_SLOW' in os.environ: del os.environ['HGTEST_SLOW'] self._coveragefile = os.path.join(self._testdir, b'.coverage') vlog("# Using TESTDIR", self._testdir) vlog("# Using RUNTESTDIR", osenvironb[b'RUNTESTDIR']) vlog("# Using HGTMP", self._hgtmp) vlog("# Using PATH", os.environ["PATH"]) vlog("# Using", IMPL_PATH, osenvironb[IMPL_PATH]) try: return self._runtests(tests) or 0 finally: time.sleep(.1) self._cleanup() def findtests(self, args): """Finds possible test files from arguments. If you wish to inject custom tests into the test harness, this would be a good function to monkeypatch or override in a derived class. """ if not args: if self.options.changed: proc = Popen4('hg st --rev "%s" -man0 .' 
% self.options.changed, None, 0) stdout, stderr = proc.communicate() args = stdout.strip(b'\0').split(b'\0') else: args = os.listdir(b'.') return [t for t in args if os.path.basename(t).startswith(b'test-') and (t.endswith(b'.py') or t.endswith(b'.t'))] def _runtests(self, tests): try: if self._installdir: self._installhg() self._checkhglib("Testing") else: self._usecorrectpython() if self.options.restart: orig = list(tests) while tests: if os.path.exists(tests[0] + ".err"): break tests.pop(0) if not tests: print("running all tests") tests = orig tests = [self._gettest(t, i) for i, t in enumerate(tests)] failed = False warned = False kws = self.options.keywords if kws is not None and PYTHON3: kws = kws.encode('utf-8') suite = TestSuite(self._testdir, jobs=self.options.jobs, whitelist=self.options.whitelisted, blacklist=self.options.blacklist, retest=self.options.retest, keywords=kws, loop=self.options.loop, runs_per_test=self.options.runs_per_test, showchannels=self.options.showchannels, tests=tests, loadtest=self._gettest) verbosity = 1 if self.options.verbose: verbosity = 2 runner = TextTestRunner(self, verbosity=verbosity) result = runner.run(suite) if result.failures: failed = True if result.warned: warned = True if self.options.anycoverage: self._outputcoverage() except KeyboardInterrupt: failed = True print("\ninterrupted!") if failed: return 1 if warned: return 80 def _getport(self, count): port = self._ports.get(count) # do we have a cached entry? if port is None: portneeded = 3 # above 100 tries we just give up and let test reports failure for tries in xrange(100): allfree = True port = self.options.port + self._portoffset for idx in xrange(portneeded): if not checkportisavailable(port + idx): allfree = False break self._portoffset += portneeded if allfree: break self._ports[count] = port return port def _gettest(self, test, count): """Obtain a Test by looking at its filename. Returns a Test instance. 
The Test may not be runnable if it doesn't map to a known type. """ lctest = test.lower() testcls = Test for ext, cls in self.TESTTYPES: if lctest.endswith(ext): testcls = cls break refpath = os.path.join(self._testdir, test) tmpdir = os.path.join(self._hgtmp, b'child%d' % count) t = testcls(refpath, tmpdir, keeptmpdir=self.options.keep_tmpdir, debug=self.options.debug, timeout=self.options.timeout, startport=self._getport(count), extraconfigopts=self.options.extra_config_opt, py3kwarnings=self.options.py3k_warnings, shell=self.options.shell) t.should_reload = True return t def _cleanup(self): """Clean up state from this test invocation.""" if self.options.keep_tmpdir: return vlog("# Cleaning up HGTMP", self._hgtmp) shutil.rmtree(self._hgtmp, True) for f in self._createdfiles: try: os.remove(f) except OSError: pass def _usecorrectpython(self): """Configure the environment to use the appropriate Python in tests.""" # Tests must use the same interpreter as us or bad things will happen. pyexename = sys.platform == 'win32' and b'python.exe' or b'python' if getattr(os, 'symlink', None): vlog("# Making python executable in test path a symlink to '%s'" % sys.executable) mypython = os.path.join(self._tmpbindir, pyexename) try: if os.readlink(mypython) == sys.executable: return os.unlink(mypython) except OSError as err: if err.errno != errno.ENOENT: raise if self._findprogram(pyexename) != sys.executable: try: os.symlink(sys.executable, mypython) self._createdfiles.append(mypython) except OSError as err: # child processes may race, which is harmless if err.errno != errno.EEXIST: raise else: exedir, exename = os.path.split(sys.executable) vlog("# Modifying search path to find %s as %s in '%s'" % (exename, pyexename, exedir)) path = os.environ['PATH'].split(os.pathsep) while exedir in path: path.remove(exedir) os.environ['PATH'] = os.pathsep.join([exedir] + path) if not self._findprogram(pyexename): print("WARNING: Cannot find %s in search path" % pyexename) def 
_installhg(self): """Install hg into the test environment. This will also configure hg with the appropriate testing settings. """ vlog("# Performing temporary installation of HG") installerrs = os.path.join(b"tests", b"install.err") compiler = '' if self.options.compiler: compiler = '--compiler ' + self.options.compiler if self.options.pure: pure = b"--pure" else: pure = b"" py3 = '' # Run installer in hg root script = os.path.realpath(sys.argv[0]) exe = sys.executable if PYTHON3: py3 = b'--c2to3' compiler = _bytespath(compiler) script = _bytespath(script) exe = _bytespath(exe) hgroot = os.path.dirname(os.path.dirname(script)) self._hgroot = hgroot os.chdir(hgroot) nohome = b'--home=""' if os.name == 'nt': # The --home="" trick works only on OS where os.sep == '/' # because of a distutils convert_path() fast-path. Avoid it at # least on Windows for now, deal with .pydistutils.cfg bugs # when they happen. nohome = b'' cmd = (b'%(exe)s setup.py %(py3)s %(pure)s clean --all' b' build %(compiler)s --build-base="%(base)s"' b' install --force --prefix="%(prefix)s"' b' --install-lib="%(libdir)s"' b' --install-scripts="%(bindir)s" %(nohome)s >%(logfile)s 2>&1' % {b'exe': exe, b'py3': py3, b'pure': pure, b'compiler': compiler, b'base': os.path.join(self._hgtmp, b"build"), b'prefix': self._installdir, b'libdir': self._pythondir, b'bindir': self._bindir, b'nohome': nohome, b'logfile': installerrs}) # setuptools requires install directories to exist. 
def makedirs(p): try: os.makedirs(p) except OSError as e: if e.errno != errno.EEXIST: raise makedirs(self._pythondir) makedirs(self._bindir) vlog("# Running", cmd) if os.system(cmd) == 0: if not self.options.verbose: try: os.remove(installerrs) except OSError as e: if e.errno != errno.ENOENT: raise else: f = open(installerrs, 'rb') for line in f: if PYTHON3: sys.stdout.buffer.write(line) else: sys.stdout.write(line) f.close() sys.exit(1) os.chdir(self._testdir) self._usecorrectpython() if self.options.py3k_warnings and not self.options.anycoverage: vlog("# Updating hg command to enable Py3k Warnings switch") f = open(os.path.join(self._bindir, 'hg'), 'rb') lines = [line.rstrip() for line in f] lines[0] += ' -3' f.close() f = open(os.path.join(self._bindir, 'hg'), 'wb') for line in lines: f.write(line + '\n') f.close() hgbat = os.path.join(self._bindir, b'hg.bat') if os.path.isfile(hgbat): # hg.bat expects to be put in bin/scripts while run-tests.py # installation layout put it in bin/ directly. Fix it f = open(hgbat, 'rb') data = f.read() f.close() if b'"%~dp0..\python" "%~dp0hg" %*' in data: data = data.replace(b'"%~dp0..\python" "%~dp0hg" %*', b'"%~dp0python" "%~dp0hg" %*') f = open(hgbat, 'wb') f.write(data) f.close() else: print('WARNING: cannot fix hg.bat reference to python.exe') if self.options.anycoverage: custom = os.path.join(self._testdir, 'sitecustomize.py') target = os.path.join(self._pythondir, 'sitecustomize.py') vlog('# Installing coverage trigger to %s' % target) shutil.copyfile(custom, target) rc = os.path.join(self._testdir, '.coveragerc') vlog('# Installing coverage rc to %s' % rc) os.environ['COVERAGE_PROCESS_START'] = rc covdir = os.path.join(self._installdir, '..', 'coverage') try: os.mkdir(covdir) except OSError as e: if e.errno != errno.EEXIST: raise os.environ['COVERAGE_DIR'] = covdir def _checkhglib(self, verb): """Ensure that the 'mercurial' package imported by python is the one we expect it to be. 
If not, print a warning to stderr.""" if ((self._bindir == self._pythondir) and (self._bindir != self._tmpbindir)): # The pythondir has been inferred from --with-hg flag. # We cannot expect anything sensible here. return expecthg = os.path.join(self._pythondir, b'mercurial') actualhg = self._gethgpath() if os.path.abspath(actualhg) != os.path.abspath(expecthg): sys.stderr.write('warning: %s with unexpected mercurial lib: %s\n' ' (expected %s)\n' % (verb, actualhg, expecthg)) def _gethgpath(self): """Return the path to the mercurial package that is actually found by the current Python interpreter.""" if self._hgpath is not None: return self._hgpath cmd = b'%s -c "import mercurial; print (mercurial.__path__[0])"' cmd = cmd % PYTHON if PYTHON3: cmd = _strpath(cmd) pipe = os.popen(cmd) try: self._hgpath = _bytespath(pipe.read().strip()) finally: pipe.close() return self._hgpath def _outputcoverage(self): """Produce code coverage output.""" from coverage import coverage vlog('# Producing coverage report') # chdir is the easiest way to get short, relative paths in the # output. os.chdir(self._hgroot) covdir = os.path.join(self._installdir, '..', 'coverage') cov = coverage(data_file=os.path.join(covdir, 'cov')) # Map install directory paths back to source directory. 
cov.config.paths['srcdir'] = ['.', self._pythondir] cov.combine() omit = [os.path.join(x, '*') for x in [self._bindir, self._testdir]] cov.report(ignore_errors=True, omit=omit) if self.options.htmlcov: htmldir = os.path.join(self._testdir, 'htmlcov') cov.html_report(directory=htmldir, omit=omit) if self.options.annotate: adir = os.path.join(self._testdir, 'annotated') if not os.path.isdir(adir): os.mkdir(adir) cov.annotate(directory=adir, omit=omit) def _findprogram(self, program): """Search PATH for a executable program""" dpb = _bytespath(os.defpath) sepb = _bytespath(os.pathsep) for p in osenvironb.get(b'PATH', dpb).split(sepb): name = os.path.join(p, program) if os.name == 'nt' or os.access(name, os.X_OK): return name return None def _checktools(self): """Ensure tools required to run tests are present.""" for p in self.REQUIREDTOOLS: if os.name == 'nt' and not p.endswith('.exe'): p += '.exe' found = self._findprogram(p) if found: vlog("# Found prerequisite", p, "at", found) else: print("WARNING: Did not find prerequisite tool: %s " % p) if __name__ == '__main__': runner = TestRunner() try: import msvcrt msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY) msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY) msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY) except ImportError: pass sys.exit(runner.run(sys.argv[1:])) mercurial-3.7.3/tests/test-hgweb-diffs.t0000644000175000017500000007706612676531525017640 0ustar mpmmpm00000000000000#require serve setting up repo $ hg init test $ cd test $ echo a > a $ echo b > b $ hg ci -Ama adding a adding b change permissions for git diffs $ hg import -q --bypass - < # HG changeset patch > # User test > # Date 0 0 > b > > diff --git a/a b/a > old mode 100644 > new mode 100755 > diff --git a/b b/b > deleted file mode 100644 > --- a/b > +++ /dev/null > @@ -1,1 +0,0 @@ > -b > EOF set up hgweb $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log $ cat hg.pid >> $DAEMON_PIDS revision $ get-with-headers.py 
localhost:$HGPORT 'rev/0' 200 Script output follows test: 0cd96de13884

                            changeset 0:0cd96de13884

                            a
                            author test
                            date Thu, 01 Jan 1970 00:00:00 +0000
                            parents
                            children 559edbd9ed20
                            files a b
                            diffstat 2 files changed, 2 insertions(+), 0 deletions(-) [+]
                            line wrap: on
                            line diff
                              --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
                              +++ b/a	Thu Jan 01 00:00:00 1970 +0000
                              @@ -0,0 +1,1 @@
                              +a
                              --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
                              +++ b/b	Thu Jan 01 00:00:00 1970 +0000
                              @@ -0,0 +1,1 @@
                              +b
                            raw revision $ get-with-headers.py localhost:$HGPORT 'raw-rev/0' 200 Script output follows # HG changeset patch # User test # Date 0 0 # Node ID 0cd96de13884b090099512d4794ae87ad067ea8e a diff -r 000000000000 -r 0cd96de13884 a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +a diff -r 000000000000 -r 0cd96de13884 b --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/b Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +b diff removed file $ hg log --template "{file_mods}\n{file_dels}\n" -r tip a b $ hg parents --template "{node|short}\n" -r tip 0cd96de13884 $ hg parents --template "{node|short}\n" -r tip b 0cd96de13884 $ get-with-headers.py localhost:$HGPORT 'diff/tip/b' 200 Script output follows test: b diff

                            diff b @ 1:559edbd9ed20 tip

                            b
                            author test
                            date Thu, 01 Jan 1970 00:00:00 +0000
                            parents 0cd96de13884
                            children
                            line wrap: on
                            line diff
                              --- a/b	Thu Jan 01 00:00:00 1970 +0000
                              +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
                              @@ -1,1 +0,0 @@
                              -b
                            set up hgweb with git diffs $ killdaemons.py $ hg serve --config 'diff.git=1' -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log $ cat hg.pid >> $DAEMON_PIDS revision $ get-with-headers.py localhost:$HGPORT 'rev/0' 200 Script output follows test: 0cd96de13884

                            changeset 0:0cd96de13884

                            a
                            author test
                            date Thu, 01 Jan 1970 00:00:00 +0000
                            parents
                            children 559edbd9ed20
                            files a b
                            diffstat 2 files changed, 2 insertions(+), 0 deletions(-) [+]
                            line wrap: on
                            line diff
                              new file mode 100644
                              --- /dev/null
                              +++ b/a
                              @@ -0,0 +1,1 @@
                              +a
                              new file mode 100644
                              --- /dev/null
                              +++ b/b
                              @@ -0,0 +1,1 @@
                              +b
                            revision $ get-with-headers.py localhost:$HGPORT 'raw-rev/0' 200 Script output follows # HG changeset patch # User test # Date 0 0 # Node ID 0cd96de13884b090099512d4794ae87ad067ea8e a diff --git a/a b/a new file mode 100644 --- /dev/null +++ b/a @@ -0,0 +1,1 @@ +a diff --git a/b b/b new file mode 100644 --- /dev/null +++ b/b @@ -0,0 +1,1 @@ +b diff modified file $ hg log --template "{file_mods}\n{file_dels}\n" -r tip a b $ hg parents --template "{node|short}\n" -r tip 0cd96de13884 $ hg parents --template "{node|short}\n" -r tip a 0cd96de13884 $ get-with-headers.py localhost:$HGPORT 'diff/tip/a' 200 Script output follows test: a diff

                            diff a @ 1:559edbd9ed20 tip

                            b
                            author test
                            date Thu, 01 Jan 1970 00:00:00 +0000
                            parents 0cd96de13884
                            children
                            line wrap: on
                            line diff
                              old mode 100644
                              new mode 100755
                            comparison new file $ hg parents --template "{rev}:{node|short}\n" -r 0 $ hg log --template "{rev}:{node|short}\n" -r 0 0:0cd96de13884 $ get-with-headers.py localhost:$HGPORT 'comparison/0/a' 200 Script output follows test: a comparison

                            comparison a @ 0:0cd96de13884

                            a
                            author test
                            date Thu, 01 Jan 1970 00:00:00 +0000
                            parents
                            children
                            comparison
                            equal deleted inserted replaced
                            -1:000000000000 0:0cd96de13884
                            1 a
                            comparison existing file $ hg up 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo a >> a $ hg ci -mc $ hg parents --template "{rev}:{node|short}\n" -r tip 1:559edbd9ed20 $ hg log --template "{rev}:{node|short}\n" -r tip 2:d73db4d812ff $ get-with-headers.py localhost:$HGPORT 'comparison/tip/a' 200 Script output follows test: a comparison

                            comparison a @ 2:d73db4d812ff tip

                            c
                            author test
                            date Thu, 01 Jan 1970 00:00:00 +0000
                            parents 0cd96de13884
                            children
                            comparison
                            equal deleted inserted replaced
                            1:559edbd9ed20 2:d73db4d812ff
                            1 a 1 a
                            2 a
                            comparison removed file $ hg rm a $ hg ci -md $ hg parents --template "{rev}:{node|short}\n" -r tip 2:d73db4d812ff $ hg log --template "{rev}:{node|short}\n" -r tip 3:20e80271eb7a $ get-with-headers.py localhost:$HGPORT 'comparison/tip/a' 200 Script output follows test: a comparison

                            comparison a @ 3:20e80271eb7a tip

                            d
                            author test
                            date Thu, 01 Jan 1970 00:00:00 +0000
                            parents d73db4d812ff
                            children
                            comparison
                            equal deleted inserted replaced
                            2:d73db4d812ff 3:20e80271eb7a
                            1 a
                            2 a
                            comparison not-modified file $ echo e > e $ hg add e $ hg ci -m e $ echo f > f $ hg add f $ hg ci -m f $ hg tip --template "{rev}:{node|short}\n" 5:41d9fc4a6ae1 $ hg diff -c tip e $ hg parents --template "{rev}:{node|short}\n" -r tip 4:402bea3b0976 $ hg parents --template "{rev}:{node|short}\n" -r tip e 4:402bea3b0976 $ get-with-headers.py localhost:$HGPORT 'comparison/tip/e' 200 Script output follows test: e comparison

                            comparison e @ 5:41d9fc4a6ae1 tip

                            f
                            author test
                            date Thu, 01 Jan 1970 00:00:00 +0000
                            parents 402bea3b0976
                            children
                            comparison
                            equal deleted inserted replaced
                            4:402bea3b0976 5:41d9fc4a6ae1
                            $ cd .. test import rev as raw-rev $ hg clone -r0 test test1 adding changesets adding manifests adding file changes added 1 changesets with 2 changes to 2 files updating to branch default 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd test1 $ hg import -q --bypass --exact http://localhost:$HGPORT/rev/1 raw revision with diff block numbers $ killdaemons.py $ cat < .hg/hgrc > [web] > templates = rawdiff > EOF $ mkdir rawdiff $ cat < rawdiff/map > mimetype = 'text/plain; charset={encoding}' > changeset = '{diff}' > difflineplus = '{line}' > difflineminus = '{line}' > difflineat = '{line}' > diffline = '{line}' > filenodelink = '' > filenolink = '' > fileline = '{line}' > diffblock = 'Block: {blockno}\n{lines}\n' > EOF $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log $ cat hg.pid >> $DAEMON_PIDS $ get-with-headers.py localhost:$HGPORT 'raw-rev/0' 200 Script output follows Block: 1 diff -r 000000000000 -r 0cd96de13884 a --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/a Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +a Block: 2 diff -r 000000000000 -r 0cd96de13884 b --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/b Thu Jan 01 00:00:00 1970 +0000 @@ -0,0 +1,1 @@ +b $ killdaemons.py $ rm .hg/hgrc rawdiff/map $ rmdir rawdiff $ hg serve -n test -p $HGPORT -d --pid-file=hg.pid -A access.log -E errors.log $ cat hg.pid >> $DAEMON_PIDS errors $ cat ../test/errors.log $ cd .. mercurial-3.7.3/tests/test-propertycache.py0000644000175000017500000001456512676531525020503 0ustar mpmmpm00000000000000"""test behavior of propertycache and unfiltered propertycache The repoview overlay is quite complex. 
We test the behavior of property cache of both localrepo and repoview to prevent regression.""" import os, subprocess import mercurial.localrepo import mercurial.repoview import mercurial.util import mercurial.hg import mercurial.ui as uimod # create some special property cache that trace they call calllog = [] @mercurial.util.propertycache def testcachedfoobar(repo): name = repo.filtername if name is None: name = '' val = len(name) calllog.append(val) return val unficalllog = [] @mercurial.localrepo.unfilteredpropertycache def testcachedunfifoobar(repo): name = repo.filtername if name is None: name = '' val = 100 + len(name) unficalllog.append(val) return val #plug them on repo mercurial.localrepo.localrepository.testcachedfoobar = testcachedfoobar mercurial.localrepo.localrepository.testcachedunfifoobar = testcachedunfifoobar # Create an empty repo and instantiate it. It is important to run # these tests on the real object to detect regression. repopath = os.path.join(os.environ['TESTTMP'], 'repo') assert subprocess.call(['hg', 'init', repopath]) == 0 ui = uimod.ui() repo = mercurial.hg.repository(ui, path=repopath).unfiltered() print '' print '=== property cache ===' print '' print 'calllog:', calllog print 'cached value (unfiltered):', print vars(repo).get('testcachedfoobar', 'NOCACHE') print '' print '= first access on unfiltered, should do a call' print 'access:', repo.testcachedfoobar print 'calllog:', calllog print 'cached value (unfiltered):', print vars(repo).get('testcachedfoobar', 'NOCACHE') print '' print '= second access on unfiltered, should not do call' print 'access', repo.testcachedfoobar print 'calllog:', calllog print 'cached value (unfiltered):', print vars(repo).get('testcachedfoobar', 'NOCACHE') print '' print '= first access on "visible" view, should do a call' visibleview = repo.filtered('visible') print 'cached value ("visible" view):', print vars(visibleview).get('testcachedfoobar', 'NOCACHE') print 'access:', visibleview.testcachedfoobar 
print 'calllog:', calllog print 'cached value (unfiltered):', print vars(repo).get('testcachedfoobar', 'NOCACHE') print 'cached value ("visible" view):', print vars(visibleview).get('testcachedfoobar', 'NOCACHE') print '' print '= second access on "visible view", should not do call' print 'access:', visibleview.testcachedfoobar print 'calllog:', calllog print 'cached value (unfiltered):', print vars(repo).get('testcachedfoobar', 'NOCACHE') print 'cached value ("visible" view):', print vars(visibleview).get('testcachedfoobar', 'NOCACHE') print '' print '= no effect on other view' immutableview = repo.filtered('immutable') print 'cached value ("immutable" view):', print vars(immutableview).get('testcachedfoobar', 'NOCACHE') print 'access:', immutableview.testcachedfoobar print 'calllog:', calllog print 'cached value (unfiltered):', print vars(repo).get('testcachedfoobar', 'NOCACHE') print 'cached value ("visible" view):', print vars(visibleview).get('testcachedfoobar', 'NOCACHE') print 'cached value ("immutable" view):', print vars(immutableview).get('testcachedfoobar', 'NOCACHE') # unfiltered property cache test print '' print '' print '=== unfiltered property cache ===' print '' print 'unficalllog:', unficalllog print 'cached value (unfiltered): ', print vars(repo).get('testcachedunfifoobar', 'NOCACHE') print 'cached value ("visible" view): ', print vars(visibleview).get('testcachedunfifoobar', 'NOCACHE') print 'cached value ("immutable" view):', print vars(immutableview).get('testcachedunfifoobar', 'NOCACHE') print '' print '= first access on unfiltered, should do a call' print 'access (unfiltered):', repo.testcachedunfifoobar print 'unficalllog:', unficalllog print 'cached value (unfiltered): ', print vars(repo).get('testcachedunfifoobar', 'NOCACHE') print '' print '= second access on unfiltered, should not do call' print 'access (unfiltered):', repo.testcachedunfifoobar print 'unficalllog:', unficalllog print 'cached value (unfiltered): ', print 
vars(repo).get('testcachedunfifoobar', 'NOCACHE') print '' print '= access on view should use the unfiltered cache' print 'access (unfiltered): ', repo.testcachedunfifoobar print 'access ("visible" view): ', visibleview.testcachedunfifoobar print 'access ("immutable" view):', immutableview.testcachedunfifoobar print 'unficalllog:', unficalllog print 'cached value (unfiltered): ', print vars(repo).get('testcachedunfifoobar', 'NOCACHE') print 'cached value ("visible" view): ', print vars(visibleview).get('testcachedunfifoobar', 'NOCACHE') print 'cached value ("immutable" view):', print vars(immutableview).get('testcachedunfifoobar', 'NOCACHE') print '' print '= even if we clear the unfiltered cache' del repo.__dict__['testcachedunfifoobar'] print 'cached value (unfiltered): ', print vars(repo).get('testcachedunfifoobar', 'NOCACHE') print 'cached value ("visible" view): ', print vars(visibleview).get('testcachedunfifoobar', 'NOCACHE') print 'cached value ("immutable" view):', print vars(immutableview).get('testcachedunfifoobar', 'NOCACHE') print 'unficalllog:', unficalllog print 'access ("visible" view): ', visibleview.testcachedunfifoobar print 'unficalllog:', unficalllog print 'cached value (unfiltered): ', print vars(repo).get('testcachedunfifoobar', 'NOCACHE') print 'cached value ("visible" view): ', print vars(visibleview).get('testcachedunfifoobar', 'NOCACHE') print 'cached value ("immutable" view):', print vars(immutableview).get('testcachedunfifoobar', 'NOCACHE') print 'access ("immutable" view):', immutableview.testcachedunfifoobar print 'unficalllog:', unficalllog print 'cached value (unfiltered): ', print vars(repo).get('testcachedunfifoobar', 'NOCACHE') print 'cached value ("visible" view): ', print vars(visibleview).get('testcachedunfifoobar', 'NOCACHE') print 'cached value ("immutable" view):', print vars(immutableview).get('testcachedunfifoobar', 'NOCACHE') print 'access (unfiltered): ', repo.testcachedunfifoobar print 'unficalllog:', unficalllog print 
'cached value (unfiltered): ', print vars(repo).get('testcachedunfifoobar', 'NOCACHE') print 'cached value ("visible" view): ', print vars(visibleview).get('testcachedunfifoobar', 'NOCACHE') print 'cached value ("immutable" view):', print vars(immutableview).get('testcachedunfifoobar', 'NOCACHE') mercurial-3.7.3/tests/test-lrucachedict.py.out0000644000175000017500000000162112676531525021060 0ustar mpmmpm00000000000000'a' in d: True d['a']: va 'b' in d: True d['b']: vb 'c' in d: True d['c']: vc 'd' in d: True d['d']: vd 'a' in d: False 'b' in d: True d['b']: vb 'c' in d: True d['c']: vc 'd' in d: True d['d']: vd 'e' in d: True d['e']: ve 'b' in d: True d['b']: vb2 'c' in d: True d['c']: vc2 'd' in d: True d['d']: vd 'e' in d: False 'f' in d: True d['f']: vf 'b' in d: False 'c' in d: False 'd' in d: False 'e' in d: False 'f' in d: False 'a' in d: True d['a']: 1 'b' in d: True d['b']: 2 All of these should be present: 'a' in dc: True dc['a']: va3 'b' in dc: True dc['b']: vb3 'c' in dc: True dc['c']: vc3 'd' in dc: True dc['d']: vd3 All of these except 'a' should be present: 'a' in dc: False 'b' in dc: True dc['b']: vb3 'c' in dc: True dc['c']: vc3 'd' in dc: True dc['d']: vd3 'e' in dc: True dc['e']: ve3 These should be in reverse alphabetical order and read 'v?3': d['d']: vd3 d['c']: vc3 d['b']: vb3 d['a']: va3 mercurial-3.7.3/tests/test-template-engine.t0000644000175000017500000000325512676531525020516 0ustar mpmmpm00000000000000 $ cat > engine.py << EOF > > from mercurial import templater > > class mytemplater(object): > def __init__(self, loader, filters, defaults): > self.loader = loader > > def process(self, t, map): > tmpl = self.loader(t) > for k, v in map.iteritems(): > if k in ('templ', 'ctx', 'repo', 'revcache', 'cache'): > continue > if hasattr(v, '__call__'): > v = v(**map) > v = templater.stringify(v) > tmpl = tmpl.replace('{{%s}}' % k, v) > yield tmpl > > templater.engines['my'] = mytemplater > EOF $ hg init test $ echo '[extensions]' > test/.hg/hgrc $ 
echo "engine = `pwd`/engine.py" >> test/.hg/hgrc $ cd test $ cat > mymap << EOF > changeset = my:changeset.txt > EOF $ cat > changeset.txt << EOF > {{rev}} {{node}} {{author}} > EOF $ hg ci -Ama adding changeset.txt adding mymap $ hg log --style=./mymap 0 97e5f848f0936960273bbf75be6388cd0350a32b test $ cat > changeset.txt << EOF > {{p1rev}} {{p1node}} {{p2rev}} {{p2node}} > EOF $ hg ci -Ama $ hg log --style=./mymap 0 97e5f848f0936960273bbf75be6388cd0350a32b -1 0000000000000000000000000000000000000000 -1 0000000000000000000000000000000000000000 -1 0000000000000000000000000000000000000000 Fuzzing the unicode escaper to ensure it produces valid data #if hypothesis >>> from hypothesishelpers import * >>> import mercurial.templatefilters as tf >>> import json >>> @check(st.text().map(lambda s: s.encode('utf-8'))) ... def testtfescapeproducesvalidjson(text): ... json.loads('"' + tf.jsonescape(text) + '"') #endif $ cd .. mercurial-3.7.3/tests/test-newercgi.t0000644000175000017500000000270312676531525017240 0ustar mpmmpm00000000000000#require no-msys # MSYS will translate web paths as if they were file paths This is a rudimentary test of the CGI files as of d74fc8dec2b4. 
$ hg init test $ cat >hgweb.cgi < #!/usr/bin/env python > # > # An example CGI script to use hgweb, edit as necessary > > import cgitb > cgitb.enable() > > from mercurial import demandimport; demandimport.enable() > from mercurial.hgweb import hgweb > from mercurial.hgweb import wsgicgi > > application = hgweb("test", "Empty test repository") > wsgicgi.launch(application) > HGWEB $ chmod 755 hgweb.cgi $ cat >hgweb.config < [paths] > test = test > HGWEBDIRCONF $ cat >hgwebdir.cgi < #!/usr/bin/env python > # > # An example CGI script to export multiple hgweb repos, edit as necessary > > import cgitb > cgitb.enable() > > from mercurial import demandimport; demandimport.enable() > from mercurial.hgweb import hgwebdir > from mercurial.hgweb import wsgicgi > > application = hgwebdir("hgweb.config") > wsgicgi.launch(application) > HGWEBDIR $ chmod 755 hgwebdir.cgi $ . "$TESTDIR/cgienv" $ python hgweb.cgi > page1 $ python hgwebdir.cgi > page2 $ PATH_INFO="/test/" $ PATH_TRANSLATED="/var/something/test.cgi" $ REQUEST_URI="/test/test/" $ SCRIPT_URI="http://hg.omnifarious.org/test/test/" $ SCRIPT_URL="/test/test/" $ python hgwebdir.cgi > page3 $ grep -i error page1 page2 page3 [1] mercurial-3.7.3/tests/test-convert-datesort.t0000644000175000017500000000675112676531525020747 0ustar mpmmpm00000000000000 $ cat >> $HGRCPATH < [extensions] > convert= > EOF $ hg init t $ cd t $ echo a >> a $ hg ci -Am a0 -d '1 0' adding a $ hg branch brancha marked working directory as branch brancha (branches are permanent and global, did you want a bookmark?) 
$ echo a >> a $ hg ci -m a1 -d '2 0' $ echo a >> a $ hg ci -m a2 -d '3 0' $ echo a >> a $ hg ci -m a3 -d '4 0' $ hg up -C 0 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg branch branchb marked working directory as branch branchb $ echo b >> b $ hg ci -Am b0 -d '6 0' adding b $ hg up -C brancha 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo a >> a $ hg ci -m a4 -d '5 0' $ echo a >> a $ hg ci -m a5 -d '7 0' $ echo a >> a $ hg ci -m a6 -d '8 0' $ hg up -C branchb 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ echo b >> b $ hg ci -m b1 -d '9 0' $ hg up -C 0 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ echo c >> c $ hg branch branchc marked working directory as branch branchc $ hg ci -Am c0 -d '10 0' adding c $ hg up -C brancha 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg ci --close-branch -m a7x -d '11 0' $ hg up -C branchb 2 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg ci --close-branch -m b2x -d '12 0' $ hg up -C branchc 1 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg merge branchb 1 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg ci -m c1 -d '13 0' $ cd .. convert with datesort $ hg convert --datesort t t-datesort initializing destination t-datesort repository scanning source... sorting... converting... 12 a0 11 a1 10 a2 9 a3 8 a4 7 b0 6 a5 5 a6 4 b1 3 c0 2 a7x 1 b2x 0 c1 graph converted repo $ hg -R t-datesort log -G --template '{rev} "{desc}"\n' o 12 "c1" |\ | _ 11 "b2x" | | | | _ 10 "a7x" | | | o | | 9 "c0" | | | | o | 8 "b1" | | | | | o 7 "a6" | | | | | o 6 "a5" | | | | o | 5 "b0" |/ / | o 4 "a4" | | | o 3 "a3" | | | o 2 "a2" | | | o 1 "a1" |/ o 0 "a0" convert with datesort (default mode) $ hg convert t t-sourcesort initializing destination t-sourcesort repository scanning source... sorting... converting... 
12 a0 11 a1 10 a2 9 a3 8 b0 7 a4 6 a5 5 a6 4 b1 3 c0 2 a7x 1 b2x 0 c1 graph converted repo $ hg -R t-sourcesort log -G --template '{rev} "{desc}"\n' o 12 "c1" |\ | _ 11 "b2x" | | | | _ 10 "a7x" | | | o | | 9 "c0" | | | | o | 8 "b1" | | | | | o 7 "a6" | | | | | o 6 "a5" | | | | | o 5 "a4" | | | | o | 4 "b0" |/ / | o 3 "a3" | | | o 2 "a2" | | | o 1 "a1" |/ o 0 "a0" convert with closesort $ hg convert --closesort t t-closesort initializing destination t-closesort repository scanning source... sorting... converting... 12 a0 11 a1 10 a2 9 a3 8 b0 7 a4 6 a5 5 a6 4 a7x 3 b1 2 b2x 1 c0 0 c1 graph converted repo $ hg -R t-closesort log -G --template '{rev} "{desc}"\n' o 12 "c1" |\ | o 11 "c0" | | _ | 10 "b2x" | | o | 9 "b1" | | | | _ 8 "a7x" | | | | | o 7 "a6" | | | | | o 6 "a5" | | | | | o 5 "a4" | | | o | | 4 "b0" |/ / | o 3 "a3" | | | o 2 "a2" | | | o 1 "a1" |/ o 0 "a0" mercurial-3.7.3/tests/test-revert-interactive.t0000644000175000017500000001403012676531525021253 0ustar mpmmpm00000000000000Revert interactive tests 1 add and commit file f 2 add commit file folder1/g 3 add and commit file folder2/h 4 add and commit file folder1/i 5 commit change to file f 6 commit changes to files folder1/g folder2/h 7 commit changes to files folder1/g folder2/h 8 revert interactive to commit id 2 (line 3 above), check that folder1/i is removed and 9 make workdir match 7 10 run the same test than 8 from within folder1 and check same expectations $ cat <> $HGRCPATH > [ui] > interactive = true > [extensions] > record = > EOF $ mkdir -p a/folder1 a/folder2 $ cd a $ hg init >>> open('f', 'wb').write("1\n2\n3\n4\n5\n") $ hg add f ; hg commit -m "adding f" $ cat f > folder1/g ; hg add folder1/g ; hg commit -m "adding folder1/g" $ cat f > folder2/h ; hg add folder2/h ; hg commit -m "adding folder2/h" $ cat f > folder1/i ; hg add folder1/i ; hg commit -m "adding folder1/i" >>> open('f', 'wb').write("a\n1\n2\n3\n4\n5\nb\n") $ hg commit -m "modifying f" >>> open('folder1/g', 
'wb').write("c\n1\n2\n3\n4\n5\nd\n") $ hg commit -m "modifying folder1/g" >>> open('folder2/h', 'wb').write("e\n1\n2\n3\n4\n5\nf\n") $ hg commit -m "modifying folder2/h" $ hg tip changeset: 6:59dd6e4ab63a tag: tip user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: modifying folder2/h $ hg revert -i -r 2 --all -- << EOF > y > y > y > y > y > n > n > EOF reverting f reverting folder1/g (glob) removing folder1/i (glob) reverting folder2/h (glob) diff --git a/f b/f 2 hunks, 2 lines changed examine changes to 'f'? [Ynesfdaq?] y @@ -1,5 +1,6 @@ +a 1 2 3 4 5 record change 1/6 to 'f'? [Ynesfdaq?] y @@ -1,5 +2,6 @@ 1 2 3 4 5 +b record change 2/6 to 'f'? [Ynesfdaq?] y diff --git a/folder1/g b/folder1/g 2 hunks, 2 lines changed examine changes to 'folder1/g'? [Ynesfdaq?] y @@ -1,5 +1,6 @@ +c 1 2 3 4 5 record change 3/6 to 'folder1/g'? [Ynesfdaq?] y @@ -1,5 +2,6 @@ 1 2 3 4 5 +d record change 4/6 to 'folder1/g'? [Ynesfdaq?] n diff --git a/folder2/h b/folder2/h 2 hunks, 2 lines changed examine changes to 'folder2/h'? [Ynesfdaq?] n $ cat f 1 2 3 4 5 $ cat folder1/g 1 2 3 4 5 d $ cat folder2/h e 1 2 3 4 5 f Test that --interactive lift the need for --all $ echo q | hg revert -i -r 2 reverting folder1/g (glob) reverting folder2/h (glob) diff --git a/folder1/g b/folder1/g 1 hunks, 1 lines changed examine changes to 'folder1/g'? [Ynesfdaq?] q abort: user quit [255] $ rm folder1/g.orig $ hg update -C 6 3 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hg revert -i -r 2 --all -- << EOF > y > y > y > y > y > n > n > EOF reverting f reverting folder1/g (glob) removing folder1/i (glob) reverting folder2/h (glob) diff --git a/f b/f 2 hunks, 2 lines changed examine changes to 'f'? [Ynesfdaq?] y @@ -1,5 +1,6 @@ +a 1 2 3 4 5 record change 1/6 to 'f'? [Ynesfdaq?] y @@ -1,5 +2,6 @@ 1 2 3 4 5 +b record change 2/6 to 'f'? [Ynesfdaq?] y diff --git a/folder1/g b/folder1/g 2 hunks, 2 lines changed examine changes to 'folder1/g'? [Ynesfdaq?] 
y @@ -1,5 +1,6 @@ +c 1 2 3 4 5 record change 3/6 to 'folder1/g'? [Ynesfdaq?] y @@ -1,5 +2,6 @@ 1 2 3 4 5 +d record change 4/6 to 'folder1/g'? [Ynesfdaq?] n diff --git a/folder2/h b/folder2/h 2 hunks, 2 lines changed examine changes to 'folder2/h'? [Ynesfdaq?] n $ cat f 1 2 3 4 5 $ cat folder1/g 1 2 3 4 5 d $ cat folder2/h e 1 2 3 4 5 f $ hg st M f M folder1/g R folder1/i $ hg revert --interactive f << EOF > y > y > n > n > EOF diff --git a/f b/f 2 hunks, 2 lines changed examine changes to 'f'? [Ynesfdaq?] y @@ -1,6 +1,5 @@ -a 1 2 3 4 5 record change 1/2 to 'f'? [Ynesfdaq?] y @@ -2,6 +1,5 @@ 1 2 3 4 5 -b record change 2/2 to 'f'? [Ynesfdaq?] n $ hg st M f M folder1/g R folder1/i ? f.orig $ cat f a 1 2 3 4 5 $ cat f.orig 1 2 3 4 5 $ rm f.orig $ hg update -C . 3 files updated, 0 files merged, 0 files removed, 0 files unresolved Check editing files newly added by a revert 1) Create a dummy editor changing 1 to 42 $ cat > $TESTTMP/editor.sh << '__EOF__' > cat "$1" | sed "s/1/42/g" > tt > mv tt "$1" > __EOF__ 2) Add k $ printf "1\n" > k $ hg add k $ hg commit -m "add k" 3) Use interactive revert with editing (replacing +1 with +42): $ printf "0\n2\n" > k $ HGEDITOR="\"sh\" \"${TESTTMP}/editor.sh\"" hg revert -i < y > e > EOF reverting k diff --git a/k b/k 1 hunks, 2 lines changed examine changes to 'k'? [Ynesfdaq?] y @@ -1,1 +1,2 @@ -1 +0 +2 record this change to 'k'? [Ynesfdaq?] e $ cat k 42 Check the experimental config to invert the selection: $ cat <> $HGRCPATH > [experimental] > revertalternateinteractivemode=False > EOF $ hg up -C . 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ printf 'firstline\nc\n1\n2\n3\n 3\n5\nd\nlastline\n' > folder1/g $ hg diff --nodates diff -r a3d963a027aa folder1/g --- a/folder1/g +++ b/folder1/g @@ -1,7 +1,9 @@ +firstline c 1 2 3 -4 + 3 5 d +lastline $ hg revert -i < y > y > y > n > EOF reverting folder1/g (glob) diff --git a/folder1/g b/folder1/g 3 hunks, 3 lines changed examine changes to 'folder1/g'? 
[Ynesfdaq?] y @@ -1,5 +1,4 @@ -firstline c 1 2 3 record change 1/3 to 'folder1/g'? [Ynesfdaq?] y @@ -2,7 +1,7 @@ c 1 2 3 - 3 +4 5 d record change 2/3 to 'folder1/g'? [Ynesfdaq?] y @@ -7,3 +6,2 @@ 5 d -lastline record change 3/3 to 'folder1/g'? [Ynesfdaq?] n $ hg diff --nodates diff -r a3d963a027aa folder1/g --- a/folder1/g +++ b/folder1/g @@ -5,3 +5,4 @@ 4 5 d +lastline mercurial-3.7.3/tests/test-ui-verbosity.py.out0000644000175000017500000001030212676531525021063 0ustar mpmmpm00000000000000 hgrc settings command line options final result quiet verbo debug quiet verbo debug quiet verbo debug 0 False False False False False False -> False False False 1 True False False False False False -> True False False 2 False True False False False False -> False True False 3 True True False False False False -> False False False 4 False False True False False False -> False True True 5 True False True False False False -> False True True 6 False True True False False False -> False True True 7 True True True False False False -> False True True 8 False False False True False False -> True False False 9 True False False True False False -> True False False 10 False True False True False False -> True False False 11 True True False True False False -> True False False 12 False False True True False False -> True False False 13 True False True True False False -> True False False 14 False True True True False False -> True False False 15 True True True True False False -> True False False 16 False False False False True False -> False True False 17 True False False False True False -> False True False 18 False True False False True False -> False True False 19 True True False False True False -> False True False 20 False False True False True False -> False True False 21 True False True False True False -> False True False 22 False True True False True False -> False True False 23 True True True False True False -> False True False 24 False False False True True False -> False 
False False 25 True False False True True False -> False False False 26 False True False True True False -> False False False 27 True True False True True False -> False False False 28 False False True True True False -> False False False 29 True False True True True False -> False False False 30 False True True True True False -> False False False 31 True True True True True False -> False False False 32 False False False False False True -> False True True 33 True False False False False True -> False True True 34 False True False False False True -> False True True 35 True True False False False True -> False True True 36 False False True False False True -> False True True 37 True False True False False True -> False True True 38 False True True False False True -> False True True 39 True True True False False True -> False True True 40 False False False True False True -> False True True 41 True False False True False True -> False True True 42 False True False True False True -> False True True 43 True True False True False True -> False True True 44 False False True True False True -> False True True 45 True False True True False True -> False True True 46 False True True True False True -> False True True 47 True True True True False True -> False True True 48 False False False False True True -> False True True 49 True False False False True True -> False True True 50 False True False False True True -> False True True 51 True True False False True True -> False True True 52 False False True False True True -> False True True 53 True False True False True True -> False True True 54 False True True False True True -> False True True 55 True True True False True True -> False True True 56 False False False True True True -> False True True 57 True False False True True True -> False True True 58 False True False True True True -> False True True 59 True True False True True True -> False True True 60 False False True True True True -> False True True 61 True 
False True True True True -> False True True 62 False True True True True True -> False True True 63 True True True True True True -> False True True mercurial-3.7.3/tests/test-phases.t0000644000175000017500000003047712676531525016731 0ustar mpmmpm00000000000000 $ hglog() { hg log --template "{rev} {phaseidx} {desc}\n" $*; } $ mkcommit() { > echo "$1" > "$1" > hg add "$1" > message="$1" > shift > hg ci -m "$message" $* > } $ hg init initialrepo $ cd initialrepo Cannot change null revision phase $ hg phase --force --secret null abort: cannot change null revision phase [255] $ hg phase null -1: public $ mkcommit A New commit are draft by default $ hglog 0 1 A Following commit are draft too $ mkcommit B $ hglog 1 1 B 0 1 A Draft commit are properly created over public one: $ hg phase --public . $ hg phase 1: public $ hglog 1 0 B 0 0 A $ mkcommit C $ mkcommit D $ hglog 3 1 D 2 1 C 1 0 B 0 0 A Test creating changeset as secret $ mkcommit E --config phases.new-commit='secret' $ hglog 4 2 E 3 1 D 2 1 C 1 0 B 0 0 A Test the secret property is inherited $ mkcommit H $ hglog 5 2 H 4 2 E 3 1 D 2 1 C 1 0 B 0 0 A Even on merge $ hg up -q 1 $ mkcommit "B'" created new head $ hglog 6 1 B' 5 2 H 4 2 E 3 1 D 2 1 C 1 0 B 0 0 A $ hg merge 4 # E 3 files updated, 0 files merged, 0 files removed, 0 files unresolved (branch merge, don't forget to commit) $ hg phase 6: draft 4: secret $ hg ci -m "merge B' and E" $ hglog 7 2 merge B' and E 6 1 B' 5 2 H 4 2 E 3 1 D 2 1 C 1 0 B 0 0 A Test secret changeset are not pushed $ hg init ../push-dest $ cat > ../push-dest/.hg/hgrc << EOF > [phases] > publish=False > EOF $ hg outgoing ../push-dest --template='{rev} {phase} {desc|firstline}\n' comparing with ../push-dest searching for changes 0 public A 1 public B 2 draft C 3 draft D 6 draft B' $ hg outgoing -r 'branch(default)' ../push-dest --template='{rev} {phase} {desc|firstline}\n' comparing with ../push-dest searching for changes 0 public A 1 public B 2 draft C 3 draft D 6 draft B' $ hg push 
../push-dest -f # force because we push multiple heads pushing to ../push-dest searching for changes adding changesets adding manifests adding file changes added 5 changesets with 5 changes to 5 files (+1 heads) $ hglog 7 2 merge B' and E 6 1 B' 5 2 H 4 2 E 3 1 D 2 1 C 1 0 B 0 0 A $ cd ../push-dest $ hglog 4 1 B' 3 1 D 2 1 C 1 0 B 0 0 A (Issue3303) Check that remote secret changeset are ignore when checking creation of remote heads We add a secret head into the push destination. This secret head shadows a visible shared between the initial repo and the push destination. $ hg up -q 4 # B' $ mkcommit Z --config phases.new-commit=secret $ hg phase . 5: secret We now try to push a new public changeset that descend from the common public head shadowed by the remote secret head. $ cd ../initialrepo $ hg up -q 6 #B' $ mkcommit I created new head $ hg push ../push-dest pushing to ../push-dest searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) :note: The "(+1 heads)" is wrong as we do not had any visible head check that branch cache with "served" filter are properly computed and stored $ ls ../push-dest/.hg/cache/branch2* ../push-dest/.hg/cache/branch2-served $ cat ../push-dest/.hg/cache/branch2-served 6d6770faffce199f1fddd1cf87f6f026138cf061 6 465891ffab3c47a3c23792f7dc84156e19a90722 b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e o default 6d6770faffce199f1fddd1cf87f6f026138cf061 o default $ hg heads -R ../push-dest --template '{rev}:{node} {phase}\n' #update visible cache too 6:6d6770faffce199f1fddd1cf87f6f026138cf061 draft 5:2713879da13d6eea1ff22b442a5a87cb31a7ce6a secret 3:b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e draft $ ls ../push-dest/.hg/cache/branch2* ../push-dest/.hg/cache/branch2-served ../push-dest/.hg/cache/branch2-visible $ cat ../push-dest/.hg/cache/branch2-served 6d6770faffce199f1fddd1cf87f6f026138cf061 6 465891ffab3c47a3c23792f7dc84156e19a90722 
b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e o default 6d6770faffce199f1fddd1cf87f6f026138cf061 o default $ cat ../push-dest/.hg/cache/branch2-visible 6d6770faffce199f1fddd1cf87f6f026138cf061 6 b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e o default 2713879da13d6eea1ff22b442a5a87cb31a7ce6a o default 6d6770faffce199f1fddd1cf87f6f026138cf061 o default Restore condition prior extra insertion. $ hg -q --config extensions.mq= strip . $ hg up -q 7 $ cd .. Test secret changeset are not pull $ hg init pull-dest $ cd pull-dest $ hg pull ../initialrepo pulling from ../initialrepo requesting all changes adding changesets adding manifests adding file changes added 5 changesets with 5 changes to 5 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hglog 4 0 B' 3 0 D 2 0 C 1 0 B 0 0 A $ cd .. But secret can still be bundled explicitly $ cd initialrepo $ hg bundle --base '4^' -r 'children(4)' ../secret-bundle.hg 4 changesets found $ cd .. Test secret changeset are not cloned (during local clone) $ hg clone -qU initialrepo clone-dest $ hglog -R clone-dest 4 0 B' 3 0 D 2 0 C 1 0 B 0 0 A Test summary $ hg summary -R clone-dest --verbose parent: -1:000000000000 (no revision checked out) branch: default commit: (clean) update: 5 new changesets (update) $ hg summary -R initialrepo parent: 7:17a481b3bccb tip merge B' and E branch: default commit: (clean) (secret) update: 1 new changesets, 2 branch heads (merge) phases: 3 draft, 3 secret $ hg summary -R initialrepo --quiet parent: 7:17a481b3bccb tip update: 1 new changesets, 2 branch heads (merge) Test revset $ cd initialrepo $ hglog -r 'public()' 0 0 A 1 0 B $ hglog -r 'draft()' 2 1 C 3 1 D 6 1 B' $ hglog -r 'secret()' 4 2 E 5 2 H 7 2 merge B' and E test that phase are displayed in log at debug level $ hg log --debug changeset: 7:17a481b3bccb796c0521ae97903d81c52bfee4af tag: tip phase: secret parent: 6:cf9fe039dfd67e829edf6522a45de057b5c86519 parent: 4:a603bfb5a83e312131cebcd05353c217d4d21dde manifest: 
7:5e724ffacba267b2ab726c91fc8b650710deaaa8 user: test date: Thu Jan 01 00:00:00 1970 +0000 files+: C D E extra: branch=default description: merge B' and E changeset: 6:cf9fe039dfd67e829edf6522a45de057b5c86519 phase: draft parent: 1:27547f69f25460a52fff66ad004e58da7ad3fb56 parent: -1:0000000000000000000000000000000000000000 manifest: 6:ab8bfef2392903058bf4ebb9e7746e8d7026b27a user: test date: Thu Jan 01 00:00:00 1970 +0000 files+: B' extra: branch=default description: B' changeset: 5:a030c6be5127abc010fcbff1851536552e6951a8 phase: secret parent: 4:a603bfb5a83e312131cebcd05353c217d4d21dde parent: -1:0000000000000000000000000000000000000000 manifest: 5:5c710aa854874fe3d5fa7192e77bdb314cc08b5a user: test date: Thu Jan 01 00:00:00 1970 +0000 files+: H extra: branch=default description: H changeset: 4:a603bfb5a83e312131cebcd05353c217d4d21dde phase: secret parent: 3:b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e parent: -1:0000000000000000000000000000000000000000 manifest: 4:7173fd1c27119750b959e3a0f47ed78abe75d6dc user: test date: Thu Jan 01 00:00:00 1970 +0000 files+: E extra: branch=default description: E changeset: 3:b3325c91a4d916bcc4cdc83ea3fe4ece46a42f6e phase: draft parent: 2:f838bfaca5c7226600ebcfd84f3c3c13a28d3757 parent: -1:0000000000000000000000000000000000000000 manifest: 3:6e1f4c47ecb533ffd0c8e52cdc88afb6cd39e20c user: test date: Thu Jan 01 00:00:00 1970 +0000 files+: D extra: branch=default description: D changeset: 2:f838bfaca5c7226600ebcfd84f3c3c13a28d3757 phase: draft parent: 1:27547f69f25460a52fff66ad004e58da7ad3fb56 parent: -1:0000000000000000000000000000000000000000 manifest: 2:66a5a01817fdf5239c273802b5b7618d051c89e4 user: test date: Thu Jan 01 00:00:00 1970 +0000 files+: C extra: branch=default description: C changeset: 1:27547f69f25460a52fff66ad004e58da7ad3fb56 phase: public parent: 0:4a2df7238c3b48766b5e22fafbb8a2f506ec8256 parent: -1:0000000000000000000000000000000000000000 manifest: 1:cb5cbbc1bfbf24cc34b9e8c16914e9caa2d2a7fd user: test date: Thu Jan 
01 00:00:00 1970 +0000 files+: B extra: branch=default description: B changeset: 0:4a2df7238c3b48766b5e22fafbb8a2f506ec8256 phase: public parent: -1:0000000000000000000000000000000000000000 parent: -1:0000000000000000000000000000000000000000 manifest: 0:007d8c9d88841325f5c6b06371b35b4e8a2b1a83 user: test date: Thu Jan 01 00:00:00 1970 +0000 files+: A extra: branch=default description: A (Issue3707) test invalid phase name $ mkcommit I --config phases.new-commit='babar' transaction abort! rollback completed abort: phases.new-commit: not a valid phase name ('babar') [255] Test phase command =================== initial picture $ hg log -G --template "{rev} {phase} {desc}\n" @ 7 secret merge B' and E |\ | o 6 draft B' | | +---o 5 secret H | | o | 4 secret E | | o | 3 draft D | | o | 2 draft C |/ o 1 public B | o 0 public A display changesets phase (mixing -r and plain rev specification) $ hg phase 1::4 -r 7 1: public 2: draft 3: draft 4: secret 7: secret move changeset forward (with -r option) $ hg phase --public -r 2 $ hg log -G --template "{rev} {phase} {desc}\n" @ 7 secret merge B' and E |\ | o 6 draft B' | | +---o 5 secret H | | o | 4 secret E | | o | 3 draft D | | o | 2 public C |/ o 1 public B | o 0 public A move changeset backward (without -r option) $ hg phase --draft --force 2 $ hg log -G --template "{rev} {phase} {desc}\n" @ 7 secret merge B' and E |\ | o 6 draft B' | | +---o 5 secret H | | o | 4 secret E | | o | 3 draft D | | o | 2 draft C |/ o 1 public B | o 0 public A move changeset forward and backward and test kill switch $ cat <> $HGRCPATH > [experimental] > nativephaseskillswitch = true > EOF $ hg phase --draft --force 1::4 $ hg log -G --template "{rev} {phase} {desc}\n" @ 7 secret merge B' and E |\ | o 6 draft B' | | +---o 5 secret H | | o | 4 draft E | | o | 3 draft D | | o | 2 draft C |/ o 1 draft B | o 0 public A test partial failure $ cat <> $HGRCPATH > [experimental] > nativephaseskillswitch = false > EOF $ hg phase --public 7 $ hg phase --draft 
'5 or 7' cannot move 1 changesets to a higher phase, use --force phase changed for 1 changesets [1] $ hg log -G --template "{rev} {phase} {desc}\n" @ 7 public merge B' and E |\ | o 6 public B' | | +---o 5 draft H | | o | 4 public E | | o | 3 public D | | o | 2 public C |/ o 1 public B | o 0 public A test complete failure $ hg phase --draft 7 cannot move 1 changesets to a higher phase, use --force no phases changed [1] $ cd .. test hidden changeset are not cloned as public (issue3935) $ cd initialrepo (enabling evolution) $ cat >> $HGRCPATH << EOF > [experimental] > evolution=createmarkers > EOF (making a changeset hidden; H in that case) $ hg debugobsolete `hg id --debug -r 5` $ cd .. $ hg clone initialrepo clonewithobs requesting all changes adding changesets adding manifests adding file changes added 7 changesets with 6 changes to 6 files updating to branch default 6 files updated, 0 files merged, 0 files removed, 0 files unresolved $ cd clonewithobs $ hg log -G --template "{rev} {phase} {desc}\n" @ 6 public merge B' and E |\ | o 5 public B' | | o | 4 public E | | o | 3 public D | | o | 2 public C |/ o 1 public B | o 0 public A test verify repo containing hidden changesets, which should not abort just because repo.cancopy() is False $ cd ../initialrepo $ hg verify checking changesets checking manifests crosschecking files in changesets and manifests checking files 7 files, 8 changesets, 7 total revisions mercurial-3.7.3/tests/autodiff.py0000644000175000017500000000265712676531525016456 0ustar mpmmpm00000000000000# Extension dedicated to test patch.diff() upgrade modes from __future__ import absolute_import from mercurial import ( cmdutil, error, patch, scmutil, ) cmdtable = {} command = cmdutil.command(cmdtable) @command('autodiff', [('', 'git', '', 'git upgrade mode (yes/no/auto/warn/abort)')], '[OPTION]... 
[FILE]...') def autodiff(ui, repo, *pats, **opts): diffopts = patch.difffeatureopts(ui, opts) git = opts.get('git', 'no') brokenfiles = set() losedatafn = None if git in ('yes', 'no'): diffopts.git = git == 'yes' diffopts.upgrade = False elif git == 'auto': diffopts.git = False diffopts.upgrade = True elif git == 'warn': diffopts.git = False diffopts.upgrade = True def losedatafn(fn=None, **kwargs): brokenfiles.add(fn) return True elif git == 'abort': diffopts.git = False diffopts.upgrade = True def losedatafn(fn=None, **kwargs): raise error.Abort('losing data for %s' % fn) else: raise error.Abort('--git must be yes, no or auto') node1, node2 = scmutil.revpair(repo, []) m = scmutil.match(repo[node2], pats, opts) it = patch.diff(repo, node1, node2, match=m, opts=diffopts, losedatafn=losedatafn) for chunk in it: ui.write(chunk) for fn in sorted(brokenfiles): ui.write(('data lost for: %s\n' % fn)) mercurial-3.7.3/tests/test-phases-exchange.t0000644000175000017500000006135112676531525020504 0ustar mpmmpm00000000000000#require killdaemons $ cat << EOF >> $HGRCPATH > [experimental] > # drop me once bundle2 is the default, > # added to get test change early. 
> bundle2-exp = True > EOF $ hgph() { hg log -G --template "{rev} {phase} {desc} - {node|short}\n" $*; } $ mkcommit() { > echo "$1" > "$1" > hg add "$1" > message="$1" > shift > hg ci -m "$message" $* > } $ hg init alpha $ cd alpha $ mkcommit a-A $ mkcommit a-B $ mkcommit a-C $ mkcommit a-D $ hgph @ 3 draft a-D - b555f63b6063 | o 2 draft a-C - 54acac6f23ab | o 1 draft a-B - 548a3d25dbf0 | o 0 draft a-A - 054250a37db4 $ hg init ../beta $ hg push -r 1 ../beta pushing to ../beta searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files $ hgph @ 3 draft a-D - b555f63b6063 | o 2 draft a-C - 54acac6f23ab | o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 $ cd ../beta $ hgph o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 $ hg up -q $ mkcommit b-A $ hgph @ 2 draft b-A - f54f1bb90ff3 | o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 $ hg pull ../alpha pulling from ../alpha searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hgph o 4 public a-D - b555f63b6063 | o 3 public a-C - 54acac6f23ab | | @ 2 draft b-A - f54f1bb90ff3 |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 pull did not updated ../alpha state. 
push from alpha to beta should update phase even if nothing is transferred $ cd ../alpha $ hgph # not updated by remote pull @ 3 draft a-D - b555f63b6063 | o 2 draft a-C - 54acac6f23ab | o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 $ hg push -r 2 ../beta pushing to ../beta searching for changes no changes found [1] $ hgph @ 3 draft a-D - b555f63b6063 | o 2 public a-C - 54acac6f23ab | o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 $ hg push ../beta pushing to ../beta searching for changes no changes found [1] $ hgph @ 3 public a-D - b555f63b6063 | o 2 public a-C - 54acac6f23ab | o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 update must update phase of common changeset too $ hg pull ../beta # getting b-A pulling from ../beta searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ cd ../beta $ hgph # not updated by remote pull o 4 public a-D - b555f63b6063 | o 3 public a-C - 54acac6f23ab | | @ 2 draft b-A - f54f1bb90ff3 |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 $ hg pull ../alpha pulling from ../alpha searching for changes no changes found $ hgph o 4 public a-D - b555f63b6063 | o 3 public a-C - 54acac6f23ab | | @ 2 public b-A - f54f1bb90ff3 |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 Publish configuration option ---------------------------- Pull ```` changegroup are added without phase movement $ hg bundle -a ../base.bundle 5 changesets found $ cd .. 
$ hg init mu $ cd mu $ cat > .hg/hgrc << EOF > [phases] > publish=0 > EOF $ hg unbundle ../base.bundle adding changesets adding manifests adding file changes added 5 changesets with 5 changes to 5 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hgph o 4 draft a-D - b555f63b6063 | o 3 draft a-C - 54acac6f23ab | | o 2 draft b-A - f54f1bb90ff3 |/ o 1 draft a-B - 548a3d25dbf0 | o 0 draft a-A - 054250a37db4 $ cd .. Pulling from publish=False to publish=False does not move boundary. $ hg init nu $ cd nu $ cat > .hg/hgrc << EOF > [phases] > publish=0 > EOF $ hg pull ../mu -r 54acac6f23ab pulling from ../mu adding changesets adding manifests adding file changes added 3 changesets with 3 changes to 3 files (run 'hg update' to get a working copy) $ hgph o 2 draft a-C - 54acac6f23ab | o 1 draft a-B - 548a3d25dbf0 | o 0 draft a-A - 054250a37db4 Even for common $ hg pull ../mu -r f54f1bb90ff3 pulling from ../mu searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) (run 'hg heads' to see heads, 'hg merge' to merge) $ hgph o 3 draft b-A - f54f1bb90ff3 | | o 2 draft a-C - 54acac6f23ab |/ o 1 draft a-B - 548a3d25dbf0 | o 0 draft a-A - 054250a37db4 Pulling from Publish=True to Publish=False move boundary in common set. 
we are in nu $ hg pull ../alpha -r b555f63b6063 pulling from ../alpha searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (run 'hg update' to get a working copy) $ hgph # f54f1bb90ff3 stay draft, not ancestor of -r o 4 public a-D - b555f63b6063 | | o 3 draft b-A - f54f1bb90ff3 | | o | 2 public a-C - 54acac6f23ab |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 pulling from Publish=False to publish=False with some public $ hg up -q f54f1bb90ff3 $ mkcommit n-A $ mkcommit n-B $ hgph @ 6 draft n-B - 145e75495359 | o 5 draft n-A - d6bcb4f74035 | | o 4 public a-D - b555f63b6063 | | o | 3 draft b-A - f54f1bb90ff3 | | | o 2 public a-C - 54acac6f23ab |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 $ cd ../mu $ hg pull ../nu pulling from ../nu searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files (run 'hg update' to get a working copy) $ hgph o 6 draft n-B - 145e75495359 | o 5 draft n-A - d6bcb4f74035 | | o 4 public a-D - b555f63b6063 | | | o 3 public a-C - 54acac6f23ab | | o | 2 draft b-A - f54f1bb90ff3 |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 $ cd .. pulling into publish=True $ cd alpha $ hgph o 4 public b-A - f54f1bb90ff3 | | @ 3 public a-D - b555f63b6063 | | | o 2 public a-C - 54acac6f23ab |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 $ hg pull ../mu pulling from ../mu searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files (run 'hg update' to get a working copy) $ hgph o 6 draft n-B - 145e75495359 | o 5 draft n-A - d6bcb4f74035 | o 4 public b-A - f54f1bb90ff3 | | @ 3 public a-D - b555f63b6063 | | | o 2 public a-C - 54acac6f23ab |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 $ cd .. 
pulling back into original repo $ cd nu $ hg pull ../alpha pulling from ../alpha searching for changes no changes found $ hgph @ 6 public n-B - 145e75495359 | o 5 public n-A - d6bcb4f74035 | | o 4 public a-D - b555f63b6063 | | o | 3 public b-A - f54f1bb90ff3 | | | o 2 public a-C - 54acac6f23ab |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 Push ```` (inserted) Test that phase are pushed even when they are nothing to pus (this might be tested later bu are very convenient to not alter too much test) Push back to alpha $ hg push ../alpha # from nu pushing to ../alpha searching for changes no changes found [1] $ cd .. $ cd alpha $ hgph o 6 public n-B - 145e75495359 | o 5 public n-A - d6bcb4f74035 | o 4 public b-A - f54f1bb90ff3 | | @ 3 public a-D - b555f63b6063 | | | o 2 public a-C - 54acac6f23ab |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 (end insertion) initial setup $ hg log -G # of alpha o changeset: 6:145e75495359 | tag: tip | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: n-B | o changeset: 5:d6bcb4f74035 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: n-A | o changeset: 4:f54f1bb90ff3 | parent: 1:548a3d25dbf0 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: b-A | | @ changeset: 3:b555f63b6063 | | user: test | | date: Thu Jan 01 00:00:00 1970 +0000 | | summary: a-D | | | o changeset: 2:54acac6f23ab |/ user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: a-C | o changeset: 1:548a3d25dbf0 | user: test | date: Thu Jan 01 00:00:00 1970 +0000 | summary: a-B | o changeset: 0:054250a37db4 user: test date: Thu Jan 01 00:00:00 1970 +0000 summary: a-A $ mkcommit a-E $ mkcommit a-F $ mkcommit a-G $ hg up d6bcb4f74035 -q $ mkcommit a-H created new head $ hgph @ 10 draft a-H - 967b449fbc94 | | o 9 draft a-G - 3e27b6f1eee1 | | | o 8 draft a-F - b740e3e5c05d | | | o 7 draft a-E - e9f537e46dea | | +---o 6 public n-B - 145e75495359 | | o | 5 public n-A - d6bcb4f74035 | | o | 4 public b-A - 
f54f1bb90ff3 | | | o 3 public a-D - b555f63b6063 | | | o 2 public a-C - 54acac6f23ab |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 Pulling from bundle does not alter phases of changeset not present in the bundle $ hg bundle --base 1 -r 6 -r 3 ../partial-bundle.hg 5 changesets found $ hg pull ../partial-bundle.hg pulling from ../partial-bundle.hg searching for changes no changes found $ hgph @ 10 draft a-H - 967b449fbc94 | | o 9 draft a-G - 3e27b6f1eee1 | | | o 8 draft a-F - b740e3e5c05d | | | o 7 draft a-E - e9f537e46dea | | +---o 6 public n-B - 145e75495359 | | o | 5 public n-A - d6bcb4f74035 | | o | 4 public b-A - f54f1bb90ff3 | | | o 3 public a-D - b555f63b6063 | | | o 2 public a-C - 54acac6f23ab |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 Pushing to Publish=False (unknown changeset) $ hg push ../mu -r b740e3e5c05d # a-F pushing to ../mu searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files $ hgph @ 10 draft a-H - 967b449fbc94 | | o 9 draft a-G - 3e27b6f1eee1 | | | o 8 draft a-F - b740e3e5c05d | | | o 7 draft a-E - e9f537e46dea | | +---o 6 public n-B - 145e75495359 | | o | 5 public n-A - d6bcb4f74035 | | o | 4 public b-A - f54f1bb90ff3 | | | o 3 public a-D - b555f63b6063 | | | o 2 public a-C - 54acac6f23ab |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 $ cd ../mu $ hgph # again f54f1bb90ff3, d6bcb4f74035 and 145e75495359 stay draft, > # not ancestor of -r o 8 draft a-F - b740e3e5c05d | o 7 draft a-E - e9f537e46dea | | o 6 draft n-B - 145e75495359 | | | o 5 draft n-A - d6bcb4f74035 | | o | 4 public a-D - b555f63b6063 | | o | 3 public a-C - 54acac6f23ab | | | o 2 draft b-A - f54f1bb90ff3 |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 Pushing to Publish=True (unknown changeset) $ hg push ../beta -r b740e3e5c05d pushing to ../beta searching for changes adding changesets adding manifests adding file changes added 2 changesets 
with 2 changes to 2 files $ hgph # again f54f1bb90ff3, d6bcb4f74035 and 145e75495359 stay draft, > # not ancestor of -r o 8 public a-F - b740e3e5c05d | o 7 public a-E - e9f537e46dea | | o 6 draft n-B - 145e75495359 | | | o 5 draft n-A - d6bcb4f74035 | | o | 4 public a-D - b555f63b6063 | | o | 3 public a-C - 54acac6f23ab | | | o 2 draft b-A - f54f1bb90ff3 |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 Pushing to Publish=True (common changeset) $ cd ../beta $ hg push ../alpha pushing to ../alpha searching for changes no changes found [1] $ hgph o 6 public a-F - b740e3e5c05d | o 5 public a-E - e9f537e46dea | o 4 public a-D - b555f63b6063 | o 3 public a-C - 54acac6f23ab | | @ 2 public b-A - f54f1bb90ff3 |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 $ cd ../alpha $ hgph @ 10 draft a-H - 967b449fbc94 | | o 9 draft a-G - 3e27b6f1eee1 | | | o 8 public a-F - b740e3e5c05d | | | o 7 public a-E - e9f537e46dea | | +---o 6 public n-B - 145e75495359 | | o | 5 public n-A - d6bcb4f74035 | | o | 4 public b-A - f54f1bb90ff3 | | | o 3 public a-D - b555f63b6063 | | | o 2 public a-C - 54acac6f23ab |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 Pushing to Publish=False (common changeset that change phase + unknown one) $ hg push ../mu -r 967b449fbc94 -f pushing to ../mu searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) $ hgph @ 10 draft a-H - 967b449fbc94 | | o 9 draft a-G - 3e27b6f1eee1 | | | o 8 public a-F - b740e3e5c05d | | | o 7 public a-E - e9f537e46dea | | +---o 6 public n-B - 145e75495359 | | o | 5 public n-A - d6bcb4f74035 | | o | 4 public b-A - f54f1bb90ff3 | | | o 3 public a-D - b555f63b6063 | | | o 2 public a-C - 54acac6f23ab |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 $ cd ../mu $ hgph # d6bcb4f74035 should have changed phase > # 145e75495359 is still draft. 
not ancestor of -r o 9 draft a-H - 967b449fbc94 | | o 8 public a-F - b740e3e5c05d | | | o 7 public a-E - e9f537e46dea | | +---o 6 draft n-B - 145e75495359 | | o | 5 public n-A - d6bcb4f74035 | | | o 4 public a-D - b555f63b6063 | | | o 3 public a-C - 54acac6f23ab | | o | 2 public b-A - f54f1bb90ff3 |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 Pushing to Publish=True (common changeset from publish=False) (in mu) $ hg push ../alpha pushing to ../alpha searching for changes no changes found [1] $ hgph o 9 public a-H - 967b449fbc94 | | o 8 public a-F - b740e3e5c05d | | | o 7 public a-E - e9f537e46dea | | +---o 6 public n-B - 145e75495359 | | o | 5 public n-A - d6bcb4f74035 | | | o 4 public a-D - b555f63b6063 | | | o 3 public a-C - 54acac6f23ab | | o | 2 public b-A - f54f1bb90ff3 |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 $ hgph -R ../alpha # a-H should have been synced to 0 @ 10 public a-H - 967b449fbc94 | | o 9 draft a-G - 3e27b6f1eee1 | | | o 8 public a-F - b740e3e5c05d | | | o 7 public a-E - e9f537e46dea | | +---o 6 public n-B - 145e75495359 | | o | 5 public n-A - d6bcb4f74035 | | o | 4 public b-A - f54f1bb90ff3 | | | o 3 public a-D - b555f63b6063 | | | o 2 public a-C - 54acac6f23ab |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 Bare push with next changeset and common changeset needing sync (issue3575) (reset some stat on remote repo to avoid confusing other tests) $ hg -R ../alpha --config extensions.strip= strip --no-backup 967b449fbc94 0 files updated, 0 files merged, 1 files removed, 0 files unresolved $ hg phase --force --draft b740e3e5c05d 967b449fbc94 $ hg push -fv ../alpha pushing to ../alpha searching for changes 1 changesets found uncompressed size of bundle content: 192 (changelog) 165 (manifests) 131 a-H adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) $ hgph o 9 public a-H - 967b449fbc94 | | o 8 public a-F - b740e3e5c05d | | | o 7 
public a-E - e9f537e46dea | | +---o 6 public n-B - 145e75495359 | | o | 5 public n-A - d6bcb4f74035 | | | o 4 public a-D - b555f63b6063 | | | o 3 public a-C - 54acac6f23ab | | o | 2 public b-A - f54f1bb90ff3 |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 $ hg -R ../alpha update 967b449fbc94 #for latter test consistency 1 files updated, 0 files merged, 0 files removed, 0 files unresolved $ hgph -R ../alpha @ 10 public a-H - 967b449fbc94 | | o 9 draft a-G - 3e27b6f1eee1 | | | o 8 public a-F - b740e3e5c05d | | | o 7 public a-E - e9f537e46dea | | +---o 6 public n-B - 145e75495359 | | o | 5 public n-A - d6bcb4f74035 | | o | 4 public b-A - f54f1bb90ff3 | | | o 3 public a-D - b555f63b6063 | | | o 2 public a-C - 54acac6f23ab |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 Discovery locally secret changeset on a remote repository: - should make it non-secret $ cd ../alpha $ mkcommit A-secret --config phases.new-commit=2 $ hgph @ 11 secret A-secret - 435b5d83910c | o 10 public a-H - 967b449fbc94 | | o 9 draft a-G - 3e27b6f1eee1 | | | o 8 public a-F - b740e3e5c05d | | | o 7 public a-E - e9f537e46dea | | +---o 6 public n-B - 145e75495359 | | o | 5 public n-A - d6bcb4f74035 | | o | 4 public b-A - f54f1bb90ff3 | | | o 3 public a-D - b555f63b6063 | | | o 2 public a-C - 54acac6f23ab |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 $ hg bundle --base 'parents(.)' -r . 
../secret-bundle.hg 1 changesets found $ hg -R ../mu unbundle ../secret-bundle.hg adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (run 'hg update' to get a working copy) $ hgph -R ../mu o 10 draft A-secret - 435b5d83910c | o 9 public a-H - 967b449fbc94 | | o 8 public a-F - b740e3e5c05d | | | o 7 public a-E - e9f537e46dea | | +---o 6 public n-B - 145e75495359 | | o | 5 public n-A - d6bcb4f74035 | | | o 4 public a-D - b555f63b6063 | | | o 3 public a-C - 54acac6f23ab | | o | 2 public b-A - f54f1bb90ff3 |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 $ hg pull ../mu pulling from ../mu searching for changes no changes found $ hgph @ 11 draft A-secret - 435b5d83910c | o 10 public a-H - 967b449fbc94 | | o 9 draft a-G - 3e27b6f1eee1 | | | o 8 public a-F - b740e3e5c05d | | | o 7 public a-E - e9f537e46dea | | +---o 6 public n-B - 145e75495359 | | o | 5 public n-A - d6bcb4f74035 | | o | 4 public b-A - f54f1bb90ff3 | | | o 3 public a-D - b555f63b6063 | | | o 2 public a-C - 54acac6f23ab |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 pushing a locally public and draft changesets remotely secret should make them appear on the remote side. $ hg -R ../mu phase --secret --force 967b449fbc94 $ hg push -r 435b5d83910c ../mu pushing to ../mu searching for changes abort: push creates new remote head 435b5d83910c! 
(merge or see "hg help push" for details about pushing new heads) [255] $ hg push -fr 435b5d83910c ../mu # because the push will create new visible head pushing to ../mu searching for changes adding changesets adding manifests adding file changes added 0 changesets with 0 changes to 2 files $ hgph -R ../mu o 10 draft A-secret - 435b5d83910c | o 9 public a-H - 967b449fbc94 | | o 8 public a-F - b740e3e5c05d | | | o 7 public a-E - e9f537e46dea | | +---o 6 public n-B - 145e75495359 | | o | 5 public n-A - d6bcb4f74035 | | | o 4 public a-D - b555f63b6063 | | | o 3 public a-C - 54acac6f23ab | | o | 2 public b-A - f54f1bb90ff3 |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 pull new changeset with common draft locally $ hg up -q 967b449fbc94 # create a new root for draft $ mkcommit 'alpha-more' created new head $ hg push -fr . ../mu pushing to ../mu searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (+1 heads) $ cd ../mu $ hg phase --secret --force 1c5cfd894796 $ hg up -q 435b5d83910c $ mkcommit 'mu-more' $ cd ../alpha $ hg pull ../mu pulling from ../mu searching for changes adding changesets adding manifests adding file changes added 1 changesets with 1 changes to 1 files (run 'hg update' to get a working copy) $ hgph o 13 draft mu-more - 5237fb433fc8 | | @ 12 draft alpha-more - 1c5cfd894796 | | o | 11 draft A-secret - 435b5d83910c |/ o 10 public a-H - 967b449fbc94 | | o 9 draft a-G - 3e27b6f1eee1 | | | o 8 public a-F - b740e3e5c05d | | | o 7 public a-E - e9f537e46dea | | +---o 6 public n-B - 145e75495359 | | o | 5 public n-A - d6bcb4f74035 | | o | 4 public b-A - f54f1bb90ff3 | | | o 3 public a-D - b555f63b6063 | | | o 2 public a-C - 54acac6f23ab |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 Test that test are properly ignored on remote event when existing locally $ cd .. 
$ hg clone -qU -r b555f63b6063 -r f54f1bb90ff3 beta gamma # pathological case are # # * secret remotely # * known locally # * repo have uncommon changeset $ hg -R beta phase --secret --force f54f1bb90ff3 $ hg -R gamma phase --draft --force f54f1bb90ff3 $ cd gamma $ hg pull ../beta pulling from ../beta searching for changes adding changesets adding manifests adding file changes added 2 changesets with 2 changes to 2 files (run 'hg update' to get a working copy) $ hg phase f54f1bb90ff3 2: draft same over the wire $ cd ../beta $ hg serve -p $HGPORT -d --pid-file=../beta.pid -E ../beta-error.log $ cat ../beta.pid >> $DAEMON_PIDS $ cd ../gamma $ hg pull http://localhost:$HGPORT/ --config experimental.bundle2-exp=True pulling from http://localhost:$HGPORT/ searching for changes no changes found $ hg phase f54f1bb90ff3 2: draft enforce bundle1 $ hg pull http://localhost:$HGPORT/ --config experimental.bundle2-exp=False pulling from http://localhost:$HGPORT/ searching for changes no changes found $ hg phase f54f1bb90ff3 2: draft check that secret local on both side are not synced to public $ hg push -r b555f63b6063 http://localhost:$HGPORT/ pushing to http://localhost:$HGPORT/ searching for changes no changes found [1] $ hg phase f54f1bb90ff3 2: draft put the changeset in the draft state again (first test after this one expect to be able to copy) $ cd .. Test Clone behavior A. Clone without secret changeset 1. 
cloning non-publishing repository (Phase should be preserved) # make sure there is no secret so we can use a copy clone $ hg -R mu phase --draft 'secret()' $ hg clone -U mu Tau $ hgph -R Tau o 12 draft mu-more - 5237fb433fc8 | | o 11 draft alpha-more - 1c5cfd894796 | | o | 10 draft A-secret - 435b5d83910c |/ o 9 public a-H - 967b449fbc94 | | o 8 public a-F - b740e3e5c05d | | | o 7 public a-E - e9f537e46dea | | +---o 6 public n-B - 145e75495359 | | o | 5 public n-A - d6bcb4f74035 | | | o 4 public a-D - b555f63b6063 | | | o 3 public a-C - 54acac6f23ab | | o | 2 public b-A - f54f1bb90ff3 |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 2. cloning publishing repository (everything should be public) $ hg clone -U alpha Upsilon $ hgph -R Upsilon o 13 public mu-more - 5237fb433fc8 | | o 12 public alpha-more - 1c5cfd894796 | | o | 11 public A-secret - 435b5d83910c |/ o 10 public a-H - 967b449fbc94 | | o 9 public a-G - 3e27b6f1eee1 | | | o 8 public a-F - b740e3e5c05d | | | o 7 public a-E - e9f537e46dea | | +---o 6 public n-B - 145e75495359 | | o | 5 public n-A - d6bcb4f74035 | | o | 4 public b-A - f54f1bb90ff3 | | | o 3 public a-D - b555f63b6063 | | | o 2 public a-C - 54acac6f23ab |/ o 1 public a-B - 548a3d25dbf0 | o 0 public a-A - 054250a37db4 #if unix-permissions no-root Pushing From an unlockable repo -------------------------------- (issue3684) Unability to lock the source repo should not prevent the push. It will prevent the retrieval of remote phase during push. For example, pushing to a publishing server won't turn changeset public. 1. Test that push is not prevented $ hg init Phi $ cd Upsilon $ chmod -R -w .hg $ hg push ../Phi pushing to ../Phi searching for changes adding changesets adding manifests adding file changes added 14 changesets with 14 changes to 14 files (+3 heads) $ chmod -R +w .hg 2. 
Test that failed phases movement are reported $ hg phase --force --draft 3 $ chmod -R -w .hg $ hg push ../Phi pushing to ../Phi searching for changes no changes found cannot lock source repo, skipping local public phase update [1] $ chmod -R +w .hg $ hgph Upsilon $ cd .. $ killdaemons.py #endif mercurial-3.7.3/tests/test-bdiff.py0000644000175000017500000000271412676531525016676 0ustar mpmmpm00000000000000import struct from mercurial import bdiff, mpatch def test1(a, b): d = bdiff.bdiff(a, b) c = a if d: c = mpatch.patches(a, [d]) if c != b: print "***", repr(a), repr(b) print "bad:" print repr(c)[:200] print repr(d) def test(a, b): print "***", repr(a), repr(b) test1(a, b) test1(b, a) test("a\nc\n\n\n\n", "a\nb\n\n\n") test("a\nb\nc\n", "a\nc\n") test("", "") test("a\nb\nc", "a\nb\nc") test("a\nb\nc\nd\n", "a\nd\n") test("a\nb\nc\nd\n", "a\nc\ne\n") test("a\nb\nc\n", "a\nc\n") test("a\n", "c\na\nb\n") test("a\n", "") test("a\n", "b\nc\n") test("a\n", "c\na\n") test("", "adjfkjdjksdhfksj") test("", "ab") test("", "abc") test("a", "a") test("ab", "ab") test("abc", "abc") test("a\n", "a\n") test("a\nb", "a\nb") #issue1295 def showdiff(a, b): bin = bdiff.bdiff(a, b) pos = 0 while pos < len(bin): p1, p2, l = struct.unpack(">lll", bin[pos:pos + 12]) pos += 12 print p1, p2, repr(bin[pos:pos + l]) pos += l showdiff("x\n\nx\n\nx\n\nx\n\nz\n", "x\n\nx\n\ny\n\nx\n\nx\n\nz\n") showdiff("x\n\nx\n\nx\n\nx\n\nz\n", "x\n\nx\n\ny\n\nx\n\ny\n\nx\n\nz\n") print "done" def testfixws(a, b, allws): c = bdiff.fixws(a, allws) if c != b: print "*** fixws", repr(a), repr(b), allws print "got:" print repr(c) testfixws(" \ta\r b\t\n", "ab\n", 1) testfixws(" \ta\r b\t\n", " a b\n", 0) testfixws("", "", 1) testfixws("", "", 0) print "done" mercurial-3.7.3/tests/test-debugbundle.t0000644000175000017500000000355312676531525017721 0ustar mpmmpm00000000000000 Create a test repository: $ hg init repo $ cd repo $ touch a ; hg add a ; hg ci -ma $ touch b ; hg add b ; hg ci -mb $ touch c ; hg add c ; 
hg ci -mc $ hg bundle --base 0 --rev tip bundle.hg -v --type v1 2 changesets found uncompressed size of bundle content: 332 (changelog) 282 (manifests) 93 b 93 c Terse output: $ hg debugbundle bundle.hg 0e067c57feba1a5694ca4844f05588bb1bf82342 991a3460af53952d10ec8a295d3d2cc2e5fa9690 Verbose output: $ hg debugbundle --all bundle.hg format: id, p1, p2, cset, delta base, len(delta) changelog 0e067c57feba1a5694ca4844f05588bb1bf82342 3903775176ed42b1458a6281db4a0ccf4d9f287a 0000000000000000000000000000000000000000 0e067c57feba1a5694ca4844f05588bb1bf82342 3903775176ed42b1458a6281db4a0ccf4d9f287a 80 991a3460af53952d10ec8a295d3d2cc2e5fa9690 0e067c57feba1a5694ca4844f05588bb1bf82342 0000000000000000000000000000000000000000 991a3460af53952d10ec8a295d3d2cc2e5fa9690 0e067c57feba1a5694ca4844f05588bb1bf82342 80 manifest 686dbf0aeca417636fa26a9121c681eabbb15a20 8515d4bfda768e04af4c13a69a72e28c7effbea7 0000000000000000000000000000000000000000 0e067c57feba1a5694ca4844f05588bb1bf82342 8515d4bfda768e04af4c13a69a72e28c7effbea7 55 ae25a31b30b3490a981e7b96a3238cc69583fda1 686dbf0aeca417636fa26a9121c681eabbb15a20 0000000000000000000000000000000000000000 991a3460af53952d10ec8a295d3d2cc2e5fa9690 686dbf0aeca417636fa26a9121c681eabbb15a20 55 b b80de5d138758541c5f05265ad144ab9fa86d1db 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 0e067c57feba1a5694ca4844f05588bb1bf82342 0000000000000000000000000000000000000000 0 c b80de5d138758541c5f05265ad144ab9fa86d1db 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 991a3460af53952d10ec8a295d3d2cc2e5fa9690 0000000000000000000000000000000000000000 0 $ cd .. 
mercurial-3.7.3/tests/test-convert-tla.t0000644000175000017500000000777012676531525017704 0ustar mpmmpm00000000000000#require tla symlink $ tla my-id "mercurial " $ echo "[extensions]" >> $HGRCPATH $ echo "convert=" >> $HGRCPATH create tla archive $ tla make-archive tla@mercurial--convert `pwd`/hg-test-convert-tla initialize tla repo $ mkdir tla-repo $ cd tla-repo/ $ tla init-tree tla@mercurial--convert/tla--test--0 $ tla import * creating version tla@mercurial--convert/tla--test--0 * imported tla@mercurial--convert/tla--test--0 create initial files $ echo 'this is a file' > a $ tla add a $ mkdir src $ tla add src $ cd src $ dd count=1 if=/dev/zero of=b > /dev/null 2> /dev/null $ tla add b $ tla commit -s "added a file, src and src/b (binary)" A/ .arch-ids A/ src A/ src/.arch-ids A .arch-ids/a.id A a A src/.arch-ids/=id A src/.arch-ids/b.id A src/b * update pristine tree (tla@mercurial--convert/tla--test--0--base-0 => tla--test--0--patch-1) * committed tla@mercurial--convert/tla--test--0--patch-1 create link file and modify a $ ln -s ../a a-link $ tla add a-link $ echo 'this a modification to a' >> ../a $ tla commit -s "added link to a and modify a" A src/.arch-ids/a-link.id A src/a-link M a * update pristine tree (tla@mercurial--convert/tla--test--0--patch-1 => tla--test--0--patch-2) * committed tla@mercurial--convert/tla--test--0--patch-2 create second link and modify b $ ln -s ../a a-link-2 $ tla add a-link-2 $ dd count=1 seek=1 if=/dev/zero of=b > /dev/null 2> /dev/null $ tla commit -s "added second link and modify b" A src/.arch-ids/a-link-2.id A src/a-link-2 Mb src/b * update pristine tree (tla@mercurial--convert/tla--test--0--patch-2 => tla--test--0--patch-3) * committed tla@mercurial--convert/tla--test--0--patch-3 b file to link and a-link-2 to regular file $ rm -f a-link-2 $ echo 'this is now a regular file' > a-link-2 $ ln -sf ../a b $ tla commit -s "file to link and link to file test" fl src/b lf src/a-link-2 * update pristine tree 
(tla@mercurial--convert/tla--test--0--patch-3 => tla--test--0--patch-4) * committed tla@mercurial--convert/tla--test--0--patch-4 move a-link-2 file and src directory $ cd .. $ tla mv src/a-link-2 c $ tla mv src test $ tla commit -s "move and rename a-link-2 file and src directory" D/ src/.arch-ids A/ test/.arch-ids /> src test => src/.arch-ids/a-link-2.id .arch-ids/c.id => src/a-link-2 c => src/.arch-ids/=id test/.arch-ids/=id => src/.arch-ids/a-link.id test/.arch-ids/a-link.id => src/.arch-ids/b.id test/.arch-ids/b.id * update pristine tree (tla@mercurial--convert/tla--test--0--patch-4 => tla--test--0--patch-5) * committed tla@mercurial--convert/tla--test--0--patch-5 $ cd .. converting tla repo to Mercurial $ hg convert tla-repo tla-repo-hg initializing destination tla-repo-hg repository analyzing tree version tla@mercurial--convert/tla--test--0... scanning source... sorting... converting... 5 initial import 4 added a file, src and src/b (binary) 3 added link to a and modify a 2 added second link and modify b 1 file to link and link to file test 0 move and rename a-link-2 file and src directory $ tla register-archive -d tla@mercurial--convert $ glog() > { > hg log -G --template '{rev} "{desc|firstline}" files: {files}\n' "$@" > } show graph log $ glog -R tla-repo-hg o 5 "move and rename a-link-2 file and src directory" files: c src/a-link src/a-link-2 src/b test/a-link test/b | o 4 "file to link and link to file test" files: src/a-link-2 src/b | o 3 "added second link and modify b" files: src/a-link-2 src/b | o 2 "added link to a and modify a" files: a src/a-link | o 1 "added a file, src and src/b (binary)" files: a src/b | o 0 "initial import" files: $ hg up -q -R tla-repo-hg $ hg -R tla-repo-hg manifest --debug c4072c4b72e1cabace081888efa148ee80ca3cbb 644 a 0201ac32a3a8e86e303dff60366382a54b48a72e 644 c c0067ba5ff0b7c9a3eb17270839d04614c435623 644 @ test/a-link 375f4263d86feacdea7e3c27100abd1560f2a973 644 @ test/b 
mercurial-3.7.3/tests/test-mq-guards.t0000644000175000017500000002501112676531525017332 0ustar mpmmpm00000000000000 $ echo "[extensions]" >> $HGRCPATH $ echo "mq=" >> $HGRCPATH $ hg init $ hg qinit $ echo x > x $ hg ci -Ama adding x $ hg qnew a.patch $ echo a > a $ hg add a $ hg qrefresh $ hg qnew b.patch $ echo b > b $ hg add b $ hg qrefresh $ hg qnew c.patch $ echo c > c $ hg add c $ hg qrefresh $ hg qpop -a popping c.patch popping b.patch popping a.patch patch queue now empty should fail $ hg qguard does-not-exist.patch +bleh abort: no patch named does-not-exist.patch [255] should fail $ hg qguard +fail abort: no patches applied [255] $ hg qpush applying a.patch now at: a.patch should guard a.patch $ hg qguard +a should print +a $ hg qguard a.patch: +a $ hg qpop popping a.patch patch queue now empty should fail $ hg qpush a.patch cannot push 'a.patch' - guarded by '+a' [1] $ hg qguard a.patch a.patch: +a should push b.patch $ hg qpush applying b.patch now at: b.patch $ hg qpop popping b.patch patch queue now empty test selection of an empty guard $ hg qselect "" abort: guard cannot be an empty string [255] $ hg qselect a number of unguarded, unapplied patches has changed from 2 to 3 should push a.patch $ hg qpush applying a.patch now at: a.patch $ hg qguard -- c.patch -a should print -a $ hg qguard c.patch c.patch: -a should skip c.patch $ hg qpush -a applying b.patch skipping c.patch - guarded by '-a' now at: b.patch $ hg qnext all patches applied [1] should display b.patch $ hg qtop b.patch $ hg qguard -n c.patch should push c.patch $ hg qpush -a applying c.patch now at: c.patch $ hg qpop -a popping c.patch popping b.patch popping a.patch patch queue now empty $ hg qselect -n guards deactivated number of unguarded, unapplied patches has changed from 3 to 2 should push all $ hg qpush -a applying b.patch applying c.patch now at: c.patch $ hg qpop -a popping c.patch popping b.patch patch queue now empty $ hg qguard a.patch +1 $ hg qguard b.patch +2 $ hg qselect 1 
number of unguarded, unapplied patches has changed from 1 to 2 should push a.patch, not b.patch $ hg qpush applying a.patch now at: a.patch $ hg qpush applying c.patch now at: c.patch $ hg qpop -a popping c.patch popping a.patch patch queue now empty $ hg qselect 2 should push b.patch $ hg qpush applying b.patch now at: b.patch $ hg qpush -a applying c.patch now at: c.patch $ hg qprev b.patch Used to be an issue with holes in the patch sequence So, put one hole on the base and ask for topmost patch. $ hg qtop c.patch $ hg qpop -a popping c.patch popping b.patch patch queue now empty $ hg qselect 1 2 number of unguarded, unapplied patches has changed from 2 to 3 should push a.patch, b.patch $ hg qpush applying a.patch now at: a.patch $ hg qpush applying b.patch now at: b.patch $ hg qpop -a popping b.patch popping a.patch patch queue now empty $ hg qguard -- a.patch +1 +2 -3 $ hg qselect 1 2 3 number of unguarded, unapplied patches has changed from 3 to 2 list patches and guards $ hg qguard -l a.patch: +1 +2 -3 b.patch: +2 c.patch: unguarded have at least one patch applied to test coloring $ hg qpush applying b.patch now at: b.patch list patches and guards with color $ hg --config extensions.color= qguard --config color.mode=ansi \ > -l --color=always \x1b[0;30;1ma.patch\x1b[0m: \x1b[0;33m+1\x1b[0m \x1b[0;33m+2\x1b[0m \x1b[0;31m-3\x1b[0m (esc) \x1b[0;34;1;4mb.patch\x1b[0m: \x1b[0;33m+2\x1b[0m (esc) \x1b[0;30;1mc.patch\x1b[0m: \x1b[0;32munguarded\x1b[0m (esc) should pop b.patch $ hg qpop popping b.patch patch queue now empty list series $ hg qseries -v 0 G a.patch 1 U b.patch 2 U c.patch list guards $ hg qselect 1 2 3 should push b.patch $ hg qpush applying b.patch now at: b.patch $ hg qpush -a applying c.patch now at: c.patch $ hg qselect -n --reapply -v guards deactivated popping guarded patches popping c.patch popping b.patch patch queue now empty reapplying unguarded patches skipping a.patch - guarded by '+1' '+2' skipping b.patch - guarded by '+2' skipping 
a.patch - guarded by '+1' '+2' skipping b.patch - guarded by '+2' applying c.patch patching file c adding c committing files: c committing manifest committing changelog now at: c.patch guards in series file: +1 +2 -3 $ hg qselect -s +1 +2 -3 should show c.patch $ hg qapplied c.patch $ hg qrename a.patch new.patch should show : new.patch: +1 +2 -3 b.patch: +2 c.patch: unguarded $ hg qguard -l new.patch: +1 +2 -3 b.patch: +2 c.patch: unguarded $ hg qnew d.patch $ hg qpop popping d.patch now at: c.patch should show new.patch and b.patch as Guarded, c.patch as Applied and d.patch as Unapplied $ hg qseries -v 0 G new.patch 1 G b.patch 2 A c.patch 3 U d.patch qseries again, but with color $ hg --config extensions.color= --config color.mode=ansi qseries -v --color=always 0 G \x1b[0;30;1mnew.patch\x1b[0m (esc) 1 G \x1b[0;30;1mb.patch\x1b[0m (esc) 2 A \x1b[0;34;1;4mc.patch\x1b[0m (esc) 3 U \x1b[0;30;1md.patch\x1b[0m (esc) $ hg qguard d.patch +2 new.patch, b.patch: Guarded. c.patch: Applied. d.patch: Guarded. 
$ hg qseries -v 0 G new.patch 1 G b.patch 2 A c.patch 3 G d.patch $ qappunappv() > { > for command in qapplied "qapplied -v" qunapplied "qunapplied -v"; do > echo % hg $command > hg $command > done > } $ hg qpop -a popping c.patch patch queue now empty $ hg qguard -l new.patch: +1 +2 -3 b.patch: +2 c.patch: unguarded d.patch: +2 $ qappunappv % hg qapplied % hg qapplied -v % hg qunapplied c.patch % hg qunapplied -v 0 G new.patch 1 G b.patch 2 U c.patch 3 G d.patch $ hg qselect 1 number of unguarded, unapplied patches has changed from 1 to 2 $ qappunappv % hg qapplied % hg qapplied -v % hg qunapplied new.patch c.patch % hg qunapplied -v 0 U new.patch 1 G b.patch 2 U c.patch 3 G d.patch $ hg qpush -a applying new.patch skipping b.patch - guarded by '+2' applying c.patch skipping d.patch - guarded by '+2' now at: c.patch $ qappunappv % hg qapplied new.patch c.patch % hg qapplied -v 0 A new.patch 1 G b.patch 2 A c.patch % hg qunapplied % hg qunapplied -v 3 G d.patch $ hg qselect 2 number of unguarded, unapplied patches has changed from 0 to 1 $ qappunappv % hg qapplied new.patch c.patch % hg qapplied -v 0 A new.patch 1 U b.patch 2 A c.patch % hg qunapplied d.patch % hg qunapplied -v 3 U d.patch $ for patch in `hg qseries`; do > echo % hg qapplied $patch > hg qapplied $patch > echo % hg qunapplied $patch > hg qunapplied $patch > done % hg qapplied new.patch new.patch % hg qunapplied new.patch b.patch d.patch % hg qapplied b.patch new.patch % hg qunapplied b.patch d.patch % hg qapplied c.patch new.patch c.patch % hg qunapplied c.patch d.patch % hg qapplied d.patch new.patch c.patch % hg qunapplied d.patch hg qseries -m: only b.patch should be shown the guards file was not ignored in the past $ hg qdelete -k b.patch $ hg qseries -m b.patch hg qseries -m with color $ hg --config extensions.color= --config color.mode=ansi qseries -m --color=always \x1b[0;31;1mb.patch\x1b[0m (esc) excercise corner cases in "qselect --reapply" $ hg qpop -a popping c.patch popping new.patch 
patch queue now empty $ hg qguard -- new.patch -not-new $ hg qguard -- c.patch -not-c $ hg qguard -- d.patch -not-d $ hg qpush -a applying new.patch applying c.patch applying d.patch patch d.patch is empty now at: d.patch $ hg qguard -l new.patch: -not-new c.patch: -not-c d.patch: -not-d $ hg qselect --reapply not-d popping guarded patches popping d.patch now at: c.patch reapplying unguarded patches cannot push 'd.patch' - guarded by '-not-d' $ hg qser -v 0 A new.patch 1 A c.patch 2 G d.patch $ hg qselect --reapply -n guards deactivated $ hg qpush applying d.patch patch d.patch is empty now at: d.patch $ hg qser -v 0 A new.patch 1 A c.patch 2 A d.patch $ hg qselect --reapply not-c popping guarded patches popping d.patch popping c.patch now at: new.patch reapplying unguarded patches applying d.patch patch d.patch is empty now at: d.patch $ hg qser -v 0 A new.patch 1 G c.patch 2 A d.patch $ hg qselect --reapply not-new popping guarded patches popping d.patch popping new.patch patch queue now empty reapplying unguarded patches applying c.patch applying d.patch patch d.patch is empty now at: d.patch $ hg qser -v 0 G new.patch 1 A c.patch 2 A d.patch test that qselect shows "number of guarded, applied patches" correctly $ hg qimport -q -e b.patch adding b.patch to series file $ hg qguard -- b.patch -not-b $ hg qpop -a -q patch queue now empty $ hg qunapplied -v 0 G new.patch 1 U c.patch 2 U d.patch 3 U b.patch $ hg qselect not-new not-c number of unguarded, unapplied patches has changed from 3 to 2 $ hg qpush -q -a patch d.patch is empty now at: b.patch $ hg qapplied -v 0 G new.patch 1 G c.patch 2 A d.patch 3 A b.patch $ hg qselect --none guards deactivated $ hg qselect not-new not-c not-d number of guarded, applied patches has changed from 0 to 1 test that "qselect --reapply" reapplies patches successfully when the already applied patch becomes unguarded and it follows the already guarded (= not yet applied) one. 
$ hg qpop -q -a patch queue now empty $ hg qselect not-new not-c number of unguarded, unapplied patches has changed from 1 to 2 $ hg qpush -q -a patch d.patch is empty now at: b.patch $ hg qapplied -v 0 G new.patch 1 G c.patch 2 A d.patch 3 A b.patch $ hg qselect -q --reapply not-c not-b now at: d.patch cannot push 'b.patch' - guarded by '-not-b' $ hg qseries -v 0 U new.patch 1 G c.patch 2 A d.patch 3 G b.patch test that "qselect --reapply" checks applied patches correctly when no applied patches becomes guarded but some of unapplied ones become unguarded. $ hg qpop -q -a patch queue now empty $ hg qselect not-new not-c not-d number of unguarded, unapplied patches has changed from 2 to 1 $ hg qpush -q -a now at: b.patch $ hg qapplied -v 0 G new.patch 1 G c.patch 2 G d.patch 3 A b.patch $ hg qselect -q --reapply not-new not-c $ hg qseries -v 0 G new.patch 1 G c.patch 2 U d.patch 3 A b.patch